@@ -5,7 +5,7 @@ const Offscreen = () => {
   */
  let client_mediaRecorder;
  let rep_mediaRecorder;
- let data = [];
+ const data = [];
 
  let old_transcript = "";
  let client_socket;
@@ -148,16 +148,27 @@ const Offscreen = () => {
       let msgData;
       try {
         msgData = JSON.parse(msg.data);
-      } catch { }
+      } catch {}
 
       if (msgData.type !== "Results") return;
 
       console.log("msgData: ", msgData);
 
       const { transcript } = msgData?.channel.alternatives[0] || {};
-      console.log("🚀 ~ client_socket.onmessage= ~ msgData?.channel.alternatives:", msgData?.channel.alternatives)
+      console.log(
+        "🚀 ~ client_socket.onmessage= ~ msgData?.channel.alternatives:",
+        msgData?.channel.alternatives
+      );
 
       if (transcript) {
+        //sending Current transcript
+        chrome.runtime.sendMessage({
+          message: {
+            type: "CLIENT_TRANSCRIPT_CURRENT",
+            target: "sidepanel",
+            data: transcript,
+          },
+        });
         console.log("---> old_transcript: ", old_transcript);
         console.log(
           "\x1b[31m[CLIENT] transcript ->",
@@ -181,7 +192,10 @@ const Offscreen = () => {
           .slice(-50)
           .join(" ");
 
-        console.log("🚀 ~ client_socket.onmessage= ~ transcriptionWithThreshold:", transcriptionWithThreshold)
+        console.log(
+          "🚀 ~ client_socket.onmessage= ~ transcriptionWithThreshold:",
+          transcriptionWithThreshold
+        );
 
         chrome.runtime.sendMessage({
           message: {
@@ -260,68 +274,106 @@ const Offscreen = () => {
   }
 
   async function handleTranscription(transcription) {
-    console.log("🚀 ~ handleTranscription ~ transcription:", transcription)
     let data;
     try {
-      const response = await fetch(
-        "https://hallyday-dashboard.vercel.app/api/ai/reply",
-        {
-          headers: {
-            "Content-Type": "application/json",
-          },
-          method: "POST",
-          body: JSON.stringify({
-            transcription,
-          }),
-        }
-      );
+      const response = await fetch("http://localhost:3000/api/ai/reply", {
+        headers: {
+          "Content-Type": "application/json",
+        },
+        method: "POST",
+        body: JSON.stringify({
+          transcription,
+        }),
+      });
 
       data = await response.json();
     } catch (error) {
       console.error(error);
     }
 
-    if (
-      data.aiResponseContent &&
-      data.aiResponseContent.length > 0 &&
-      data.aiResponseContent !== '""'
-    ) {
+    if (data?.error) {
       chrome.runtime.sendMessage({
         message: {
           type: "CLIENT_TRANSCRIPT_CONTEXT",
           target: "sidepanel",
           data: {
-            ai_insight: data.aiResponseContent,
-            user_request_content: data.userRequestContent,
-            message_text: transcription,
+            ai_insight: "",
+            user_request_content: "",
+            message_text: data.transcription,
           },
         },
       });
+      return;
+    }
 
-      // Reset the old data
+    chrome.runtime.sendMessage({
+      message: {
+        type: "CLIENT_TRANSCRIPT_CONTEXT",
+        target: "sidepanel",
+        data: {
+          ai_insight:
+            data?.aiResponseContent === "" ? "" : data?.aiResponseContent,
+          user_request_content:
+            data.userRequestContent === "" ? "" : data.userRequestContent,
+          message_text: data.transcription,
+        },
+      },
+    });
+
+    if (data.aiResponseContent !== "") {
       old_transcript = "";
     } else {
-      // Set the old transcript so that it can be appended with the next api call
-      // old_transcript = old_transcript + " " + transcript;
-
+      // old_transcript = old_transcript + " " + transcription;
       console.log(
         "\x1b[33m[OLD TRANSCRIPT] transcript ->",
         old_transcript,
         "\x1b"
       );
-
-      chrome.runtime.sendMessage({
-        message: {
-          type: "CLIENT_TRANSCRIPT_CONTEXT",
-          target: "sidepanel",
-          data: {
-            ai_insight: "",
-            message_text: "",
-            user_request_content: "",
-          },
-        },
-      });
     }
+
+    // if (
+    //   data.aiResponseContent &&
+    //   data.aiResponseContent.length > 0 &&
+    //   data.aiResponseContent !== '""'
+    // ) {
+    //   chrome.runtime.sendMessage({
+    //     message: {
+    //       type: "CLIENT_TRANSCRIPT_CONTEXT",
+    //       target: "sidepanel",
+    //       data: {
+    //         ai_insight: data.aiResponseContent,
+    //         user_request_content: data.userRequestContent,
+    //         message_text: transcription,
+    //       },
+    //     },
+    //   });
+
+    //   // Reset the old data
+    //   old_transcript = "";
+    // } else {
+    //   // Set the old transcript so that it can be appended with the next api call
+    //   // old_transcript = old_transcript + " " + transcript;
+
+    //   console.log(
+    //     "\x1b[33m[OLD TRANSCRIPT] transcript ->",
+    //     old_transcript,
+    //     "\x1b"
+    //   );
+
+    //   chrome.runtime.sendMessage({
+    //     message: {
+    //       type: "CLIENT_TRANSCRIPT_CONTEXT",
+    //       target: "sidepanel",
+    //       data: {
+    //         ai_insight:
+    //           data?.aiResponseContent === "" ? "" : data?.aiResponseContent,
+    //         user_request_content:
+    //           data.userRequestContent === "" ? "" : data.userRequestContent,
+    //         message_text: data.transcription,
+    //       },
+    //     },
+    //   });
+    // }
   }
 
   // https://github.com/deepgram-devs/transcription-chrome-extension/blob/37d34f4b0b2a38ef10ced0f9c02d794dae961407/mic-and-tab/content-script.js#L47
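For reference, the sidepanel presumably consumes these runtime messages through chrome.runtime.onMessage. Below is a minimal sketch of such a listener; only the message types (CLIENT_TRANSCRIPT_CURRENT, CLIENT_TRANSCRIPT_CONTEXT), the "sidepanel" target, and the payload fields are taken from the diff above, while the render helpers are hypothetical stand-ins and not part of this commit.

// Illustrative sidepanel-side listener (not part of this commit).
function renderLiveTranscript(text) {
  // Placeholder: a real sidepanel would update its live-caption UI here.
  console.log("live transcript:", text);
}

function renderAiInsight(aiInsight, userRequestContent, messageText) {
  // Placeholder: a real sidepanel would render the AI reply and its context here.
  console.log("ai insight:", aiInsight, userRequestContent, messageText);
}

chrome.runtime.onMessage.addListener((request) => {
  const { message } = request || {};
  if (!message || message.target !== "sidepanel") return;

  if (message.type === "CLIENT_TRANSCRIPT_CURRENT") {
    // message.data is the latest transcript string sent from the offscreen page.
    renderLiveTranscript(message.data);
  }

  if (message.type === "CLIENT_TRANSCRIPT_CONTEXT") {
    // message.data carries ai_insight, user_request_content, and message_text.
    const { ai_insight, user_request_content, message_text } = message.data;
    renderAiInsight(ai_insight, user_request_content, message_text);
  }
});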