diff --git a/build/asset-manifest.json b/build/asset-manifest.json
index 721f095..eac17e5 100644
--- a/build/asset-manifest.json
+++ b/build/asset-manifest.json
@@ -1,14 +1,14 @@
 {
   "files": {
     "main.css": "/static/css/main.7cc9301b.chunk.css",
-    "main.js": "/static/js/main.a33677a6.chunk.js",
-    "main.js.map": "/static/js/main.a33677a6.chunk.js.map",
+    "main.js": "/static/js/main.e75e603b.chunk.js",
+    "main.js.map": "/static/js/main.e75e603b.chunk.js.map",
     "runtime-main.js": "/static/js/runtime-main.39f04a74.js",
     "runtime-main.js.map": "/static/js/runtime-main.39f04a74.js.map",
     "static/js/2.4070f991.chunk.js": "/static/js/2.4070f991.chunk.js",
     "static/js/2.4070f991.chunk.js.map": "/static/js/2.4070f991.chunk.js.map",
     "index.html": "/index.html",
-    "precache-manifest.96934c61ad72a39de9d0a56832ab0c27.js": "/precache-manifest.96934c61ad72a39de9d0a56832ab0c27.js",
+    "precache-manifest.5a232ee8c4418509be470e7e4d91151d.js": "/precache-manifest.5a232ee8c4418509be470e7e4d91151d.js",
     "service-worker.js": "/service-worker.js",
     "static/css/main.7cc9301b.chunk.css.map": "/static/css/main.7cc9301b.chunk.css.map",
     "static/js/2.4070f991.chunk.js.LICENSE": "/static/js/2.4070f991.chunk.js.LICENSE"
@@ -17,6 +17,6 @@
     "static/js/runtime-main.39f04a74.js",
     "static/js/2.4070f991.chunk.js",
     "static/css/main.7cc9301b.chunk.css",
-    "static/js/main.a33677a6.chunk.js"
+    "static/js/main.e75e603b.chunk.js"
   ]
 }
\ No newline at end of file
diff --git a/build/index.html b/build/index.html
index 8c53c1c..d1c475f 100644
--- a/build/index.html
+++ b/build/index.html
@@ -1 +1 @@
-Speech to Text
\ No newline at end of file
+Speech to Text
\ No newline at end of file
diff --git a/build/precache-manifest.96934c61ad72a39de9d0a56832ab0c27.js b/build/precache-manifest.5a232ee8c4418509be470e7e4d91151d.js
similarity index 72%
rename from build/precache-manifest.96934c61ad72a39de9d0a56832ab0c27.js
rename to build/precache-manifest.5a232ee8c4418509be470e7e4d91151d.js
index 67abbf5..56b9ae0 100644
--- a/build/precache-manifest.96934c61ad72a39de9d0a56832ab0c27.js
+++ b/build/precache-manifest.5a232ee8c4418509be470e7e4d91151d.js
@@ -1,10 +1,10 @@
 self.__precacheManifest = (self.__precacheManifest || []).concat([
   {
-    "revision": "fe12d693fd5c2f20a2c6111f5156d04b",
+    "revision": "7d2039ecf4a3a0f47d093cf6c85c132c",
     "url": "/index.html"
   },
   {
-    "revision": "dabfde008b6acee801b9",
+    "revision": "396e970b5f7fac029ad2",
     "url": "/static/css/main.7cc9301b.chunk.css"
   },
   {
@@ -16,8 +16,8 @@ self.__precacheManifest = (self.__precacheManifest || []).concat([
     "url": "/static/js/2.4070f991.chunk.js.LICENSE"
   },
   {
-    "revision": "dabfde008b6acee801b9",
-    "url": "/static/js/main.a33677a6.chunk.js"
+    "revision": "396e970b5f7fac029ad2",
+    "url": "/static/js/main.e75e603b.chunk.js"
   },
   {
     "revision": "dd3c31bd507a68d5adc3",
diff --git a/build/service-worker.js b/build/service-worker.js
index bb516e9..dcaf5d3 100644
--- a/build/service-worker.js
+++ b/build/service-worker.js
@@ -14,7 +14,7 @@
 importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");
 
 importScripts(
-  "/precache-manifest.96934c61ad72a39de9d0a56832ab0c27.js"
+  "/precache-manifest.5a232ee8c4418509be470e7e4d91151d.js"
 );
 
 self.addEventListener('message', (event) => {
diff --git a/build/static/js/main.a33677a6.chunk.js.map b/build/static/js/main.a33677a6.chunk.js.map
deleted file mode 100644
index b15d356..0000000
--- a/build/static/js/main.a33677a6.chunk.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["components/Header/Header.js","components/Header/index.js","components/SubmitContainer/SubmitContainer.js","components/SubmitContainer/index.js","components/ControlContainer/ControlContainer.js","components/ControlContainer/index.js","components/AudioWave/AudioWave.js","components/AudioWave/index.js","components/KeywordTooltip/index.js","components/KeywordTooltip/KeywordTooltip.js","components/TranscriptBox/TranscriptBox.js","components/TranscriptBox/utils.js","components/TranscriptBox/index.js","components/OutputContainer/OutputContainer.js","components/OutputContainer/index.js","components/Toast/Toast.js","components/Toast/index.js","components/ServiceContainer/reducer.js","components/ServiceContainer/utils.js","utils.js","components/ServiceContainer/index.js","components/ServiceContainer/ServiceContainer.js","App.js","serviceWorker.js","index.js"],"names":["Header","description","links","title","Tile","className","map","link","defaultProps","SubmitContainer","isRecording","isSamplePlaying","isUploadPlaying","keywordText","modelName","onError","onStartPlayingFileUpload","onStopPlayingFileUpload","onStartPlayingSample","onStopPlayingSample","onStartRecording","onStopRecording","useSpeakerLabels","useState","keywordList","setKeywordList","useEffect","newKeywordList","length","split","k","trim","sampleModelInfo","models","find","model","name","sampleModelFilename","filename","getBaseAudioConfig","a","async","fetch","authResponse","json","authJson","ok","error","options","lowerCasedKeywords","keyword","toLowerCase","url","undefined","accessToken","format","keywords","keywordsThreshold","objectMode","play","realtime","resultsBySpeaker","speakerlessInterim","timestamps","getSampleAudioConfig","baseConfig","file","getMicrophoneAudioConfig","getUploadAudioConfig","Button","kind","onClick","disabled","config","FileUploader","accept","buttonKind","disableLabelChanges","labelText","onChange","evt","uploadedFile","currentTarget","files","ControlContainer","onSelectNewModel","dropdownChoices","id","label","supportsSpeakerLabels","supported_features","speaker_labels","selectModel","setKeywordText","setUseSpeakerLabels","FormGroup","legendText","Dropdown","newModel","selectedItem","newKeywordText","items","defaultText","ariaLabel","light","TextArea","placeholder","hideLabel","invalidText","value","target","ToggleSmall","aria-label","toggled","onToggle","AudioWave","props","audioWaveCanvasRef","React","createRef","audioWaveCanvasCtx","animationFrameId","draw","bind","state","startingTimestamp","microphoneData","Uint8Array","this","current","getContext","lineCap","initializeCanvasDimensions","prevProps","data","drawInitialAudioWave","isTranscribing","setStartingTimestamp","stopDrawing","setState","Date","now","canvasCtx","audioWaveContainer","audioWaveContainerRef","width","clientWidth","height","resetCanvasForNewFrame","drawEmptyDataPoints","forEach","dataPoint","i","beginPath","fillStyle","fillRect","stroke","closePath","audioAnalyzer","getByteFrequencyData","slice","call","n","duration","audioProgressPercent","highlightIndex","wholeHighlightIndex","Math","floor","decimalOpacityIndex","fillColor","toFixed","audioSource","drawMicrophoneDataPoints","drawAudioDataPoints","requestAnimationFrame","audioWaveCanvas","clearRect","cancelAnimationFrame","ref","Component","KeywordTooltip","confidence","startTime","endTime","mapTranscriptTextToElements","text","keywordInfo","totalIndex","matches","regex","allKeywords","sectionKeywords","Object","keys","regexWordSearch","word","index","reduc
e","arr","str","RegExp","createWordRegex","type","wordOccurences","sentenceFragment","fragmentToSearch","keywordInfoMatch","keywordOccurenceIndex","infoForOccurence","start_time","end_time","TranscriptBox","transcriptArray","transcriptItem","overallIndex","speaker","parsedTextElements","key","element","elementIndex","TooltipDefinition","align","direction","tooltipText","triggerClassName","OutputContainer","audioDataArray","audioDuration","Toast","caption","children","hideAfterFirstDisplay","hideCloseButton","iconDescription","lowContrast","onCloseButtonClick","role","subtitle","timeout","setId","hideToast","setHideToast","random","toString","substring","document","querySelector","window","localStorage","getItem","setItem","actionTypes","initialState","audioContext","audioDurationInMs","audioStream","speakerLabels","transcript","reducer","action","updatedTranscript","resultIndex","Error","AUDIO_VISUALIZATION_DIMENSIONS","readFileToArrayBuffer","fileData","fileReader","FileReader","Promise","resolve","reject","onload","arrayBuffer","result","onerror","abort","readAsArrayBuffer","convertAudioBlobToVisualizationData","audioBlob","audioCtx","audioWaveContainerWidth","audioArrayBuffer","audioUint8Array","decodeAudioData","audioDataBuffer","validContainerWidth","numberOfChunks","chunkSize","chunkedAudioDataArray","previousIndex","push","reducedFloatArray","chunk","prevValue","currentValue","createError","ServiceContainer","useReducer","dispatch","useRef","AudioContext","webkitAudioContext","createAnalyser","parseResults","results","result_index","finalKeywords","finalTranscript","isFinal","final","alternatives","keywords_result","formatStreamData","handleStreamEnd","stop","readAudioFileForVisualization","containerClientWidth","audioVisualizationWidth","isFileType","File","audioRequest","blob","captureAudioFromMicrophone","recognizeOptions","mediaStream","navigator","mediaDevices","getUserMedia","video","audio","recognizeMicrophoneStream","recognizeMicrophone","keepMic","updatedAudioAnalyzer","fttSize","createMediaStreamSource","connect","onSubmit","stream","on","cleanUpOldStreamIfNecessary","removeAllListeners","recognizeStream","resume","recognizeConfig","recognizeFile","HEADER_LINKS","Link","rel","href","renderIcon","Launch16","App","Boolean","location","hostname","match","ReactDOM","render","getElementById","serviceWorker","ready","then","registration","unregister"],"mappings":"scAIaA,EAAS,SAAC,GAAD,IAAGC,EAAH,EAAGA,YAAaC,EAAhB,EAAgBA,MAAOC,EAAvB,EAAuBA,MAAvB,OACpB,kBAACC,EAAA,EAAD,CAAMC,UAAU,UACd,yBAAKA,UAAU,mBACb,wBAAIA,UAAU,gBAAgBF,GAC9B,2BAAIF,IAEN,yBAAKI,UAAU,kBACb,yBAAKA,UAAU,gBAAgBH,EAAMI,KAAI,SAAAC,GAAI,OAAIA,SAWvDP,EAAOQ,aAAe,CACpBP,YAAa,GACbC,MAAO,GACPC,MAAO,IAGMH,IC3BAA,ED2BAA,E,uIEtBFS,EAAkB,SAAC,GAczB,IAbLC,EAaI,EAbJA,YACAC,EAYI,EAZJA,gBACAC,EAWI,EAXJA,gBACAC,EAUI,EAVJA,YACAC,EASI,EATJA,UACAC,EAQI,EARJA,QACAC,EAOI,EAPJA,yBACAC,EAMI,EANJA,wBACAC,EAKI,EALJA,qBACAC,EAII,EAJJA,oBACAC,EAGI,EAHJA,iBACAC,EAEI,EAFJA,gBACAC,EACI,EADJA,iBACI,EACkCC,mBAAS,IAD3C,mBACGC,EADH,KACgBC,EADhB,KAEJC,qBAAU,WACR,IAAIC,EAAiB,GACjBd,EAAYe,OAAS,IACvBD,EAAiBd,EAAYgB,MAAM,KAAKvB,KAAI,SAAAwB,GAAC,OAAIA,EAAEC,WAErDN,EAAeE,KACd,CAACd,IAEJ,IAAMmB,EAAkBC,EAAOC,MAAK,SAAAC,GAAK,OAAIA,EAAMC,OAAStB,KACtDuB,EAAsBL,EAAkBA,EAAgBM,SAAW,KAEnEC,EAAqB,gCAAAC,EAAAC,OAAA,kEAAAD,EAAA,MAGJE,IAAM,cAHF,cAGzBC,EAHyB,kBAAAH,EAAA,MAIRG,EAAaC,QAJL,UAIzBC,EAJyB,OAKpBF,EAAaG,GALO,uBAMvB/B,EAAQ8B,GANe,kBAOhB,CACLE,MAAOF,IARc,cAYrBG,EAAU,GAIRC,EAAqBzB,EAAYlB,KAAI,SAAA4C,GAAO,OAChDA,EAAQC,iBAGVH,EAAO,eACFA,EADE,CAELI,IAAKP,EAASO,UAAOC,EA
CrBC,YAAaT,EAASS,YACtBC,QAAQ,EACRC,SAAUhC,EAAYI,OAAS,EAAIqB,OAAqBI,EACxDI,kBAAmBjC,EAAYI,OAAS,EAAI,SAAOyB,EACnDlB,MAAOrB,EACP4C,YAAY,EACZC,MAAM,EACNC,UAAU,EACVC,iBAAkBvC,EAClBwC,oBAAoB,EACpBC,YAAY,IAjCW,kBAoClBf,GApCkB,uCAuCrBgB,EAAuB,0BAAAxB,EAAAC,OAAA,kEAAAD,EAAA,MACFD,KADE,cACrB0B,EADqB,wCAGtBA,EAHsB,CAIzBC,KAAK,SAAD,OAAW7B,MAJU,sCAQvB8B,EAA2B,0BAAA3B,EAAAC,OAAA,kEAAAD,EAAA,MACND,KADM,cACzB0B,EADyB,wCAG1BA,EAH0B,CAI7BJ,kBAAkB,KAJW,sCAQ3BO,EAAuB,SAAMF,GAAN,eAAA1B,EAAAC,OAAA,kEAAAD,EAAA,MACFD,KADE,cACrB0B,EADqB,wCAGtBA,EAHsB,CAIzBC,OACAL,kBAAkB,KALO,sCAS7B,OACE,yBAAKxD,UAAU,oBACZM,EACC,kBAAC0D,EAAA,EAAD,CACEhE,UAAU,gBACViE,KAAK,WACLC,QAASpD,GAHX,qBAQA,kBAACkD,EAAA,EAAD,CACEhE,UAAU,gBACVmE,UAAW1D,EACXwD,KAAK,WACLC,QAAS,0BAAA/B,EAAAC,OAAA,kEAAAD,EAAA,MACcwB,KADd,QACDS,EADC,QAEK1B,OACV7B,EAAqBuD,GAHhB,uCAJX,qBAcD/D,EACC,kBAAC2D,EAAA,EAAD,CACEhE,UAAU,gBACViE,KAAK,WACLC,QAASlD,GAHX,kBAQA,kBAACgD,EAAA,EAAD,CACEhE,UAAU,gBACVmE,UAAW1D,EACXwD,KAAK,WACLC,QAAS,0BAAA/B,EAAAC,OAAA,kEAAAD,EAAA,MACc2B,KADd,QACDM,EADC,QAEK1B,OACV3B,EAAiBqD,GAHZ,uCAJX,mBAcD7D,EACC,kBAACyD,EAAA,EAAD,CACEhE,UAAU,gBACViE,KAAK,WACLC,QAAStD,GAHX,gBAQA,kBAACyD,EAAA,EAAD,CACEC,OAAQ,CAAC,YAAa,aAAc,aAAc,cAClDC,WAAW,WACXvE,UAAU,gBACVmE,UAAW1D,EACX+D,qBAAmB,EACnBC,UAAU,cACVC,SAAU,SAAMC,GAAN,iBAAAxC,EAAAC,OAAA,uDACFwC,EAAeD,EAAIE,cAAcC,MAAM,GADrC,WAAA3C,EAAA,MAEa4B,EAAqBa,IAFlC,QAEFR,EAFE,QAGI1B,OACV/B,EAAyByD,GAJnB,0CA6BpBhE,EAAgBD,aAAe,CAC7BE,aAAa,EACbC,iBAAiB,EACjBC,iBAAiB,EACjBC,YAAa,GACbC,UAAW,KACXC,QAAS,aACTC,yBAA0B,aAC1BC,wBAAyB,aACzBC,qBAAsB,aACtBC,oBAAqB,aACrBC,iBAAkB,aAClBC,gBAAiB,aACjBC,kBAAkB,GAGLb,IC7MAA,ED6MAA,EElMF2E,EAAmB,SAAC,GAY1B,IAXL1E,EAWI,EAXJA,YACAC,EAUI,EAVJA,gBACAC,EASI,EATJA,gBACAG,EAQI,EARJA,QACAsE,EAOI,EAPJA,iBACArE,EAMI,EANJA,yBACAC,EAKI,EALJA,wBACAC,EAII,EAJJA,qBACAC,EAGI,EAHJA,oBACAC,EAEI,EAFJA,iBACAC,EACI,EADJA,gBAEMiE,EAAkBrD,EAAO3B,KAAI,SAAA6B,GAAK,MAAK,CAC3CoD,GAAIpD,EAAMC,KACVoD,MAAOrD,EAAMlC,YACbwF,sBAAuBtD,EAAMuD,mBAAmBC,mBAJ9C,EAOyBpE,mBAAS+D,EAAgB,IAPlD,mBAOGnD,EAPH,KAOUyD,EAPV,OAQkCrE,mBAASU,EAAO,GAAGuB,UARrD,mBAQG3C,EARH,KAQgBgF,EARhB,OAS4CtE,oBAAS,GATrD,mBASGD,EATH,KASqBwE,EATrB,KA0BJ,OACE,kBAAC1F,EAAA,EAAD,CAAMC,UAAU,qBACd,wBAAIA,UAAU,mBAAd,SACA,kBAAC0F,EAAA,EAAD,CAAWC,WAAW,kBACpB,kBAACC,EAAA,EAAD,CACEV,GAAG,0BACHC,MAAM,0BACNT,SAtBsB,SAAAmB,GAC5BN,EAAYM,EAASC,cAErB,IAAMC,EAAiBnE,EAAOC,MAC5B,SAAAC,GAAK,OAAIA,EAAMC,OAAS8D,EAASC,aAAaZ,MAC9C/B,SACFqC,EAAeO,GAEX9E,IAAqB4E,EAASC,aAAaV,uBAC7CK,GAAoB,GAGtBT,KAWMgB,MAAOf,EACPa,aAAchE,GAASA,EAAMqD,MAC7Bc,YAAY,0BACZC,UAAU,8BACVC,OAAK,KAGT,kBAACT,EAAA,EAAD,CAAWC,WAAW,oBACpB,kBAACS,EAAA,EAAD,CACElB,GAAG,uBACHT,UAAU,gCACV4B,YAAY,iCACZC,WAAS,EACTC,YAAY,4BACZC,MAAOhG,EACPkE,SAAU,SAAAC,GACRa,EAAeb,EAAI8B,OAAOD,QAE5BL,OAAK,KAGT,kBAACT,EAAA,EAAD,CAAWC,WAAW,+DACpB,kBAACe,EAAA,EAAD,CACExB,GAAG,uBACHyB,aAAW,uBACXxC,UAAWrC,IAAUA,EAAMsD,sBAC3BwB,QAAS3F,EACT4F,SAAU,WACRpB,GAAqBxE,OAI3B,kBAAC,EAAD,CACEZ,YAAaA,EACbC,gBAAiBA,EACjBC,gBAAiBA,EACjBC,YAAaA,EACbC,UAAWqB,GAASA,EAAMoD,GAC1BxE,QAASA,EACTC,yBAA0BA,EAC1BC,wBAAyBA,EACzBC,qBAAsBA,EACtBC,oBAAqBA,EACrBC,iBAAkBA,EAClBC,gBAAiBA,EACjBC,iBAAkBA,MAoB1B8D,EAAiB5E,aAAe,CAC9BE,aAAa,EACbC,iBAAiB,EACjBC,iBAAiB,EACjBG,QAAS,aACTsE,iBAAkB,aAClBrE,yBAA0B,aAC1BC,wBAAyB,aACzBC,qBAAsB,aACtBC,oBAAqB,aACrBC,iBAAkB,aAClBC,gBAAiB,cAGJ+D,ICxIAA,EDwIAA,E,gDEjIF+B,EAAb,YACE,WAAYC,GAAQ,IAAD,8BACjB,4CAAMA,KAEDC,mBAAqBC,IAAMC,YAChC,EAAKC,mBAAqB,KAC1B,EAAKC,iBAAmB,KAExB,EAAKC,KAAO,EAAKA,KAAKC,KAAV,gBAEZ,EAAKC,MAAQ,CACXC,kBAAmB,EACnBC,eAAgB,IAAIC,WAAW,OAXhB,EADrB,iFAiBIC,KAAKR,mBAAqBQ,KAAKX,mBAAmBY,QAAQC,WAAW,MACrEF,KAAKR,mBAAmBW,QAAU,QAClCH,KA
AKI,+BAnBT,yCAsBqBC,GAAW,YACHA,EAAUC,KADP,oBAEAN,KAAKZ,MAAMkB,KAFX,OAK1BN,KAAKO,wBAIwB,IAA7BF,EAAUG,iBACoB,IAA9BR,KAAKZ,MAAMoB,gBAEXR,KAAKS,uBACLT,KAAKN,SAEwB,IAA7BW,EAAUG,iBACoB,IAA9BR,KAAKZ,MAAMoB,gBAEXR,KAAKU,gBAxCX,6CA6CIV,KAAKW,SAAS,CAAEd,kBAAmBe,KAAKC,UA7C5C,mDAiDI,IAAMC,EAAYd,KAAKX,mBAAmBY,QACpCc,EAAqBf,KAAKZ,MAAM4B,sBAAsBf,QAE5Da,EAAUG,MAAQF,EAAmBG,YACrCJ,EAAUK,OAAS,MArDvB,6CAyDInB,KAAKoB,yBACLpB,KAAKqB,wBA1DT,4CA6DyB,IAAD,OACHrB,KAAKZ,MAAdkB,KACHgB,SAAQ,SAACC,EAAWC,GACvB,EAAKhC,mBAAmBiC,YAExB,EAAKjC,mBAAmBkC,UAAY,wBACpC,EAAKlC,mBAAmBmC,SAtEJ,EAuElBH,EAtEoB,GAHH,EACC,GA2EED,GAEtB,EAAK/B,mBAAmBmC,SA5EJ,EA6ElBH,EA5EoB,GAHH,GACC,GAiFGD,GAEvB,EAAK/B,mBAAmBoC,SACxB,EAAKpC,mBAAmBqC,iBAhF9B,iDAoF8B,IAAD,OACzB7B,KAAKZ,MAAM0C,cAAcC,qBAAqB/B,KAAKJ,MAAME,gBADhC,IAGjBA,EAAmBE,KAAKJ,MAAxBE,eACU,GAAGkC,MAAMC,KAAKnC,GACHxH,KAAI,SAAA4J,GAAC,OAAIA,EAAI,OAE/BZ,SAAQ,SAACC,EAAWC,GAC7B,EAAKhC,mBAAmBiC,YAExB,EAAKjC,mBAAmBkC,UAAY,sBACpC,EAAKlC,mBAAmBmC,SAlGJ,EAmGlBH,EAlGoB,GAHH,EACC,GAuGED,GAEtB,EAAK/B,mBAAmBmC,SAxGJ,EAyGlBH,EAxGoB,GAHH,GACC,GA6GGD,GAEvB,EAAK/B,mBAAmBoC,SACxB,EAAKpC,mBAAmBqC,iBA5G9B,4CAgHyB,IAAD,SACO7B,KAAKZ,MAAxBkB,EADY,EACZA,KAAM6B,EADM,EACNA,SAMRC,GAHMxB,KAAKC,MACab,KAAKJ,MAA3BC,mBAEmCsC,EAIrCE,EADqB/B,EAAK1G,OACYwI,EAEtCE,EAAsBC,KAAKC,MAAMH,GACnCI,EAAsBJ,EAAiBC,EAEvCG,EAAsB,IACxBA,EAAsB,GAGpBA,EAAsB,KACxBA,EAAsB,IAGxBnC,EAAKgB,SAAQ,SAACC,EAAWC,GACvB,EAAKhC,mBAAmBiC,YAExB,IAAIiB,EAAY,sBACZlB,EAAIc,IACNI,EAAY,yBAGVlB,EAAI,IAAMc,IACZI,EAAS,2BAAuBD,EAAoBE,QAAQ,GAAnD,MAGX,EAAKnD,mBAAmBkC,UAAYgB,EACpC,EAAKlD,mBAAmBmC,SAxJJ,EAyJlBH,EAxJoB,GAHH,EACC,GA6JED,GAEtB,EAAK/B,mBAAmBmC,SA9JJ,EA+JlBH,EA9JoB,GAHH,GACC,GAmKGD,GAEvB,EAAK/B,mBAAmBoC,SACxB,EAAKpC,mBAAmBqC,iBAlK9B,6BAuKI7B,KAAKoB,yBAEDpB,KAAKZ,MAAMwD,aAA0C,eAA3B5C,KAAKZ,MAAMwD,YACvC5C,KAAK6C,2BAEL7C,KAAK8C,sBAGP9C,KAAKP,iBAAmBsD,sBAAsB/C,KAAKN,QA/KvD,+CAmLI,IAAMsD,EAAkBhD,KAAKX,mBAAmBY,QAChDD,KAAKR,mBAAmByD,UACtB,EACA,EACAD,EAAgB/B,MAChB+B,EAAgB7B,UAxLtB,oCA6LQnB,KAAKP,kBACPyD,qBAAqBlD,KAAKP,oBA9LhC,+BAmMI,OACE,yBAAK0D,IAAKnD,KAAKZ,MAAM4B,sBAAuB3I,UAAU,aACpD,4BAAQ8K,IAAKnD,KAAKX,0BArM1B,GAA+BC,IAAM8D,WAoNrCjE,EAAU3G,aAAe,CACvBoK,YAAa,KACbd,cAAe,MAGF3C,IChOAA,EDgOAA,E,SEhOAkE,ECEe,SAAC,GAAD,IAAGC,EAAH,EAAGA,WAAYC,EAAf,EAAeA,UAAWC,EAA1B,EAA0BA,QAA1B,OAC5B,yBAAKnL,UAAU,mBACb,0CAAgBiL,GAChB,2BACGC,EADH,OACkBC,EADlB,O,QCAEC,EAA8B,SAACC,EAAMC,EAAaC,GACtD,IACIC,EAAU,GAEd,GAAIF,EAAY/J,OAAS,EAAG,CAC1B,IAAMkK,ECXqB,SAAAH,GAC7B,IAAII,EAAc,GAClBJ,EAAYrC,SAAQ,SAAA0C,GAClBD,EAAW,sBAAOA,GAAP,YAAuBE,OAAOC,KAAKF,QAEhD,IAMMG,EANaJ,EAAYzL,KAAI,SAAC8L,EAAMC,GACxC,OAAIA,IAAUN,EAAYnK,OAAS,EAC3B,GAAN,OAAUwK,EAAV,KAEKA,KAE0BE,QAAO,SAACC,EAAKC,GAAN,OAAcD,EAAMC,IAAK,IAEnE,OADc,IAAIC,OAAJ,WAAeN,EAAf,UAAwC,MDDtCO,CAAgBf,GAC9BE,EAAUH,EAAK7J,MAAMiK,GAIvB,GAAuB,IAAnBD,EAAQjK,OACV,MAAO,CACL,CACE8J,OACAiB,KAAM,WAKZ,IAAMC,EAAiB,GAwCvB,OAvCqBf,EAAQvL,KAAI,SAACuM,EAAkBR,GAElD,IAAMS,EAAmBD,EAAiB1J,cAE1C,GAAIkJ,EAAQ,IAAM,EAChB,MAAO,CACLX,KAAMmB,EACNF,KAAM,UAMV,IAAMI,EACJpB,EAAYC,IAAeD,EAAYC,GAAYkB,GACjDE,EAAwB,EACxBJ,EAAeE,IACjBE,EAAwBJ,EAAeE,GACvCF,EAAeE,IAAqB,GAEpCF,EAAeE,GAAoB,EAErC,IAAMG,EACJF,GAAoBA,EAAiBC,GAGvC,OAAKC,EAIE,CACLvB,KAAMmB,EACNF,KAAM,UACNpB,UAAW0B,EAAiBC,WAC5B1B,QAASyB,EAAiBE,SAC1B7B,WAAY2B,EAAiB3B,YARtB,OAeA8B,GAAgB,SAAC,GAAsC,IAApCzB,EAAmC,EAAnCA,YAAa0B,EAAsB,EAAtBA,gBAC3C,OACE,yBAAKhN,UAAU,kBACZgN,EAAgB/M,KAAI,SAACgN,EAAgBC,GAAkB,IAC9CC,EAAkBF,EAAlBE,QAAS9B,EAAS4B,EAAT5B,KACX+B,EAAqBhC,EACzBC,EACAC,EACA4B,GAGF,OACE,yBAAKG,IAAG,qBAAgBH,IACT,OAAZC,GACC,0BAAMnN,UAAS,yBAAoBmN,IAAnC,kBACcA,EADd,OAIDC,EAAmBnN,KAAI,SAACqN,EAASC,GAChC,OAAKD,EAIgB,WAAjBA,EAAQhB,KAE
R,0BACEe,IAAG,0BAAqBH,EAArB,YAAqCK,IAD1C,UAEKD,EAAQjC,OAEW,YAAjBiC,EAAQhB,KAEf,kBAACkB,EAAA,EAAD,CACEC,MAAM,SACNC,UAAU,MACVL,IAAG,6BAAwBH,EAAxB,YAAwCK,GAC3CI,YACE,kBAAC,EAAD,CACE1C,WAAYqC,EAAQrC,WACpBC,UAAWoC,EAAQpC,UACnBC,QAASmC,EAAQnC,UAGrByC,iBAAiB,wBAEhBN,EAAQjC,MAKR,KA7BE,cA2CvB0B,GAAc5M,aAAe,CAC3BmL,YAAa,GACb0B,gBAAiB,IAGJD,IEvIAA,GFuIAA,GGlIFc,GAAkB,SAAC,GAAD,IAC7BpE,EAD6B,EAC7BA,cACAqE,EAF6B,EAE7BA,eACAC,EAH6B,EAG7BA,cACAxD,EAJ6B,EAI7BA,YACA5B,EAL6B,EAK7BA,sBACAR,EAN6B,EAM7BA,eACAmD,EAP6B,EAO7BA,YACA0B,EAR6B,EAQ7BA,gBAR6B,OAU7B,kBAACjN,EAAA,EAAD,CAAMC,UAAU,oBACd,wBAAIA,UAAU,mBAAd,UACA,kBAAC0F,EAAA,EAAD,CAAWC,WAAW,SACpB,kBAAC,EAAD,CACEgD,sBAAuBA,EACvBV,KAAM6F,EACNhE,SAAUiE,EACV5F,eAAgBA,EAChBoC,YAAaA,EACbd,cAAeA,KAGnB,kBAAC/D,EAAA,EAAD,CAAWC,WAAW,cACpB,kBAAC,GAAD,CACE2F,YAAaA,EACb0B,gBAAiBA,OAiBzBa,GAAgB1N,aAAe,CAC7B2N,eAAgB,GAChBC,cAAe,EACfxD,YAAa,GACbpC,gBAAgB,EAChBmD,YAAa,GACb0B,gBAAiB,IAGJa,ICxDAA,GDwDAA,G,UEnDFG,GAAQ,SAAC,GAcf,IAbLC,EAaI,EAbJA,QACAC,EAYI,EAZJA,SACAlO,EAWI,EAXJA,UACAmO,EAUI,EAVJA,sBACAC,EASI,EATJA,gBACAC,EAQI,EARJA,gBACApK,EAOI,EAPJA,KACAqK,EAMI,EANJA,YACAC,EAKI,EALJA,mBACAC,EAII,EAJJA,KACAC,EAGI,EAHJA,SACAC,EAEI,EAFJA,QACA5O,EACI,EADJA,MACI,EACgBoB,qBADhB,mBACGgE,EADH,KACOyJ,EADP,OAE8BzN,oBAAS,GAFvC,mBAEG0N,EAFH,KAEcC,EAFd,KAiCJ,OA7BAxN,qBAAU,WACRsN,EACEzE,KAAK4E,SACFC,SAAS,IACTC,UAAU,EAAG,IACd9E,KAAK4E,SACFC,SAAS,IACTC,UAAU,EAAG,OAEnB,IAEH3N,qBAAU,WACR,IAAMiM,EAAU2B,SAASC,cAAT,wBAAwChK,IACpDoI,IACFA,EAAQtN,WAAa,WAEtB,CAACkF,IAEJ7D,qBAAU,WAEN8M,QACkBnL,WAAXmM,aACwBnM,WAAxBmM,OAAOC,cAC8C,SAA5DD,OAAOC,aAAaC,QA3CS,4BA6C7BR,GAAa,KAEd,CAACV,IAEGS,EAAY,KACjB,kBAAC,KAAD,CACEX,QAASA,EACTjO,UAAS,uBAAkBkF,EAAlB,YAAwBlF,GACjCoO,gBAAiBA,EACjBC,gBAAiBA,EACjBpK,KAAMA,EACNqK,YAAaA,EACbC,mBAAoB,WAEhBJ,QACkBnL,WAAXmM,aACwBnM,WAAxBmM,OAAOC,cAEdD,OAAOC,aAAaE,QA/DK,0BA+D+B,QAE1Df,KAEFC,KAAMA,EACNC,SAAUA,EACVC,QAASA,EACT5O,MAAOA,GAENoO,IAqBPF,GAAM7N,aAAe,CACnB8N,QAAS,GACTC,SAAU,KACVlO,UAAW,GACXmO,uBAAuB,EACvBC,iBAAiB,EACjBC,gBAAiB,sBACjBpK,KAAM,QACNqK,aAAa,EACbC,mBAAoB,aACpBC,KAAM,QACNC,SAAU,GACVC,QAAS,EACT5O,MAAO,IAGMkO,IC/GAA,GD+GAA,GEjHFuB,GACO,qBADPA,GAEM,oBAFNA,GAGK,mBAHLA,GAIK,mBAJLA,GAKgB,+BALhBA,GAMD,YANCA,GAOO,qBAPPA,GAQK,mBARLA,GASS,wBATTA,GAUQ,sBAVRA,GAWS,wBAXTA,GAYI,iBAGJC,GAAe,CAC1B/F,cAAe,GACfgG,aAAc,KACd3B,eAAgB,GAChB4B,kBAAmB,EACnBnF,YAAa,GACboF,YAAa,KACbjN,MAAO,KACPrC,aAAa,EACbC,iBAAiB,EACjB6H,gBAAgB,EAChB5H,iBAAiB,EACjB+K,YAAa,GACbsE,cAAe,GACfC,WAAY,IAGDC,GAAU,SAACvI,EAAOwI,GAC7B,OAAQA,EAAOzD,MACb,IAAK,qBACH,OAAO,eACF/E,EADL,CAEEkC,cAAesG,EAAOtG,gBAG1B,IAAK,oBACH,OAAO,eACFlC,EADL,CAEEkI,aAAcM,EAAON,eAGzB,IAAK,mBACH,OAAO,eACFlI,EADL,CAEEgD,YAAawF,EAAOxF,cAGxB,IAAK,mBACH,OAAO,eACFhD,EADL,CAEEoI,YAAaI,EAAOJ,cAGxB,IAAK,+BACH,OAAO,eACFpI,EADL,CAEEuG,eAAgBiC,EAAOjC,eACvB4B,kBAAmBK,EAAOL,oBAG9B,IAAK,YACH,OAAO,eACFnI,EADL,CAEE7E,MAAOqN,EAAOrN,QAGlB,IAAK,mBACH,OAAO,eACF6E,EADL,CAEElH,YAAa0P,EAAO1P,cAGxB,IAAK,wBACH,OAAO,eACFkH,EADL,CAEEjH,gBAAiByP,EAAOzP,kBAG5B,IAAK,sBACH,OAAO,eACFiH,EADL,CAEEY,eAAgB4H,EAAO5H,iBAG3B,IAAK,wBACH,OAAO,eACFZ,EADL,CAEEhH,gBAAiBwP,EAAOxP,kBAG5B,IAAK,qBACH,OAAO,eACFgH,EADL,CAEEqI,cAAeG,EAAOH,gBAG1B,IAAK,iBACH,IAAII,EAAiB,YAAOzI,EAAMsI,YAOlC,OAN2B,IAAvBE,EAAOE,YACTD,EAAoBD,EAAOF,WAE3BG,EAAkBD,EAAOE,aAAeF,EAAOF,WAAW,GAGrD,eACFtI,EADL,CAEE+D,YAAayE,EAAOzE,YACpBuE,WAAYG,IAGhB,QACE,MAAM,IAAIE,QCpHVC,GAGe,EAKfC,GAAwB,SAAAC,GAC5B,IAAMC,EAAa,IAAIC,WAEvB,OAAO,IAAIC,SAAQ,SAACC,EAASC,GAC3BJ,EAAWK,OAAS,WAClB,IAAMC,EAAcN,EAAWO,OAC/BJ,EAAQG,IAGVN,EAAWQ,QAAU,WACnBR,EAAWS,QACXL,EAAO,IAAIR,MAAM,4BAInBI,EAAWU,kBAAkBX,OAsDpBY,GAAsC,SACjDC,EACAC,EACAC,GAHiD,iBAAAjP,EA
AAC,OAAA,kEAAAD,EAAA,MAKlBiO,GAAsBc,IALJ,cAK3CG,EAL2C,OAM3CC,EAAkB,IAAI5J,WAAW2J,EAAiB1H,MAAM,IANb,kBAa1C,IAAI6G,SAAQ,SAACC,EAASC,GAC3BS,EAASI,gBACPF,GACA,SAAAG,GAUE,IAVkB,IACV1H,EAAa0H,EAAb1H,SAGF2H,EACJL,EAA8C,EAFlBjB,GAGxBuB,EAAiBxH,KAAKC,MAAMsH,EAAsB,GAClDE,EAAYL,EAAgB/P,OAASmQ,EAErCE,EAAwB,GACrBzI,EAAI,EAAGA,EAAIuI,EAAgBvI,GAAK,EAAG,CAC1C,IAAI0I,EAAgB1I,EAAI,EACpB0I,EAAgB,IAClBA,EAAgB,GAGlBD,EAAsBE,KACpBR,EAAgB3H,MAAMkI,EAAgBF,EAAWxI,EAAIwI,IAIzD,IAAMI,EAAoBH,EAAsB3R,KAAI,SAAA+R,GAKlD,OAJmBA,EAAM/F,QACvB,SAACgG,EAAWC,GAAZ,OAA6BD,EAAYC,MAEE,IAAZP,MAInClB,EAAQ,CACN3G,WACAiI,yBAGJ,WACErB,EAAO,IAAIR,MAAM,iCAnD0B,sCC7EtCiC,GAAc,SAACrS,EAAOF,GACjC,MAAO,CACLE,QACAF,gBCFWwS,GCoBiB,WAAO,IAAD,EACVC,qBAAWvC,GAASN,IADV,mBAC7BjI,EAD6B,KACtB+K,EADsB,KAE9B3J,EAAwB4J,iBAAO,MAErClR,qBAAU,WACR,IAAMoO,EAAe,IAAKN,OAAOqD,cAC/BrD,OAAOsD,oBACHhJ,EAAgBgG,EAAaiD,iBAEnCJ,EAAS,CACP7I,gBACA6C,KAAMiD,KAER+C,EAAS,CACP7C,eACAnD,KAAMiD,OAEP,IAEH,IAAMoD,EAAe,SAAA1K,GACnB,GAAIA,EAAK3C,eACPgN,EAAS,CACP1C,cAAe3H,EAAK3C,eACpBgH,KAAMiD,SAEH,CAAC,IAAD,EHnBqB,SAAAtH,GAAS,IAC/B2K,EAAuC3K,EAAvC2K,QAAuB3C,EAAgBhI,EAA9B4K,aAEbC,EAAgB,GACdC,EAAkB,GACpBC,GAAU,EAqCd,OAnCAJ,EAAQ3J,SAAQ,SAAA4H,GAAW,IACjBoC,EAAUpC,EAAVoC,MACJC,EAAe,KACf/F,EAAU,KACVgG,EAAkB,KAElBF,GACCC,EAA2CrC,EAA3CqC,aAAc/F,EAA6B0D,EAA7B1D,QAASgG,EAAoBtC,EAApBsC,kBAEvBD,EAA0BrC,EAA1BqC,aAAc/F,EAAY0D,EAAZ1D,SATK,MAaE+F,EAClBrD,EAdgB,oBAchBA,gBAEQ7M,IAAZmK,IACFA,EAAU,MAIZ4F,EAAgBjB,KAAK,CACnBmB,QACA9F,UACA9B,KAAMwE,IAGRmD,EAAUC,EAGNE,GACFL,EAAchB,KAAKqB,MAIhB,CACLtD,WAAYkD,EACZzH,YAAawH,EACb7C,cACAgD,MAAOD,GG1B4CI,CAAiBnL,GAA1D4H,EADH,EACGA,WAAYvE,EADf,EACeA,YAAa2E,EAD5B,EAC4BA,YAEjCqC,EAAS,CACPhH,cACA2E,cACAJ,aACAvD,KAAMiD,OAKN8D,EAAkB,WAClB9L,EAAMoI,aACRpI,EAAMoI,YAAY2D,OAGpBhB,EAAS,CACPnK,gBAAgB,EAChBmE,KAAMiD,KAER+C,EAAS,CACP/R,iBAAiB,EACjB+L,KAAMiD,KAER+C,EAAS,CACPhS,iBAAiB,EACjBgM,KAAMiD,KAER+C,EAAS,CACPjS,aAAa,EACbiM,KAAMiD,MAIJgE,EAAgC,SAAMtR,GAAN,6BAAAE,EAAAC,OAAA,mDAChCoR,EAAuB,KAEzB7K,GACAA,EAAsBf,SACtBe,EAAsBf,QAAQiB,cAE9B2K,EAAuB7K,EAAsBf,QAAQiB,aAEjD4K,EAA0BD,GAAwB,IAElDE,EAAazR,aAAoB0R,KAXH,SAa9BzC,EAAY,MAEZwC,EAf8B,iBAgBhCxC,EAAYjP,EAhBoB,2CAAAE,EAAA,MAkBLE,MAAMJ,IAlBD,eAkB1B2R,EAlB0B,mBAAAzR,EAAA,MAmBdyR,EAAaC,QAnBC,QAmBhC3C,EAnBgC,kCAAA/O,EAAA,MAwBxB8O,GACRC,EACA3J,EAAMkI,aACNgE,IA3BgC,iBAsBhC1B,EAtBgC,EAsBhCA,kBACAjI,EAvBgC,EAuBhCA,SAOFwI,EAAS,CACPxE,eAAgBiE,EAChBrC,kBAA8B,IAAX5F,EACnBwC,KAAMiD,KAjC0B,kDAoClC+C,EAAS,CACP5P,MAAOyP,GA5GiB,oBAE9B,gDA8GM7F,KAAMiD,KAzC0B,0DA8ChCuE,EAA6B,SAAMC,GAAN,mBAAA5R,EAAAC,OAAA,uDAC7B4R,EAAc,KADe,oBAAA7R,EAAA,MAGX8R,UAAUC,aAAaC,aAAa,CACtDC,OAAO,EACPC,OAAO,KALsB,OAG/BL,EAH+B,uDAQ/B1B,EAAS,CACP5P,MAAOyP,GA3Ha,yBACM,sCA2H1B7F,KAAMiD,KAVuB,eAc3B+E,EAA4BC,IAAoB,eACjDR,EADgD,CAEnDC,cACAQ,SAAS,KAGPR,KACIS,EAAuBlN,EAAMkC,eACdiL,QAAU,KAC/BpC,EAAS,CACP7I,cAAegL,EACfnI,KAAMiD,KAEkBhI,EAAMkI,aAAakF,wBAC3CX,GAEgBY,QAAQrN,EAAMkC,gBA9BD,kBAiC1B6K,GAjC0B,yDAoC7BO,EAAW,SAAAC,GACfA,EACGC,GAAG,QAAQ,SAAA9M,GACV0K,EAAa1K,MAEd8M,GAAG,OAAO,WACT1B,OAED0B,GAAG,SAAS,WACXzC,EAAS,CACP5P,MAAOyP,GA9JuB,4BAEtC,uEAgKQ7F,KAAMiD,KAGR8D,OAGJf,EAAS,CACPnK,gBAAgB,EAChBmE,KAAMiD,MAIJyF,EAA8B,WAC9BzN,EAAMoI,cACRpI,EAAMoI,YAAY2D,OAClB/L,EAAMoI,YAAYsF,qBAClB1N,EAAMoI,YAAYuF,gBAAgBD,sBAGhC1N,EAAMkI,cAA6C,cAA7BlI,EAAMkI,aAAalI,OAC3CA,EAAMkI,aAAa0F,UAuIvB,OACE,yBAAKnV,UAAU,qBACb,kBAAC,GAAD,CAAOiE,KAAK,OAAOwK,SA3TvB,yRA4TKlH,EAAM7E,OACL,kBAAC,GAAD,CACEuB,KAAK,QACLnE,MAAOyH,EAAM7E,MAAM5C,MACnB2O,SAAUlH,EAAM7E,MAAM9C,YACtBuO,uBAAuB,EACvBO,QAAS,IACTH,mBAAoB,kBAClB+D,EAAS,CAAE5P,MAAO,KAAM4J,KAAMiD,QAIpC,kBAAC,EAAD,CACElP,YAAakH,EAAMlH,YACnBC,gBAAiBiH,EAAMjH,gBACvBC
,gBAAiBgH,EAAMhH,gBACvBG,QA1BU,SAAAgC,GACd4P,EAAS,CACP5P,QACA4J,KAAMiD,MAwBJvK,iBAvJmB,WACvBsN,EAAS,CACPxE,eAAgB,GAChB4B,kBAAmB,EACnBpD,KAAMiD,KAER+C,EAAS,CACPhH,YAAa,GACb2E,YAAa,EACbJ,WAAY,GACZvD,KAAMiD,MA8IJ5O,yBA1I2B,SAAMyU,GAAN,eAAAjT,EAAAC,OAAA,uDAC/B4S,IAEMF,EAASO,IAAcD,GAHE,WAAAjT,EAAA,MAIzBoR,EAA8B6B,EAAgBvR,OAJrB,OAK/ByO,EAAS,CACP/R,iBAAiB,EACjB+L,KAAMiD,KAER+C,EAAS,CACPhS,iBAAiB,EACjBgM,KAAMiD,KAER+C,EAAS,CACPjS,aAAa,EACbiM,KAAMiD,KAER+C,EAAS,CACP/H,YAAa,SACb+B,KAAMiD,KAER+C,EAAS,CACP3C,YAAamF,EACbxI,KAAMiD,KAGRsF,EAASC,GA1BsB,uCA2I3BlU,wBA9G0B,WAC9ByS,IACAf,EAAS,CACP/R,iBAAiB,EACjB+L,KAAMiD,MA2GJ1O,qBAvGuB,SAAMuU,GAAN,eAAAjT,EAAAC,OAAA,uDAC3B4S,IAEMF,EAASO,IAAcD,GAHF,WAAAjT,EAAA,MAIrBoR,EAA8B6B,EAAgBvR,OAJzB,OAK3ByO,EAAS,CACPhS,iBAAiB,EACjBgM,KAAMiD,KAER+C,EAAS,CACP/R,iBAAiB,EACjB+L,KAAMiD,KAER+C,EAAS,CACPjS,aAAa,EACbiM,KAAMiD,KAER+C,EAAS,CACP/H,YAAa,SACb+B,KAAMiD,KAER+C,EAAS,CACP3C,YAAamF,EACbxI,KAAMiD,KAGRsF,EAASC,GA1BkB,uCAwGvBhU,oBA3EsB,WAC1BuS,IACAf,EAAS,CACPhS,iBAAiB,EACjBgM,KAAMiD,MAwEJxO,iBApEmB,SAAMqU,GAAN,eAAAjT,EAAAC,OAAA,uDACvB4S,IADuB,WAAA7S,EAAA,MAGF2R,EAA2BsB,IAHzB,OAGjBN,EAHiB,OAIvBxC,EAAS,CACPjS,aAAa,EACbiM,KAAMiD,KAER+C,EAAS,CACPhS,iBAAiB,EACjBgM,KAAMiD,KAER+C,EAAS,CACP/R,iBAAiB,EACjB+L,KAAMiD,KAER+C,EAAS,CACP/H,YAAa,aACb+B,KAAMiD,KAER+C,EAAS,CACP3C,YAAamF,EACbxI,KAAMiD,KAGRsF,EAASC,GAzBc,uCAqEnB9T,gBAzCkB,WACtBqS,IACAf,EAAS,CACPjS,aAAa,EACbiM,KAAMiD,QAuCN,kBAAC,GAAD,CACE9F,cAAelC,EAAMkC,cACrBqE,eAAgBvG,EAAMuG,eACtBC,cAAexG,EAAMmI,kBACrBnF,YAAahD,EAAMgD,YACnB5B,sBAAuBA,EACvBR,eAAgBZ,EAAMY,eACtBmD,YAAa/D,EAAM+D,YACnB0B,gBAAiBzF,EAAMsI,eCnWzByF,GAAe,CACnB,kBAACC,EAAA,EAAD,CACEvV,UAAU,OACVqN,IAAI,WACJ5G,OAAO,SACP+O,IAAI,sBACJC,KAAK,gDAEL,uBAAGzV,UAAU,aAAb,iBACA,kBAAC,IAAD,CAAQA,UAAU,eAEpB,kBAACuV,EAAA,EAAD,CACEvV,UAAU,OACVqN,IAAI,YACJ5G,OAAO,SACP+O,IAAI,sBACJC,KAAK,0FAEL,uBAAGzV,UAAU,aAAb,iBACA,kBAAC,IAAD,CAAYA,UAAU,eAExB,kBAACuV,EAAA,EAAD,CACEvV,UAAU,OACVqN,IAAI,cACJ5G,OAAO,SACP+O,IAAI,sBACJC,KAAK,yEAEL,uBAAGzV,UAAU,aAAb,UACA,kBAAC,IAAD,CAAcA,UAAU,eAE1B,kBAACuV,EAAA,EAAD,CACEvV,UAAU,uBACVqN,IAAI,iBACJ5G,OAAO,SACP+O,IAAI,sBACJC,KAAK,mrBAEL,kBAACzR,EAAA,EAAD,CAAQhE,UAAU,cAAciE,KAAK,WAAWyR,WAAYC,KAA5D,+BAGA,kBAAC,IAAD,CAAY3V,UAAU,gBAeX4V,GAXI,kBACjB,yBAAK5V,UAAU,iBACb,kBAAC,EAAD,CACEJ,YAjDJ,2FAkDIC,MAAOyV,GACPxV,MArDe,0BAuDjB,kBAAC,GAAD,QCrDgB+V,QACW,cAA7B1G,OAAO2G,SAASC,UAEe,UAA7B5G,OAAO2G,SAASC,UAEhB5G,OAAO2G,SAASC,SAASC,MACvB,2DCZNC,IAASC,OAAO,kBAAC,GAAD,MAASjH,SAASkH,eAAe,SD6H3C,kBAAmBlC,WACrBA,UAAUmC,cAAcC,MAAMC,MAAK,SAAAC,GACjCA,EAAaC,iB","file":"static/js/main.a33677a6.chunk.js","sourcesContent":["import React from 'react';\nimport PropTypes from 'prop-types';\nimport { Tile } from 'carbon-components-react';\n\nexport const Header = ({ description, links, title }) => (\n \n
\n

{title}

\n

{description}

\n
\n
\n
{links.map(link => link)}
\n
\n
\n);\n\nHeader.propTypes = {\n description: PropTypes.string,\n links: PropTypes.arrayOf(PropTypes.object),\n title: PropTypes.string,\n};\n\nHeader.defaultProps = {\n description: '',\n links: [],\n title: '',\n};\n\nexport default Header;\n","import Header from './Header';\nexport default Header;\n","import React, { useEffect, useState } from 'react';\nimport PropTypes from 'prop-types';\nimport { Button, FileUploaderButton } from 'carbon-components-react';\nimport fetch from 'isomorphic-fetch';\nimport models from '../../data/models.json';\n\nexport const SubmitContainer = ({\n isRecording,\n isSamplePlaying,\n isUploadPlaying,\n keywordText,\n modelName,\n onError,\n onStartPlayingFileUpload,\n onStopPlayingFileUpload,\n onStartPlayingSample,\n onStopPlayingSample,\n onStartRecording,\n onStopRecording,\n useSpeakerLabels,\n}) => {\n const [keywordList, setKeywordList] = useState([]);\n useEffect(() => {\n let newKeywordList = [];\n if (keywordText.length > 0) {\n newKeywordList = keywordText.split(',').map(k => k.trim());\n }\n setKeywordList(newKeywordList);\n }, [keywordText]);\n\n const sampleModelInfo = models.find(model => model.name === modelName);\n const sampleModelFilename = sampleModelInfo ? sampleModelInfo.filename : null;\n\n const getBaseAudioConfig = async () => {\n let authResponse;\n let authJson;\n authResponse = await fetch('/api/auth');\n authJson = await authResponse.json();\n if (!authResponse.ok) {\n onError(authJson);\n return {\n error: authJson,\n };\n }\n\n let options = {};\n\n // We'll lowercase these so that we can ignore cases when highlighting keyword\n // occurrences later on.\n const lowerCasedKeywords = keywordList.map(keyword =>\n keyword.toLowerCase(),\n );\n\n options = {\n ...options,\n url: authJson.url || undefined,\n accessToken: authJson.accessToken,\n format: true,\n keywords: keywordList.length > 0 ? lowerCasedKeywords : undefined,\n keywordsThreshold: keywordList.length > 0 ? 0.01 : undefined,\n model: modelName,\n objectMode: true,\n play: true,\n realtime: true,\n resultsBySpeaker: useSpeakerLabels,\n speakerlessInterim: true,\n timestamps: true,\n };\n\n return options;\n };\n\n const getSampleAudioConfig = async () => {\n const baseConfig = await getBaseAudioConfig();\n return {\n ...baseConfig,\n file: `audio/${sampleModelFilename}`,\n };\n };\n\n const getMicrophoneAudioConfig = async () => {\n const baseConfig = await getBaseAudioConfig();\n return {\n ...baseConfig,\n resultsBySpeaker: false,\n };\n };\n\n const getUploadAudioConfig = async file => {\n const baseConfig = await getBaseAudioConfig();\n return {\n ...baseConfig,\n file,\n resultsBySpeaker: false,\n };\n };\n\n return (\n
\n {isSamplePlaying ? (\n \n Stop audio sample\n \n ) : (\n {\n const config = await getSampleAudioConfig();\n if (!config.error) {\n onStartPlayingSample(config);\n }\n }}\n >\n Play audio sample\n \n )}\n {isRecording ? (\n \n Stop recording\n \n ) : (\n {\n const config = await getMicrophoneAudioConfig();\n if (!config.error) {\n onStartRecording(config);\n }\n }}\n >\n Record your own\n \n )}\n {isUploadPlaying ? (\n \n Stop playing\n \n ) : (\n {\n const uploadedFile = evt.currentTarget.files[0];\n const config = await getUploadAudioConfig(uploadedFile);\n if (!config.error) {\n onStartPlayingFileUpload(config);\n }\n }}\n />\n )}\n
\n );\n};\n\nSubmitContainer.propTypes = {\n isRecording: PropTypes.bool,\n isSamplePlaying: PropTypes.bool,\n isUploadPlaying: PropTypes.bool,\n keywordText: PropTypes.string,\n modelName: PropTypes.string,\n onError: PropTypes.func,\n onStartPlayingFileUpload: PropTypes.func,\n onStopPlayingFileUpload: PropTypes.func,\n onStartPlayingSample: PropTypes.func,\n onStopPlayingSample: PropTypes.func,\n onStartRecording: PropTypes.func,\n onStopRecording: PropTypes.func,\n useSpeakerLabels: PropTypes.bool,\n};\n\nSubmitContainer.defaultProps = {\n isRecording: false,\n isSamplePlaying: false,\n isUploadPlaying: false,\n keywordText: '',\n modelName: null,\n onError: () => {},\n onStartPlayingFileUpload: () => {},\n onStopPlayingFileUpload: () => {},\n onStartPlayingSample: () => {},\n onStopPlayingSample: () => {},\n onStartRecording: () => {},\n onStopRecording: () => {},\n useSpeakerLabels: false,\n};\n\nexport default SubmitContainer;\n","import SubmitContainer from './SubmitContainer';\nexport default SubmitContainer;\n","import React, { useState } from 'react';\nimport PropTypes from 'prop-types';\nimport {\n Dropdown,\n FormGroup,\n TextArea,\n Tile,\n ToggleSmall,\n} from 'carbon-components-react';\nimport SubmitContainer from '../SubmitContainer';\nimport models from '../../data/models.json';\n\nexport const ControlContainer = ({\n isRecording,\n isSamplePlaying,\n isUploadPlaying,\n onError,\n onSelectNewModel,\n onStartPlayingFileUpload,\n onStopPlayingFileUpload,\n onStartPlayingSample,\n onStopPlayingSample,\n onStartRecording,\n onStopRecording,\n}) => {\n const dropdownChoices = models.map(model => ({\n id: model.name,\n label: model.description,\n supportsSpeakerLabels: model.supported_features.speaker_labels,\n }));\n\n const [model, selectModel] = useState(dropdownChoices[0]);\n const [keywordText, setKeywordText] = useState(models[0].keywords);\n const [useSpeakerLabels, setUseSpeakerLabels] = useState(false);\n\n const onChangeLanguageModel = newModel => {\n selectModel(newModel.selectedItem);\n\n const newKeywordText = models.find(\n model => model.name === newModel.selectedItem.id,\n ).keywords;\n setKeywordText(newKeywordText);\n\n if (useSpeakerLabels && !newModel.selectedItem.supportsSpeakerLabels) {\n setUseSpeakerLabels(false);\n }\n\n onSelectNewModel();\n };\n\n return (\n \n

Input

\n \n \n \n \n {\n setKeywordText(evt.target.value);\n }}\n light\n />\n \n \n {\n setUseSpeakerLabels(!useSpeakerLabels);\n }}\n />\n \n \n
\n );\n};\n\nControlContainer.propTypes = {\n isRecording: PropTypes.bool,\n isSamplePlaying: PropTypes.bool,\n isUploadPlaying: PropTypes.bool,\n onError: PropTypes.func,\n onSelectNewModel: PropTypes.func,\n onStartPlayingFileUpload: PropTypes.func,\n onStopPlayingFileUpload: PropTypes.func,\n onStartPlayingSample: PropTypes.func,\n onStopPlayingSample: PropTypes.func,\n onStartRecording: PropTypes.func,\n onStopRecording: PropTypes.func,\n};\n\nControlContainer.defaultProps = {\n isRecording: false,\n isSamplePlaying: false,\n isUploadPlaying: false,\n onError: () => {},\n onSelectNewModel: () => {},\n onStartPlayingFileUpload: () => {},\n onStopPlayingFileUpload: () => {},\n onStartPlayingSample: () => {},\n onStopPlayingSample: () => {},\n onStartRecording: () => {},\n onStopRecording: () => {},\n};\n\nexport default ControlContainer;\n","import ControlContainer from './ControlContainer';\nexport default ControlContainer;\n","import React from 'react';\nimport PropTypes from 'prop-types';\n\nconst DATA_POINT_WIDTH = 1;\nconst DATA_POINT_HEIGHT = 50;\nconst DATA_POINT_MARGIN = 2;\nconst DATA_POINT_Y_OFFSET = 50;\n\nexport class AudioWave extends React.Component {\n constructor(props) {\n super(props);\n\n this.audioWaveCanvasRef = React.createRef();\n this.audioWaveCanvasCtx = null;\n this.animationFrameId = null;\n\n this.draw = this.draw.bind(this);\n\n this.state = {\n startingTimestamp: 0,\n microphoneData: new Uint8Array(1024),\n };\n }\n\n componentDidMount() {\n this.audioWaveCanvasCtx = this.audioWaveCanvasRef.current.getContext('2d');\n this.audioWaveCanvasCtx.lineCap = 'round';\n this.initializeCanvasDimensions();\n }\n\n componentDidUpdate(prevProps) {\n const [firstPrevValue] = prevProps.data;\n const [firstCurrentValue] = this.props.data;\n\n if (firstPrevValue !== firstCurrentValue) {\n this.drawInitialAudioWave();\n }\n\n if (\n prevProps.isTranscribing === false &&\n this.props.isTranscribing === true\n ) {\n this.setStartingTimestamp();\n this.draw();\n } else if (\n prevProps.isTranscribing === true &&\n this.props.isTranscribing === false\n ) {\n this.stopDrawing();\n }\n }\n\n setStartingTimestamp() {\n this.setState({ startingTimestamp: Date.now() });\n }\n\n initializeCanvasDimensions() {\n const canvasCtx = this.audioWaveCanvasRef.current;\n const audioWaveContainer = this.props.audioWaveContainerRef.current;\n\n canvasCtx.width = audioWaveContainer.clientWidth;\n canvasCtx.height = 100;\n }\n\n drawInitialAudioWave() {\n this.resetCanvasForNewFrame();\n this.drawEmptyDataPoints();\n }\n\n drawEmptyDataPoints() {\n const { data } = this.props;\n data.forEach((dataPoint, i) => {\n this.audioWaveCanvasCtx.beginPath();\n\n this.audioWaveCanvasCtx.fillStyle = 'rgba(0, 98, 255, 0.5)';\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n -DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.stroke();\n this.audioWaveCanvasCtx.closePath();\n });\n }\n\n drawMicrophoneDataPoints() {\n this.props.audioAnalyzer.getByteFrequencyData(this.state.microphoneData);\n\n const { microphoneData } = this.state;\n const arrayData = [].slice.call(microphoneData);\n const floatArray = arrayData.map(n => n / 255);\n\n floatArray.forEach((dataPoint, i) => {\n 
this.audioWaveCanvasCtx.beginPath();\n\n this.audioWaveCanvasCtx.fillStyle = 'rgba(0, 98, 255, 1)';\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n -DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.stroke();\n this.audioWaveCanvasCtx.closePath();\n });\n }\n\n drawAudioDataPoints() {\n const { data, duration } = this.props;\n\n // Make time calculations.\n const now = Date.now();\n const { startingTimestamp } = this.state;\n const timeElapsed = now - startingTimestamp;\n const audioProgressPercent = timeElapsed / duration;\n\n // Draw the audio lines.\n const numberOfDataPoints = data.length;\n const highlightIndex = numberOfDataPoints * audioProgressPercent;\n\n const wholeHighlightIndex = Math.floor(highlightIndex);\n let decimalOpacityIndex = highlightIndex - wholeHighlightIndex;\n\n if (decimalOpacityIndex > 1) {\n decimalOpacityIndex = 1;\n }\n\n if (decimalOpacityIndex < 0.5) {\n decimalOpacityIndex = 0.5;\n }\n\n data.forEach((dataPoint, i) => {\n this.audioWaveCanvasCtx.beginPath();\n\n let fillColor = 'rgba(0, 98, 255, 1)';\n if (i > wholeHighlightIndex) {\n fillColor = 'rgba(0, 98, 255, 0.5)';\n }\n\n if (i - 1 === wholeHighlightIndex) {\n fillColor = `rgba(0, 98, 255, ${decimalOpacityIndex.toFixed(2)})`;\n }\n\n this.audioWaveCanvasCtx.fillStyle = fillColor;\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n -DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.stroke();\n this.audioWaveCanvasCtx.closePath();\n });\n }\n\n draw() {\n this.resetCanvasForNewFrame();\n\n if (this.props.audioSource && this.props.audioSource === 'microphone') {\n this.drawMicrophoneDataPoints();\n } else {\n this.drawAudioDataPoints();\n }\n\n this.animationFrameId = requestAnimationFrame(this.draw);\n }\n\n resetCanvasForNewFrame() {\n const audioWaveCanvas = this.audioWaveCanvasRef.current;\n this.audioWaveCanvasCtx.clearRect(\n 0,\n 0,\n audioWaveCanvas.width,\n audioWaveCanvas.height,\n );\n }\n\n stopDrawing() {\n if (this.animationFrameId) {\n cancelAnimationFrame(this.animationFrameId);\n }\n }\n\n render() {\n return (\n
\n \n
\n );\n }\n}\n\nAudioWave.propTypes = {\n data: PropTypes.array.isRequired,\n duration: PropTypes.number.isRequired,\n isTranscribing: PropTypes.bool.isRequired,\n audioWaveContainerRef: PropTypes.object.isRequired,\n audioSource: PropTypes.string,\n audioAnalyzer: PropTypes.object,\n};\n\nAudioWave.defaultProps = {\n audioSource: null,\n audioAnalyzer: null,\n};\n\nexport default AudioWave;\n","import AudioWave from './AudioWave';\nexport default AudioWave;\n","import KeywordTooltip from './KeywordTooltip';\nexport default KeywordTooltip;\n","import React from 'react';\nimport PropTypes from 'prop-types';\n\nexport const KeywordTooltip = ({ confidence, startTime, endTime }) => (\n
\n

Confidence: {confidence}

\n

\n {startTime}s - {endTime}s\n

\n
\n);\n\nKeywordTooltip.propTypes = {\n confidence: PropTypes.number.isRequired,\n startTime: PropTypes.number.isRequired,\n endTime: PropTypes.number.isRequired,\n};\n\nexport default KeywordTooltip;\n","import React from 'react';\nimport PropTypes from 'prop-types';\nimport { TooltipDefinition } from 'carbon-components-react';\nimport KeywordTooltip from '../KeywordTooltip';\nimport { createWordRegex } from './utils';\n\nconst mapTranscriptTextToElements = (text, keywordInfo, totalIndex) => {\n let finalSentenceArray = [];\n let matches = [];\n\n if (keywordInfo.length > 0) {\n const regex = createWordRegex(keywordInfo);\n matches = text.split(regex);\n }\n\n // If we don't have words to find yet, just return the interim text.\n if (matches.length === 0) {\n return [\n {\n text,\n type: 'normal',\n },\n ];\n }\n\n const wordOccurences = {};\n finalSentenceArray = matches.map((sentenceFragment, index) => {\n // Use lowercased version when searching through keyword map.\n const fragmentToSearch = sentenceFragment.toLowerCase();\n\n if (index % 2 === 0) {\n return {\n text: sentenceFragment,\n type: 'normal',\n };\n }\n\n // Find keyword info object to use based on text from sentenceFragment and\n // current index in wordOccurences.\n const keywordInfoMatch =\n keywordInfo[totalIndex] && keywordInfo[totalIndex][fragmentToSearch];\n let keywordOccurenceIndex = 0;\n if (wordOccurences[fragmentToSearch]) {\n keywordOccurenceIndex = wordOccurences[fragmentToSearch];\n wordOccurences[fragmentToSearch] += 1;\n } else {\n wordOccurences[fragmentToSearch] = 1;\n }\n const infoForOccurence =\n keywordInfoMatch && keywordInfoMatch[keywordOccurenceIndex];\n\n // Bail in case we can't get the keyword info for whatever reason.\n if (!infoForOccurence) {\n return {};\n }\n\n return {\n text: sentenceFragment,\n type: 'keyword',\n startTime: infoForOccurence.start_time,\n endTime: infoForOccurence.end_time,\n confidence: infoForOccurence.confidence,\n };\n });\n\n return finalSentenceArray;\n};\n\nexport const TranscriptBox = ({ keywordInfo, transcriptArray }) => {\n return (\n
\n {transcriptArray.map((transcriptItem, overallIndex) => {\n const { speaker, text } = transcriptItem;\n const parsedTextElements = mapTranscriptTextToElements(\n text,\n keywordInfo,\n overallIndex,\n );\n\n return (\n
\n {speaker !== null && (\n \n {`Speaker ${speaker}: `}\n \n )}\n {parsedTextElements.map((element, elementIndex) => {\n if (!element) {\n return null;\n }\n\n if (element.type === 'normal') {\n return (\n {`${element.text}`}\n );\n } else if (element.type === 'keyword') {\n return (\n \n }\n triggerClassName=\"keyword-info-trigger\"\n >\n {element.text}\n \n );\n }\n\n return null;\n })}\n
\n );\n })}\n
\n );\n};\n\nTranscriptBox.propTypes = {\n keywordInfo: PropTypes.arrayOf(PropTypes.object),\n transcriptArray: PropTypes.arrayOf(PropTypes.object),\n};\n\nTranscriptBox.defaultProps = {\n keywordInfo: [],\n transcriptArray: [],\n};\n\nexport default TranscriptBox;\n","export const createWordRegex = keywordInfo => {\n let allKeywords = [];\n keywordInfo.forEach(sectionKeywords => {\n allKeywords = [...allKeywords, ...Object.keys(sectionKeywords)];\n });\n const regexArray = allKeywords.map((word, index) => {\n if (index !== allKeywords.length - 1) {\n return `${word}|`;\n }\n return word;\n });\n const regexWordSearch = regexArray.reduce((arr, str) => arr + str, '');\n const regex = new RegExp(`(${regexWordSearch})(?!')`, 'gi');\n return regex;\n};\n","import TranscriptBox from './TranscriptBox';\nexport default TranscriptBox;\n","import React from 'react';\nimport PropTypes from 'prop-types';\nimport { FormGroup, Tile } from 'carbon-components-react';\nimport AudioWave from '../AudioWave';\nimport TranscriptBox from '../TranscriptBox';\n\nexport const OutputContainer = ({\n audioAnalyzer,\n audioDataArray,\n audioDuration,\n audioSource,\n audioWaveContainerRef,\n isTranscribing,\n keywordInfo,\n transcriptArray,\n}) => (\n \n

Output

\n \n \n \n \n \n \n
\n);\n\nOutputContainer.propTypes = {\n audioAnalyzer: PropTypes.object.isRequired,\n audioDataArray: PropTypes.arrayOf(PropTypes.number),\n audioDuration: PropTypes.number,\n audioSource: PropTypes.string,\n audioWaveContainerRef: PropTypes.object.isRequired,\n isTranscribing: PropTypes.bool,\n keywordInfo: PropTypes.arrayOf(PropTypes.object),\n transcriptArray: PropTypes.arrayOf(PropTypes.object),\n};\n\nOutputContainer.defaultProps = {\n audioDataArray: [],\n audioDuration: 0,\n audioSource: '',\n isTranscribing: false,\n keywordInfo: [],\n transcriptArray: [],\n};\n\nexport default OutputContainer;\n","import OutputContainer from './OutputContainer';\nexport default OutputContainer;\n","import React, { useEffect, useState } from 'react';\nimport PropTypes from 'prop-types';\nimport { ToastNotification } from 'carbon-components-react';\n\nconst NOTIFICATION_HAS_BEEN_SEEN = 'notificationHasBeenSeen';\n\nexport const Toast = ({\n caption,\n children,\n className,\n hideAfterFirstDisplay,\n hideCloseButton,\n iconDescription,\n kind,\n lowContrast,\n onCloseButtonClick,\n role,\n subtitle,\n timeout,\n title,\n}) => {\n const [id, setId] = useState();\n const [hideToast, setHideToast] = useState(false);\n\n useEffect(() => {\n setId(\n Math.random()\n .toString(36)\n .substring(2, 15) +\n Math.random()\n .toString(36)\n .substring(2, 15),\n );\n }, []);\n\n useEffect(() => {\n const element = document.querySelector(`.custom-toast-${id}`);\n if (element) {\n element.className += 'enter';\n }\n }, [id]);\n\n useEffect(() => {\n if (\n hideAfterFirstDisplay &&\n typeof window !== undefined &&\n typeof window.localStorage !== undefined &&\n window.localStorage.getItem(NOTIFICATION_HAS_BEEN_SEEN) === 'true'\n ) {\n setHideToast(true);\n }\n }, [hideAfterFirstDisplay]);\n\n return hideToast ? 
null : (\n {\n if (\n hideAfterFirstDisplay &&\n typeof window !== undefined &&\n typeof window.localStorage !== undefined\n ) {\n window.localStorage.setItem(NOTIFICATION_HAS_BEEN_SEEN, 'true');\n }\n onCloseButtonClick();\n }}\n role={role}\n subtitle={subtitle}\n timeout={timeout}\n title={title}\n >\n {children}\n \n );\n};\n\nToast.propTypes = {\n caption: PropTypes.string,\n children: PropTypes.node,\n className: PropTypes.string,\n hideAfterFirstDisplay: PropTypes.bool,\n hideCloseButton: PropTypes.bool,\n iconDescription: PropTypes.string,\n kind: PropTypes.string,\n lowContrast: PropTypes.bool,\n onCloseButtonClick: PropTypes.func,\n role: PropTypes.string,\n subtitle: PropTypes.string,\n timeout: PropTypes.number,\n title: PropTypes.string,\n};\n\nToast.defaultProps = {\n caption: '',\n children: null,\n className: '',\n hideAfterFirstDisplay: true,\n hideCloseButton: false,\n iconDescription: 'closes notification',\n kind: 'error',\n lowContrast: false,\n onCloseButtonClick: () => {},\n role: 'alert',\n subtitle: '',\n timeout: 0,\n title: '',\n};\n\nexport default Toast;\n","import Toast from './Toast';\n\nexport default Toast;\n","export const actionTypes = {\n setAudioAnalyzer: 'SET_AUDIO_ANALYZER',\n setAudioContext: 'SET_AUDIO_CONTEXT',\n setAudioSource: 'SET_AUDIO_SOURCE',\n setAudioStream: 'SET_AUDIO_STREAM',\n setAudioVisualizationData: 'SET_AUDIO_VISUALIZATION_DATA',\n setError: 'SET_ERROR',\n setSpeakerLabels: 'SET_SPEAKER_LABELS',\n setIsRecording: 'SET_IS_RECORDING',\n setIsSamplePlaying: 'SET_IS_SAMPLE_PLAYING',\n setIsTranscribing: 'SET_IS_TRANSCRIBING',\n setIsUploadPlaying: 'SET_IS_UPLOAD_PLAYING',\n updateResults: 'UPDATE_RESULTS',\n};\n\nexport const initialState = {\n audioAnalyzer: {},\n audioContext: null,\n audioDataArray: [],\n audioDurationInMs: 0,\n audioSource: '',\n audioStream: null,\n error: null,\n isRecording: false,\n isSamplePlaying: false,\n isTranscribing: false,\n isUploadPlaying: false,\n keywordInfo: [],\n speakerLabels: [],\n transcript: [],\n};\n\nexport const reducer = (state, action) => {\n switch (action.type) {\n case 'SET_AUDIO_ANALYZER': {\n return {\n ...state,\n audioAnalyzer: action.audioAnalyzer,\n };\n }\n case 'SET_AUDIO_CONTEXT': {\n return {\n ...state,\n audioContext: action.audioContext,\n };\n }\n case 'SET_AUDIO_SOURCE': {\n return {\n ...state,\n audioSource: action.audioSource,\n };\n }\n case 'SET_AUDIO_STREAM': {\n return {\n ...state,\n audioStream: action.audioStream,\n };\n }\n case 'SET_AUDIO_VISUALIZATION_DATA': {\n return {\n ...state,\n audioDataArray: action.audioDataArray,\n audioDurationInMs: action.audioDurationInMs,\n };\n }\n case 'SET_ERROR': {\n return {\n ...state,\n error: action.error,\n };\n }\n case 'SET_IS_RECORDING': {\n return {\n ...state,\n isRecording: action.isRecording,\n };\n }\n case 'SET_IS_SAMPLE_PLAYING': {\n return {\n ...state,\n isSamplePlaying: action.isSamplePlaying,\n };\n }\n case 'SET_IS_TRANSCRIBING': {\n return {\n ...state,\n isTranscribing: action.isTranscribing,\n };\n }\n case 'SET_IS_UPLOAD_PLAYING': {\n return {\n ...state,\n isUploadPlaying: action.isUploadPlaying,\n };\n }\n case 'SET_SPEAKER_LABELS': {\n return {\n ...state,\n speakerLabels: action.speakerLabels,\n };\n }\n case 'UPDATE_RESULTS': {\n let updatedTranscript = [...state.transcript];\n if (action.resultIndex === 0) {\n updatedTranscript = action.transcript;\n } else {\n updatedTranscript[action.resultIndex] = action.transcript[0];\n }\n\n return {\n ...state,\n keywordInfo: action.keywordInfo,\n 
transcript: updatedTranscript,\n };\n }\n default: {\n throw new Error();\n }\n }\n};\n","const AUDIO_VISUALIZATION_DIMENSIONS = {\n DATA_POINT_WIDTH: 1,\n DATA_POINT_HEIGHT: 50,\n DATA_POINT_MARGIN: 2,\n DATA_POINT_X_OFFSET: 25,\n DATA_POINT_Y_OFFSET: 50,\n};\n\nconst readFileToArrayBuffer = fileData => {\n const fileReader = new FileReader();\n\n return new Promise((resolve, reject) => {\n fileReader.onload = () => {\n const arrayBuffer = fileReader.result;\n resolve(arrayBuffer);\n };\n\n fileReader.onerror = () => {\n fileReader.abort();\n reject(new Error('failed to process file'));\n };\n\n // Initiate the conversion.\n fileReader.readAsArrayBuffer(fileData);\n });\n};\n\nexport const formatStreamData = data => {\n const { results, result_index: resultIndex } = data;\n\n let finalKeywords = [];\n const finalTranscript = [];\n let isFinal = false;\n\n results.forEach(result => {\n const { final } = result;\n let alternatives = null;\n let speaker = null;\n let keywords_result = null;\n\n if (final) {\n ({ alternatives, speaker, keywords_result } = result);\n } else {\n ({ alternatives, speaker } = result);\n }\n\n // Extract the main alternative to get keywords.\n const [mainAlternative] = alternatives;\n const { transcript } = mainAlternative;\n\n if (speaker === undefined) {\n speaker = null;\n }\n\n // Push object to final transcript.\n finalTranscript.push({\n final,\n speaker,\n text: transcript,\n });\n\n isFinal = final;\n\n // Push keywords to final keyword list.\n if (keywords_result) {\n finalKeywords.push(keywords_result);\n }\n });\n\n return {\n transcript: finalTranscript,\n keywordInfo: finalKeywords,\n resultIndex,\n final: isFinal,\n };\n};\n\nexport const convertAudioBlobToVisualizationData = async (\n audioBlob,\n audioCtx,\n audioWaveContainerWidth,\n) => {\n const audioArrayBuffer = await readFileToArrayBuffer(audioBlob);\n const audioUint8Array = new Uint8Array(audioArrayBuffer.slice(0));\n\n // NOTE: BaseAudioContext.decodeAudioData has a promise syntax\n // which we are unable to use in order to be compatible with Safari.\n // Therefore, we wrap the callback syntax in a promise to give us the same\n // effect while ensuring compatibility\n // see more: https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/decodeAudioData#Browser_compatibility\n return new Promise((resolve, reject) => {\n audioCtx.decodeAudioData(\n audioArrayBuffer,\n audioDataBuffer => {\n const { duration } = audioDataBuffer;\n\n const { DATA_POINT_MARGIN } = AUDIO_VISUALIZATION_DIMENSIONS;\n const validContainerWidth =\n audioWaveContainerWidth - DATA_POINT_MARGIN * 2;\n const numberOfChunks = Math.floor(validContainerWidth / 2);\n const chunkSize = audioUint8Array.length / numberOfChunks;\n\n const chunkedAudioDataArray = [];\n for (let i = 1; i < numberOfChunks; i += 1) {\n let previousIndex = i - 1;\n if (previousIndex < 0) {\n previousIndex = 0;\n }\n\n chunkedAudioDataArray.push(\n audioUint8Array.slice(previousIndex * chunkSize, i * chunkSize),\n );\n }\n\n const reducedFloatArray = chunkedAudioDataArray.map(chunk => {\n const totalValue = chunk.reduce(\n (prevValue, currentValue) => prevValue + currentValue,\n );\n const floatValue = totalValue / (chunkSize * 255);\n return floatValue;\n });\n\n resolve({\n duration,\n reducedFloatArray,\n });\n },\n () => {\n reject(new Error('failed to chunk audio'));\n },\n );\n });\n};\n","export const createError = (title, description) => {\n return {\n title,\n description,\n };\n};\n","import ServiceContainer from 
'./ServiceContainer';\nexport default ServiceContainer;\n","import React, { useEffect, useReducer, useRef } from 'react';\nimport recognizeFile from 'watson-speech/speech-to-text/recognize-file';\nimport recognizeMicrophone from 'watson-speech/speech-to-text/recognize-microphone';\nimport ControlContainer from '../ControlContainer';\nimport OutputContainer from '../OutputContainer';\nimport Toast from '../Toast';\nimport { actionTypes, initialState, reducer } from './reducer';\nimport { convertAudioBlobToVisualizationData, formatStreamData } from './utils';\nimport { createError } from '../../utils';\n\nconst FILE_UPLOAD_ERROR_TITLE = 'File upload error';\nconst FILE_UPLOAD_ERROR_DESCRIPTION =\n 'There was a problem trying to read the file.';\nconst NO_MICROPHONE_TITLE = 'No microphone detected';\nconst NO_MICROPHONE_DESCRIPTION = 'Cannot transcribe from microphone.';\nconst AUDIO_TRANSCRIPTION_ERROR_TITLE = 'Audio transcription error';\nconst AUDIO_TRANSCRIPTION_ERROR_DESCRIPTION =\n 'There was an error trying to read the audio data. Please try again.';\nconst GDPR_DISCLAIMER =\n 'This system is for demonstration purposes only and is not intended to process Personal Data. No Personal Data is to be entered into this system as it may not have the necessary controls in place to meet the requirements of the General Data Protection Regulation (EU) 2016/679.';\n\nexport const ServiceContainer = () => {\n const [state, dispatch] = useReducer(reducer, initialState);\n const audioWaveContainerRef = useRef(null);\n\n useEffect(() => {\n const audioContext = new (window.AudioContext ||\n window.webkitAudioContext)();\n const audioAnalyzer = audioContext.createAnalyser();\n\n dispatch({\n audioAnalyzer,\n type: actionTypes.setAudioAnalyzer,\n });\n dispatch({\n audioContext,\n type: actionTypes.setAudioContext,\n });\n }, []);\n\n const parseResults = data => {\n if (data.speaker_labels) {\n dispatch({\n speakerLabels: data.speaker_labels,\n type: actionTypes.setSpeakerLabels,\n });\n } else {\n const { transcript, keywordInfo, resultIndex } = formatStreamData(data);\n\n dispatch({\n keywordInfo,\n resultIndex,\n transcript,\n type: actionTypes.updateResults,\n });\n }\n };\n\n const handleStreamEnd = () => {\n if (state.audioStream) {\n state.audioStream.stop();\n }\n\n dispatch({\n isTranscribing: false,\n type: actionTypes.setIsTranscribing,\n });\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n isSamplePlaying: false,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n };\n\n const readAudioFileForVisualization = async filename => {\n let containerClientWidth = null;\n if (\n audioWaveContainerRef &&\n audioWaveContainerRef.current &&\n audioWaveContainerRef.current.clientWidth\n ) {\n containerClientWidth = audioWaveContainerRef.current.clientWidth;\n }\n const audioVisualizationWidth = containerClientWidth || 300;\n\n const isFileType = filename instanceof File;\n try {\n let audioBlob = null;\n\n if (isFileType) {\n audioBlob = filename;\n } else {\n const audioRequest = await fetch(filename);\n audioBlob = await audioRequest.blob();\n }\n const {\n reducedFloatArray,\n duration,\n } = await convertAudioBlobToVisualizationData(\n audioBlob,\n state.audioContext,\n audioVisualizationWidth,\n );\n\n dispatch({\n audioDataArray: reducedFloatArray,\n audioDurationInMs: duration * 1000,\n type: actionTypes.setAudioVisualizationData,\n });\n } catch (err) {\n dispatch({\n 
error: createError(\n FILE_UPLOAD_ERROR_TITLE,\n FILE_UPLOAD_ERROR_DESCRIPTION,\n ),\n type: actionTypes.setError,\n });\n }\n };\n\n const captureAudioFromMicrophone = async recognizeOptions => {\n let mediaStream = null;\n try {\n mediaStream = await navigator.mediaDevices.getUserMedia({\n video: false,\n audio: true,\n });\n } catch (err) {\n dispatch({\n error: createError(NO_MICROPHONE_TITLE, NO_MICROPHONE_DESCRIPTION),\n type: actionTypes.setError,\n });\n }\n\n const recognizeMicrophoneStream = recognizeMicrophone({\n ...recognizeOptions,\n mediaStream,\n keepMic: true,\n });\n\n if (mediaStream) {\n const updatedAudioAnalyzer = state.audioAnalyzer;\n updatedAudioAnalyzer.fttSize = 2048;\n dispatch({\n audioAnalyzer: updatedAudioAnalyzer,\n type: actionTypes.setAudioAnalyzer,\n });\n const mediaStreamSource = state.audioContext.createMediaStreamSource(\n mediaStream,\n );\n mediaStreamSource.connect(state.audioAnalyzer);\n }\n\n return recognizeMicrophoneStream;\n };\n\n const onSubmit = stream => {\n stream\n .on('data', data => {\n parseResults(data);\n })\n .on('end', () => {\n handleStreamEnd();\n })\n .on('error', () => {\n dispatch({\n error: createError(\n AUDIO_TRANSCRIPTION_ERROR_TITLE,\n AUDIO_TRANSCRIPTION_ERROR_DESCRIPTION,\n ),\n type: actionTypes.setError,\n });\n\n handleStreamEnd();\n });\n\n dispatch({\n isTranscribing: true,\n type: actionTypes.setIsTranscribing,\n });\n };\n\n const cleanUpOldStreamIfNecessary = () => {\n if (state.audioStream) {\n state.audioStream.stop();\n state.audioStream.removeAllListeners();\n state.audioStream.recognizeStream.removeAllListeners();\n }\n\n if (state.audioContext && state.audioContext.state === 'suspended') {\n state.audioContext.resume();\n }\n };\n\n const onSelectNewModel = () => {\n dispatch({\n audioDataArray: [],\n audioDurationInMs: 0,\n type: actionTypes.setAudioVisualizationData,\n });\n dispatch({\n keywordInfo: [],\n resultIndex: 0,\n transcript: [],\n type: actionTypes.updateResults,\n });\n };\n\n const onStartPlayingFileUpload = async recognizeConfig => {\n cleanUpOldStreamIfNecessary();\n\n const stream = recognizeFile(recognizeConfig);\n await readAudioFileForVisualization(recognizeConfig.file);\n dispatch({\n isUploadPlaying: true,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n isSamplePlaying: false,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n dispatch({\n audioSource: 'upload',\n type: actionTypes.setAudioSource,\n });\n dispatch({\n audioStream: stream,\n type: actionTypes.setAudioStream,\n });\n\n onSubmit(stream);\n };\n\n const onStopPlayingFileUpload = () => {\n handleStreamEnd();\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n };\n\n const onStartPlayingSample = async recognizeConfig => {\n cleanUpOldStreamIfNecessary();\n\n const stream = recognizeFile(recognizeConfig);\n await readAudioFileForVisualization(recognizeConfig.file);\n dispatch({\n isSamplePlaying: true,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n dispatch({\n audioSource: 'sample',\n type: actionTypes.setAudioSource,\n });\n dispatch({\n audioStream: stream,\n type: actionTypes.setAudioStream,\n });\n\n onSubmit(stream);\n };\n\n const onStopPlayingSample = () => {\n handleStreamEnd();\n dispatch({\n isSamplePlaying: false,\n type: 
actionTypes.setIsSamplePlaying,\n });\n };\n\n const onStartRecording = async recognizeConfig => {\n cleanUpOldStreamIfNecessary();\n\n const stream = await captureAudioFromMicrophone(recognizeConfig);\n dispatch({\n isRecording: true,\n type: actionTypes.setIsRecording,\n });\n dispatch({\n isSamplePlaying: false,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n audioSource: 'microphone',\n type: actionTypes.setAudioSource,\n });\n dispatch({\n audioStream: stream,\n type: actionTypes.setAudioStream,\n });\n\n onSubmit(stream);\n };\n\n const onStopRecording = () => {\n handleStreamEnd();\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n };\n\n const onError = error => {\n dispatch({\n error,\n type: actionTypes.setError,\n });\n };\n\n return (\n
\n \n {state.error && (\n \n dispatch({ error: null, type: actionTypes.setError })\n }\n />\n )}\n \n \n
\n );\n};\n\nexport default ServiceContainer;\n","import React from 'react';\nimport { Button, Link } from 'carbon-components-react';\nimport { default as Api124 } from '@carbon/icons-react/lib/API--1/24';\nimport Document24 from '@carbon/icons-react/lib/document/24';\nimport IbmCloud24 from '@carbon/icons-react/lib/ibm-cloud/24';\nimport Launch16 from '@carbon/icons-react/lib/launch/16';\nimport LogoGithub24 from '@carbon/icons-react/lib/logo--github/24';\nimport Header from './components/Header';\nimport ServiceContainer from './components/ServiceContainer';\n\nconst HEADER_TITLE = 'Watson Speech to Text';\nconst HEADER_DESCRIPTION =\n 'IBM Watson Speech to Text is a cloud-native API that transforms voice into written text.';\nconst HEADER_LINKS = [\n \n

API reference

\n \n ,\n \n

Documentation

\n \n ,\n \n

GitHub

\n \n ,\n \n \n \n ,\n];\n\nexport const App = () => (\n
\n \n \n
\n);\n\nexport default App;\n","// This optional code is used to register a service worker.\n// register() is not called by default.\n\n// This lets the app load faster on subsequent visits in production, and gives\n// it offline capabilities. However, it also means that developers (and users)\n// will only see deployed updates on subsequent visits to a page, after all the\n// existing tabs open on the page have been closed, since previously cached\n// resources are updated in the background.\n\n// To learn more about the benefits of this model and instructions on how to\n// opt-in, read https://bit.ly/CRA-PWA\n\nconst isLocalhost = Boolean(\n window.location.hostname === 'localhost' ||\n // [::1] is the IPv6 localhost address.\n window.location.hostname === '[::1]' ||\n // 127.0.0.0/8 are considered localhost for IPv4.\n window.location.hostname.match(\n /^127(?:\\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/\n )\n);\n\nexport function register(config) {\n if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {\n // The URL constructor is available in all browsers that support SW.\n const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);\n if (publicUrl.origin !== window.location.origin) {\n // Our service worker won't work if PUBLIC_URL is on a different origin\n // from what our page is served on. This might happen if a CDN is used to\n // serve assets; see https://github.com/facebook/create-react-app/issues/2374\n return;\n }\n\n window.addEventListener('load', () => {\n const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;\n\n if (isLocalhost) {\n // This is running on localhost. Let's check if a service worker still exists or not.\n checkValidServiceWorker(swUrl, config);\n\n // Add some additional logging to localhost, pointing developers to the\n // service worker/PWA documentation.\n navigator.serviceWorker.ready.then(() => {\n console.log(\n 'This web app is being served cache-first by a service ' +\n 'worker. To learn more, visit https://bit.ly/CRA-PWA'\n );\n });\n } else {\n // Is not localhost. Just register service worker\n registerValidSW(swUrl, config);\n }\n });\n }\n}\n\nfunction registerValidSW(swUrl, config) {\n navigator.serviceWorker\n .register(swUrl)\n .then(registration => {\n registration.onupdatefound = () => {\n const installingWorker = registration.installing;\n if (installingWorker == null) {\n return;\n }\n installingWorker.onstatechange = () => {\n if (installingWorker.state === 'installed') {\n if (navigator.serviceWorker.controller) {\n // At this point, the updated precached content has been fetched,\n // but the previous service worker will still serve the older\n // content until all client tabs are closed.\n console.log(\n 'New content is available and will be used when all ' +\n 'tabs for this page are closed. See https://bit.ly/CRA-PWA.'\n );\n\n // Execute callback\n if (config && config.onUpdate) {\n config.onUpdate(registration);\n }\n } else {\n // At this point, everything has been precached.\n // It's the perfect time to display a\n // \"Content is cached for offline use.\" message.\n console.log('Content is cached for offline use.');\n\n // Execute callback\n if (config && config.onSuccess) {\n config.onSuccess(registration);\n }\n }\n }\n };\n };\n })\n .catch(error => {\n console.error('Error during service worker registration:', error);\n });\n}\n\nfunction checkValidServiceWorker(swUrl, config) {\n // Check if the service worker can be found. 
If it can't reload the page.\n fetch(swUrl, {\n headers: { 'Service-Worker': 'script' }\n })\n .then(response => {\n // Ensure service worker exists, and that we really are getting a JS file.\n const contentType = response.headers.get('content-type');\n if (\n response.status === 404 ||\n (contentType != null && contentType.indexOf('javascript') === -1)\n ) {\n // No service worker found. Probably a different app. Reload the page.\n navigator.serviceWorker.ready.then(registration => {\n registration.unregister().then(() => {\n window.location.reload();\n });\n });\n } else {\n // Service worker found. Proceed as normal.\n registerValidSW(swUrl, config);\n }\n })\n .catch(() => {\n console.log(\n 'No internet connection found. App is running in offline mode.'\n );\n });\n}\n\nexport function unregister() {\n if ('serviceWorker' in navigator) {\n navigator.serviceWorker.ready.then(registration => {\n registration.unregister();\n });\n }\n}\n","import React from 'react';\nimport ReactDOM from 'react-dom';\nimport './styles/main.scss';\nimport App from './App';\nimport * as serviceWorker from './serviceWorker';\n\nReactDOM.render(, document.getElementById('root'));\n\n// If you want your app to work offline and load faster, you can change\n// unregister() to register() below. Note this comes with some pitfalls.\n// Learn more about service workers: https://bit.ly/CRA-PWA\nserviceWorker.unregister();\n"],"sourceRoot":""} \ No newline at end of file diff --git a/build/static/js/main.a33677a6.chunk.js b/build/static/js/main.e75e603b.chunk.js similarity index 59% rename from build/static/js/main.a33677a6.chunk.js rename to build/static/js/main.e75e603b.chunk.js index 73e97fa..45f6475 100644 --- a/build/static/js/main.a33677a6.chunk.js +++ b/build/static/js/main.e75e603b.chunk.js @@ -1,2 +1,2 @@ -(this["webpackJsonp@ibm-watson/speech-to-text-code-pattern"]=this["webpackJsonp@ibm-watson/speech-to-text-code-pattern"]||[]).push([[0],{100:function(e,a){},102:function(e,a){},111:function(e,a){},113:function(e,a){},150:function(e,a){},152:function(e,a){},158:function(e,a,t){"use strict";t.r(a);var r=t(0),n=t.n(r),o=t(74),s=t.n(o),i=(t(92),t(168)),l=t(159),d=t(79),u=t.n(d),c=t(80),p=t.n(c),m=t(83),f=t.n(m),g=t(82),b=t.n(g),_=t(81),h=t.n(_),w=t(162),v=function(e){var a=e.description,t=e.links,r=e.title;return n.a.createElement(w.a,{className:"header"},n.a.createElement("div",{className:"title-container"},n.a.createElement("h2",{className:"header-title"},r),n.a.createElement("p",null,a)),n.a.createElement("div",{className:"link-container"},n.a.createElement("div",{className:"link-wrapper"},t.map((function(e){return e})))))};v.defaultProps={description:"",links:[],title:""};var y=v,k=t(8),S=t(4),E=t.n(S),x=t(10),N=t(47),C=t.n(N),P=t(75),R=t.n(P),B=t(163),A=t(169),M=t(164),T=t(165),I=t(170),D=t(46),O=t.n(D),U=t(27),z=function(e){var a=e.isRecording,t=e.isSamplePlaying,o=e.isUploadPlaying,s=e.keywordText,i=e.modelName,d=e.onError,u=e.onStartPlayingFileUpload,c=e.onStopPlayingFileUpload,p=e.onStartPlayingSample,m=e.onStopPlayingSample,f=e.onStartRecording,g=e.onStopRecording,b=e.useSpeakerLabels,_=Object(r.useState)([]),h=Object(x.a)(_,2),w=h[0],v=h[1];Object(r.useEffect)((function(){var e=[];s.length>0&&(e=s.split(",").map((function(e){return e.trim()}))),v(e)}),[s]);var y=U.find((function(e){return e.name===i})),S=y?y.filename:null,N=function(){var e,a,t,r;return E.a.async((function(n){for(;;)switch(n.prev=n.next){case 0:return n.next=2,E.a.awrap(O()("/api/auth"));case 2:return 
e=n.sent,n.next=5,E.a.awrap(e.json());case 5:if(a=n.sent,e.ok){n.next=9;break}return d(a),n.abrupt("return",{error:a});case 9:return t={},r=w.map((function(e){return e.toLowerCase()})),t=Object(k.a)({},t,{url:a.url||void 0,accessToken:a.accessToken,format:!0,keywords:w.length>0?r:void 0,keywordsThreshold:w.length>0?.01:void 0,model:i,objectMode:!0,play:!0,realtime:!0,resultsBySpeaker:b,speakerlessInterim:!0,timestamps:!0}),n.abrupt("return",t);case 13:case"end":return n.stop()}}))},C=function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(N());case 2:return e=a.sent,a.abrupt("return",Object(k.a)({},e,{file:"audio/".concat(S)}));case 4:case"end":return a.stop()}}))},P=function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(N());case 2:return e=a.sent,a.abrupt("return",Object(k.a)({},e,{resultsBySpeaker:!1}));case 4:case"end":return a.stop()}}))},R=function(e){var a;return E.a.async((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,E.a.awrap(N());case 2:return a=t.sent,t.abrupt("return",Object(k.a)({},a,{file:e,resultsBySpeaker:!1}));case 4:case"end":return t.stop()}}))};return n.a.createElement("div",{className:"submit-container"},t?n.a.createElement(l.a,{className:"submit-button",kind:"tertiary",onClick:m},"Stop audio sample"):n.a.createElement(l.a,{className:"submit-button",disabled:!i,kind:"tertiary",onClick:function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(C());case 2:(e=a.sent).error||p(e);case 4:case"end":return a.stop()}}))}},"Play audio sample"),a?n.a.createElement(l.a,{className:"submit-button",kind:"tertiary",onClick:g},"Stop recording"):n.a.createElement(l.a,{className:"submit-button",disabled:!i,kind:"tertiary",onClick:function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(P());case 2:(e=a.sent).error||f(e);case 4:case"end":return a.stop()}}))}},"Record your own"),o?n.a.createElement(l.a,{className:"submit-button",kind:"tertiary",onClick:c},"Stop playing"):n.a.createElement(I.a,{accept:["audio/wav","audio/mpeg","audio/flac","audio/opus"],buttonKind:"tertiary",className:"submit-button",disabled:!i,disableLabelChanges:!0,labelText:"Upload file",onChange:function(e){var a,t;return E.a.async((function(r){for(;;)switch(r.prev=r.next){case 0:return a=e.currentTarget.files[0],r.next=3,E.a.awrap(R(a));case 3:(t=r.sent).error||u(t);case 5:case"end":return r.stop()}}))}}))};z.defaultProps={isRecording:!1,isSamplePlaying:!1,isUploadPlaying:!1,keywordText:"",modelName:null,onError:function(){},onStartPlayingFileUpload:function(){},onStopPlayingFileUpload:function(){},onStartPlayingSample:function(){},onStopPlayingSample:function(){},onStartRecording:function(){},onStopRecording:function(){},useSpeakerLabels:!1};var j=z,W=function(e){var a=e.isRecording,t=e.isSamplePlaying,o=e.isUploadPlaying,s=e.onError,i=e.onSelectNewModel,l=e.onStartPlayingFileUpload,d=e.onStopPlayingFileUpload,u=e.onStartPlayingSample,c=e.onStopPlayingSample,p=e.onStartRecording,m=e.onStopRecording,f=U.map((function(e){return{id:e.name,label:e.description,supportsSpeakerLabels:e.supported_features.speaker_labels}})),g=Object(r.useState)(f[0]),b=Object(x.a)(g,2),_=b[0],h=b[1],v=Object(r.useState)(U[0].keywords),y=Object(x.a)(v,2),k=y[0],S=y[1],E=Object(r.useState)(!1),N=Object(x.a)(E,2),C=N[0],P=N[1];return 
n.a.createElement(w.a,{className:"control-container"},n.a.createElement("h3",{className:"container-title"},"Input"),n.a.createElement(B.a,{legendText:"Language model"},n.a.createElement(A.a,{id:"language-model-dropdown",label:"Select a language model",onChange:function(e){h(e.selectedItem);var a=U.find((function(a){return a.name===e.selectedItem.id})).keywords;S(a),C&&!e.selectedItem.supportsSpeakerLabels&&P(!1),i()},items:f,selectedItem:_&&_.label,defaultText:"Select a language model",ariaLabel:"Language selection dropdown",light:!0})),n.a.createElement(B.a,{legendText:"Keywords to spot"},n.a.createElement(M.a,{id:"custom-keyword-input",labelText:"Custom language keyword input",placeholder:"Enter custom language keywords",hideLabel:!0,invalidText:"Invalid keywords provided",value:k,onChange:function(e){S(e.target.value)},light:!0})),n.a.createElement(B.a,{legendText:"Detect multiple speakers (only supported with sample audio)"},n.a.createElement(T.a,{id:"speaker-label-toggle","aria-label":"Speaker label toggle",disabled:!_||!_.supportsSpeakerLabels,toggled:C,onToggle:function(){P(!C)}})),n.a.createElement(j,{isRecording:a,isSamplePlaying:t,isUploadPlaying:o,keywordText:k,modelName:_&&_.id,onError:s,onStartPlayingFileUpload:l,onStopPlayingFileUpload:d,onStartPlayingSample:u,onStopPlayingSample:c,onStartRecording:p,onStopRecording:m,useSpeakerLabels:C}))};W.defaultProps={isRecording:!1,isSamplePlaying:!1,isUploadPlaying:!1,onError:function(){},onSelectNewModel:function(){},onStartPlayingFileUpload:function(){},onStopPlayingFileUpload:function(){},onStartPlayingSample:function(){},onStopPlayingSample:function(){},onStartRecording:function(){},onStopRecording:function(){}};var F=W,L=t(76),G=t(77),K=t(85),J=t(78),X=t(36),q=t(84),Y=function(e){function a(e){var t;return Object(L.a)(this,a),(t=Object(K.a)(this,Object(J.a)(a).call(this,e))).audioWaveCanvasRef=n.a.createRef(),t.audioWaveCanvasCtx=null,t.animationFrameId=null,t.draw=t.draw.bind(Object(X.a)(t)),t.state={startingTimestamp:0,microphoneData:new Uint8Array(1024)},t}return Object(q.a)(a,e),Object(G.a)(a,[{key:"componentDidMount",value:function(){this.audioWaveCanvasCtx=this.audioWaveCanvasRef.current.getContext("2d"),this.audioWaveCanvasCtx.lineCap="round",this.initializeCanvasDimensions()}},{key:"componentDidUpdate",value:function(e){Object(x.a)(e.data,1)[0]!==Object(x.a)(this.props.data,1)[0]&&this.drawInitialAudioWave(),!1===e.isTranscribing&&!0===this.props.isTranscribing?(this.setStartingTimestamp(),this.draw()):!0===e.isTranscribing&&!1===this.props.isTranscribing&&this.stopDrawing()}},{key:"setStartingTimestamp",value:function(){this.setState({startingTimestamp:Date.now()})}},{key:"initializeCanvasDimensions",value:function(){var e=this.audioWaveCanvasRef.current,a=this.props.audioWaveContainerRef.current;e.width=a.clientWidth,e.height=100}},{key:"drawInitialAudioWave",value:function(){this.resetCanvasForNewFrame(),this.drawEmptyDataPoints()}},{key:"drawEmptyDataPoints",value:function(){var e=this;this.props.data.forEach((function(a,t){e.audioWaveCanvasCtx.beginPath(),e.audioWaveCanvasCtx.fillStyle="rgba(0, 98, 255, 0.5)",e.audioWaveCanvasCtx.fillRect(2*t,50,1,50*a),e.audioWaveCanvasCtx.fillRect(2*t,50,1,-50*a),e.audioWaveCanvasCtx.stroke(),e.audioWaveCanvasCtx.closePath()}))}},{key:"drawMicrophoneDataPoints",value:function(){var e=this;this.props.audioAnalyzer.getByteFrequencyData(this.state.microphoneData);var a=this.state.microphoneData;[].slice.call(a).map((function(e){return 
e/255})).forEach((function(a,t){e.audioWaveCanvasCtx.beginPath(),e.audioWaveCanvasCtx.fillStyle="rgba(0, 98, 255, 1)",e.audioWaveCanvasCtx.fillRect(2*t,50,1,50*a),e.audioWaveCanvasCtx.fillRect(2*t,50,1,-50*a),e.audioWaveCanvasCtx.stroke(),e.audioWaveCanvasCtx.closePath()}))}},{key:"drawAudioDataPoints",value:function(){var e=this,a=this.props,t=a.data,r=a.duration,n=(Date.now()-this.state.startingTimestamp)/r,o=t.length*n,s=Math.floor(o),i=o-s;i>1&&(i=1),i<.5&&(i=.5),t.forEach((function(a,t){e.audioWaveCanvasCtx.beginPath();var r="rgba(0, 98, 255, 1)";t>s&&(r="rgba(0, 98, 255, 0.5)"),t-1===s&&(r="rgba(0, 98, 255, ".concat(i.toFixed(2),")")),e.audioWaveCanvasCtx.fillStyle=r,e.audioWaveCanvasCtx.fillRect(2*t,50,1,50*a),e.audioWaveCanvasCtx.fillRect(2*t,50,1,-50*a),e.audioWaveCanvasCtx.stroke(),e.audioWaveCanvasCtx.closePath()}))}},{key:"draw",value:function(){this.resetCanvasForNewFrame(),this.props.audioSource&&"microphone"===this.props.audioSource?this.drawMicrophoneDataPoints():this.drawAudioDataPoints(),this.animationFrameId=requestAnimationFrame(this.draw)}},{key:"resetCanvasForNewFrame",value:function(){var e=this.audioWaveCanvasRef.current;this.audioWaveCanvasCtx.clearRect(0,0,e.width,e.height)}},{key:"stopDrawing",value:function(){this.animationFrameId&&cancelAnimationFrame(this.animationFrameId)}},{key:"render",value:function(){return n.a.createElement("div",{ref:this.props.audioWaveContainerRef,className:"audiowave"},n.a.createElement("canvas",{ref:this.audioWaveCanvasRef}))}}]),a}(n.a.Component);Y.defaultProps={audioSource:null,audioAnalyzer:null};var H=Y,V=t(166),Z=function(e){var a=e.confidence,t=e.startTime,r=e.endTime;return n.a.createElement("div",{className:"keyword-tooltip"},n.a.createElement("p",null,"Confidence: ",a),n.a.createElement("p",null,t,"s - ",r,"s"))},Q=t(32),$=function(e,a,t){var r=[];if(a.length>0){var n=function(e){var a=[];e.forEach((function(e){a=[].concat(Object(Q.a)(a),Object(Q.a)(Object.keys(e)))}));var t=a.map((function(e,t){return t!==a.length-1?"".concat(e,"|"):e})).reduce((function(e,a){return e+a}),"");return new RegExp("(".concat(t,")(?!')"),"gi")}(a);r=e.split(n)}if(0===r.length)return[{text:e,type:"normal"}];var o={};return r.map((function(e,r){var n=e.toLowerCase();if(r%2===0)return{text:e,type:"normal"};var s=a[t]&&a[t][n],i=0;o[n]?(i=o[n],o[n]+=1):o[n]=1;var l=s&&s[i];return l?{text:e,type:"keyword",startTime:l.start_time,endTime:l.end_time,confidence:l.confidence}:{}}))},ee=function(e){var a=e.keywordInfo,t=e.transcriptArray;return n.a.createElement("div",{className:"transcript-box"},t.map((function(e,t){var r=e.speaker,o=e.text,s=$(o,a,t);return n.a.createElement("div",{key:"transcript-".concat(t)},null!==r&&n.a.createElement("span",{className:"speaker-label--".concat(r)},"Speaker ".concat(r,": ")),s.map((function(e,a){return e?"normal"===e.type?n.a.createElement("span",{key:"transcript-text-".concat(t,"-").concat(a)},"".concat(e.text)):"keyword"===e.type?n.a.createElement(V.a,{align:"center",direction:"top",key:"transcript-keyword-".concat(t,"-").concat(a),tooltipText:n.a.createElement(Z,{confidence:e.confidence,startTime:e.startTime,endTime:e.endTime}),triggerClassName:"keyword-info-trigger"},e.text):null:null})))})))};ee.defaultProps={keywordInfo:[],transcriptArray:[]};var ae=ee,te=function(e){var a=e.audioAnalyzer,t=e.audioDataArray,r=e.audioDuration,o=e.audioSource,s=e.audioWaveContainerRef,i=e.isTranscribing,l=e.keywordInfo,d=e.transcriptArray;return 
n.a.createElement(w.a,{className:"output-container"},n.a.createElement("h3",{className:"container-title"},"Output"),n.a.createElement(B.a,{legendText:"Audio"},n.a.createElement(H,{audioWaveContainerRef:s,data:t,duration:r,isTranscribing:i,audioSource:o,audioAnalyzer:a})),n.a.createElement(B.a,{legendText:"Transcript"},n.a.createElement(ae,{keywordInfo:l,transcriptArray:d})))};te.defaultProps={audioDataArray:[],audioDuration:0,audioSource:"",isTranscribing:!1,keywordInfo:[],transcriptArray:[]};var re=te,ne=t(167),oe=function(e){var a=e.caption,t=e.children,o=e.className,s=e.hideAfterFirstDisplay,i=e.hideCloseButton,l=e.iconDescription,d=e.kind,u=e.lowContrast,c=e.onCloseButtonClick,p=e.role,m=e.subtitle,f=e.timeout,g=e.title,b=Object(r.useState)(),_=Object(x.a)(b,2),h=_[0],w=_[1],v=Object(r.useState)(!1),y=Object(x.a)(v,2),k=y[0],S=y[1];return Object(r.useEffect)((function(){w(Math.random().toString(36).substring(2,15)+Math.random().toString(36).substring(2,15))}),[]),Object(r.useEffect)((function(){var e=document.querySelector(".custom-toast-".concat(h));e&&(e.className+="enter")}),[h]),Object(r.useEffect)((function(){s&&void 0!==typeof window&&void 0!==typeof window.localStorage&&"true"===window.localStorage.getItem("notificationHasBeenSeen")&&S(!0)}),[s]),k?null:n.a.createElement(ne.a,{caption:a,className:"custom-toast-".concat(h," ").concat(o),hideCloseButton:i,iconDescription:l,kind:d,lowContrast:u,onCloseButtonClick:function(){s&&void 0!==typeof window&&void 0!==typeof window.localStorage&&window.localStorage.setItem("notificationHasBeenSeen","true"),c()},role:p,subtitle:m,timeout:f,title:g},t)};oe.defaultProps={caption:"",children:null,className:"",hideAfterFirstDisplay:!0,hideCloseButton:!1,iconDescription:"closes notification",kind:"error",lowContrast:!1,onCloseButtonClick:function(){},role:"alert",subtitle:"",timeout:0,title:""};var se=oe,ie="SET_AUDIO_ANALYZER",le="SET_AUDIO_CONTEXT",de="SET_AUDIO_SOURCE",ue="SET_AUDIO_STREAM",ce="SET_AUDIO_VISUALIZATION_DATA",pe="SET_ERROR",me="SET_SPEAKER_LABELS",fe="SET_IS_RECORDING",ge="SET_IS_SAMPLE_PLAYING",be="SET_IS_TRANSCRIBING",_e="SET_IS_UPLOAD_PLAYING",he="UPDATE_RESULTS",we={audioAnalyzer:{},audioContext:null,audioDataArray:[],audioDurationInMs:0,audioSource:"",audioStream:null,error:null,isRecording:!1,isSamplePlaying:!1,isTranscribing:!1,isUploadPlaying:!1,keywordInfo:[],speakerLabels:[],transcript:[]},ve=function(e,a){switch(a.type){case"SET_AUDIO_ANALYZER":return Object(k.a)({},e,{audioAnalyzer:a.audioAnalyzer});case"SET_AUDIO_CONTEXT":return Object(k.a)({},e,{audioContext:a.audioContext});case"SET_AUDIO_SOURCE":return Object(k.a)({},e,{audioSource:a.audioSource});case"SET_AUDIO_STREAM":return Object(k.a)({},e,{audioStream:a.audioStream});case"SET_AUDIO_VISUALIZATION_DATA":return Object(k.a)({},e,{audioDataArray:a.audioDataArray,audioDurationInMs:a.audioDurationInMs});case"SET_ERROR":return Object(k.a)({},e,{error:a.error});case"SET_IS_RECORDING":return Object(k.a)({},e,{isRecording:a.isRecording});case"SET_IS_SAMPLE_PLAYING":return Object(k.a)({},e,{isSamplePlaying:a.isSamplePlaying});case"SET_IS_TRANSCRIBING":return Object(k.a)({},e,{isTranscribing:a.isTranscribing});case"SET_IS_UPLOAD_PLAYING":return Object(k.a)({},e,{isUploadPlaying:a.isUploadPlaying});case"SET_SPEAKER_LABELS":return Object(k.a)({},e,{speakerLabels:a.speakerLabels});case"UPDATE_RESULTS":var t=Object(Q.a)(e.transcript);return 
0===a.resultIndex?t=a.transcript:t[a.resultIndex]=a.transcript[0],Object(k.a)({},e,{keywordInfo:a.keywordInfo,transcript:t});default:throw new Error}},ye=2,ke=function(e){var a=new FileReader;return new Promise((function(t,r){a.onload=function(){var e=a.result;t(e)},a.onerror=function(){a.abort(),r(new Error("failed to process file"))},a.readAsArrayBuffer(e)}))},Se=function(e,a,t){var r,n;return E.a.async((function(o){for(;;)switch(o.prev=o.next){case 0:return o.next=2,E.a.awrap(ke(e));case 2:return r=o.sent,n=new Uint8Array(r.slice(0)),o.abrupt("return",new Promise((function(e,o){a.decodeAudioData(r,(function(a){for(var r=a.duration,o=t-2*ye,s=Math.floor(o/2),i=n.length/s,l=[],d=1;d0&&(e=s.split(",").map((function(e){return e.trim()}))),v(e)}),[s]);var y=U.find((function(e){return e.name===i})),S=y?y.filename:null,N=function(){var e,a,t,r;return E.a.async((function(n){for(;;)switch(n.prev=n.next){case 0:return n.next=2,E.a.awrap(O()("/api/auth"));case 2:return e=n.sent,n.next=5,E.a.awrap(e.json());case 5:if(a=n.sent,e.ok){n.next=9;break}return d(a),n.abrupt("return",{error:a});case 9:return t={},r=w.map((function(e){return e.toLowerCase()})),t=Object(k.a)({},t,{url:a.url||void 0,accessToken:a.accessToken,format:!0,keywords:w.length>0?r:void 0,keywordsThreshold:w.length>0?.01:void 0,model:i,objectMode:!0,play:!0,realtime:!0,resultsBySpeaker:b,speakerlessInterim:!0,timestamps:!0}),n.abrupt("return",t);case 13:case"end":return n.stop()}}))},C=function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(N());case 2:return e=a.sent,a.abrupt("return",Object(k.a)({},e,{file:"audio/".concat(S)}));case 4:case"end":return a.stop()}}))},P=function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(N());case 2:return e=a.sent,a.abrupt("return",Object(k.a)({},e,{resultsBySpeaker:!1}));case 4:case"end":return a.stop()}}))},R=function(e){var a;return E.a.async((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,E.a.awrap(N());case 2:return a=t.sent,t.abrupt("return",Object(k.a)({},a,{file:e,resultsBySpeaker:!1}));case 4:case"end":return t.stop()}}))};return n.a.createElement("div",{className:"submit-container"},t?n.a.createElement(l.a,{className:"submit-button",kind:"tertiary",onClick:m},"Stop audio sample"):n.a.createElement(l.a,{className:"submit-button",disabled:!i,kind:"tertiary",onClick:function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(C());case 2:(e=a.sent).error||p(e);case 4:case"end":return a.stop()}}))}},"Play audio sample"),a?n.a.createElement(l.a,{className:"submit-button",kind:"tertiary",onClick:g},"Stop recording"):n.a.createElement(l.a,{className:"submit-button",disabled:!i,kind:"tertiary",onClick:function(){var e;return E.a.async((function(a){for(;;)switch(a.prev=a.next){case 0:return a.next=2,E.a.awrap(P());case 2:(e=a.sent).error||f(e);case 4:case"end":return a.stop()}}))}},"Record your own"),o?n.a.createElement(l.a,{className:"submit-button",kind:"tertiary",onClick:c},"Stop playing"):n.a.createElement(I.a,{accept:["audio/wav","audio/mpeg","audio/flac","audio/opus"],buttonKind:"tertiary",className:"submit-button",disabled:!i,disableLabelChanges:!0,labelText:"Upload file",onChange:function(e){var a,t;return E.a.async((function(r){for(;;)switch(r.prev=r.next){case 0:return a=e.currentTarget.files[0],r.next=3,E.a.awrap(R(a));case 3:(t=r.sent).error||u(t);case 5:case"end":return 
r.stop()}}))}}))};j.defaultProps={isRecording:!1,isSamplePlaying:!1,isUploadPlaying:!1,keywordText:"",modelName:null,onError:function(){},onStartPlayingFileUpload:function(){},onStopPlayingFileUpload:function(){},onStartPlayingSample:function(){},onStopPlayingSample:function(){},onStartRecording:function(){},onStopRecording:function(){},useSpeakerLabels:!1};var z=j,W=function(e){var a=e.isRecording,t=e.isSamplePlaying,o=e.isUploadPlaying,s=e.onError,i=e.onSelectNewModel,l=e.onStartPlayingFileUpload,d=e.onStopPlayingFileUpload,u=e.onStartPlayingSample,c=e.onStopPlayingSample,p=e.onStartRecording,m=e.onStopRecording,f=U.map((function(e){return{id:e.name,label:e.description,supportsSpeakerLabels:e.supported_features.speaker_labels}})),g=Object(r.useState)(f[0]),b=Object(x.a)(g,2),_=b[0],h=b[1],v=Object(r.useState)(U[0].keywords),y=Object(x.a)(v,2),k=y[0],S=y[1],E=Object(r.useState)(!1),N=Object(x.a)(E,2),C=N[0],P=N[1];return n.a.createElement(w.a,{className:"control-container"},n.a.createElement("h3",{className:"container-title"},"Input"),n.a.createElement(B.a,{legendText:"Language model"},n.a.createElement(A.a,{id:"language-model-dropdown",label:"Select a language model",onChange:function(e){h(e.selectedItem);var a=U.find((function(a){return a.name===e.selectedItem.id})).keywords;S(a),C&&!e.selectedItem.supportsSpeakerLabels&&P(!1),i()},items:f,selectedItem:_&&_.label,defaultText:"Select a language model",ariaLabel:"Language selection dropdown",light:!0})),n.a.createElement(B.a,{legendText:"Keywords to spot"},n.a.createElement(M.a,{id:"custom-keyword-input",labelText:"Custom language keyword input",placeholder:"Enter custom language keywords",hideLabel:!0,invalidText:"Invalid keywords provided",value:k,onChange:function(e){S(e.target.value)},light:!0})),n.a.createElement(B.a,{legendText:"Detect multiple speakers (only supported with sample audio)"},n.a.createElement(T.a,{id:"speaker-label-toggle","aria-label":"Speaker label toggle",disabled:!_||!_.supportsSpeakerLabels,toggled:C,onToggle:function(){P(!C)}})),n.a.createElement(z,{isRecording:a,isSamplePlaying:t,isUploadPlaying:o,keywordText:k,modelName:_&&_.id,onError:s,onStartPlayingFileUpload:l,onStopPlayingFileUpload:d,onStartPlayingSample:u,onStopPlayingSample:c,onStartRecording:p,onStopRecording:m,useSpeakerLabels:C}))};W.defaultProps={isRecording:!1,isSamplePlaying:!1,isUploadPlaying:!1,onError:function(){},onSelectNewModel:function(){},onStartPlayingFileUpload:function(){},onStopPlayingFileUpload:function(){},onStartPlayingSample:function(){},onStopPlayingSample:function(){},onStartRecording:function(){},onStopRecording:function(){}};var F=W,L=t(76),G=t(77),K=t(85),J=t(78),X=t(36),q=t(84),Y=function(e){function a(e){var t;return Object(L.a)(this,a),(t=Object(K.a)(this,Object(J.a)(a).call(this,e))).audioWaveCanvasRef=n.a.createRef(),t.audioWaveCanvasCtx=null,t.animationFrameId=null,t.draw=t.draw.bind(Object(X.a)(t)),t.state={startingTimestamp:0,microphoneData:new Uint8Array(1024)},t}return 
Object(q.a)(a,e),Object(G.a)(a,[{key:"componentDidMount",value:function(){this.audioWaveCanvasCtx=this.audioWaveCanvasRef.current.getContext("2d"),this.audioWaveCanvasCtx.lineCap="round",this.initializeCanvasDimensions()}},{key:"componentDidUpdate",value:function(e){Object(x.a)(e.data,1)[0]!==Object(x.a)(this.props.data,1)[0]&&this.drawInitialAudioWave(),!1===e.isTranscribing&&!0===this.props.isTranscribing?(this.setStartingTimestamp(),this.draw()):!0===e.isTranscribing&&!1===this.props.isTranscribing&&this.stopDrawing()}},{key:"setStartingTimestamp",value:function(){this.setState({startingTimestamp:Date.now()})}},{key:"initializeCanvasDimensions",value:function(){var e=this.audioWaveCanvasRef.current,a=this.props.audioWaveContainerRef.current;e.width=a.clientWidth,e.height=100}},{key:"drawInitialAudioWave",value:function(){this.resetCanvasForNewFrame(),this.drawEmptyDataPoints()}},{key:"drawEmptyDataPoints",value:function(){var e=this;this.props.data.forEach((function(a,t){e.audioWaveCanvasCtx.beginPath(),e.audioWaveCanvasCtx.fillStyle="rgba(0, 98, 255, 0.5)",e.audioWaveCanvasCtx.fillRect(2*t,50,1,50*a),e.audioWaveCanvasCtx.fillRect(2*t,50,1,-50*a),e.audioWaveCanvasCtx.stroke(),e.audioWaveCanvasCtx.closePath()}))}},{key:"drawMicrophoneDataPoints",value:function(){var e=this;this.props.audioAnalyzer.getByteFrequencyData(this.state.microphoneData);var a=this.state.microphoneData;[].slice.call(a).map((function(e){return e/255})).forEach((function(a,t){e.audioWaveCanvasCtx.beginPath(),e.audioWaveCanvasCtx.fillStyle="rgba(0, 98, 255, 1)",e.audioWaveCanvasCtx.fillRect(2*t,50,1,50*a),e.audioWaveCanvasCtx.fillRect(2*t,50,1,-50*a),e.audioWaveCanvasCtx.stroke(),e.audioWaveCanvasCtx.closePath()}))}},{key:"drawAudioDataPoints",value:function(){var e=this,a=this.props,t=a.data,r=a.duration,n=(Date.now()-this.state.startingTimestamp)/r,o=t.length*n,s=Math.floor(o),i=o-s;i>1&&(i=1),i<.5&&(i=.5),t.forEach((function(a,t){e.audioWaveCanvasCtx.beginPath();var r="rgba(0, 98, 255, 1)";t>s&&(r="rgba(0, 98, 255, 0.5)"),t-1===s&&(r="rgba(0, 98, 255, ".concat(i.toFixed(2),")")),e.audioWaveCanvasCtx.fillStyle=r,e.audioWaveCanvasCtx.fillRect(2*t,50,1,50*a),e.audioWaveCanvasCtx.fillRect(2*t,50,1,-50*a),e.audioWaveCanvasCtx.stroke(),e.audioWaveCanvasCtx.closePath()}))}},{key:"draw",value:function(){this.resetCanvasForNewFrame(),this.props.audioSource&&"microphone"===this.props.audioSource?this.drawMicrophoneDataPoints():this.drawAudioDataPoints(),this.animationFrameId=requestAnimationFrame(this.draw)}},{key:"resetCanvasForNewFrame",value:function(){var e=this.audioWaveCanvasRef.current;this.audioWaveCanvasCtx.clearRect(0,0,e.width,e.height)}},{key:"stopDrawing",value:function(){this.animationFrameId&&cancelAnimationFrame(this.animationFrameId)}},{key:"render",value:function(){return n.a.createElement("div",{ref:this.props.audioWaveContainerRef,className:"audiowave"},n.a.createElement("canvas",{ref:this.audioWaveCanvasRef}))}}]),a}(n.a.Component);Y.defaultProps={audioSource:null,audioAnalyzer:null};var H=Y,V=t(166),Z=function(e){var a=e.confidence,t=e.startTime,r=e.endTime;return n.a.createElement("div",{className:"keyword-tooltip"},n.a.createElement("p",null,"Confidence: ",a),n.a.createElement("p",null,t,"s - ",r,"s"))},Q=t(32),$=function(e,a,t){var r=[];if(a.length>0){var n=function(e){var a=[];e.forEach((function(e){a=[].concat(Object(Q.a)(a),Object(Q.a)(Object.keys(e)))}));var t=a.map((function(e,t){return t!==a.length-1?"".concat(e,"|"):e})).reduce((function(e,a){return e+a}),"");return new 
RegExp("(".concat(t,")(?!')"),"gi")}(a);r=e.split(n)}if(0===r.length)return[{text:e,type:"normal"}];var o={};return r.map((function(e,r){var n=e.toLowerCase();if(r%2===0)return{text:e,type:"normal"};var s=a[t]&&a[t][n],i=0;o[n]?(i=o[n],o[n]+=1):o[n]=1;var l=s&&s[i];return l?{text:e,type:"keyword",startTime:l.start_time,endTime:l.end_time,confidence:l.confidence}:{}}))},ee=function(e){var a=e.keywordInfo,t=e.transcriptArray;return n.a.createElement("div",{className:"transcript-box"},t.map((function(e,t){var r=e.speaker,o=e.text,s=$(o,a,t);return n.a.createElement("div",{key:"transcript-".concat(t)},null!==r&&n.a.createElement("span",{className:"speaker-label--".concat(r)},"Speaker ".concat(r,": ")),s.map((function(e,a){return e?"normal"===e.type?n.a.createElement("span",{key:"transcript-text-".concat(t,"-").concat(a)},"".concat(e.text)):"keyword"===e.type?n.a.createElement(V.a,{align:"center",direction:"top",key:"transcript-keyword-".concat(t,"-").concat(a),tooltipText:n.a.createElement(Z,{confidence:e.confidence,startTime:e.startTime,endTime:e.endTime}),triggerClassName:"keyword-info-trigger"},e.text):null:null})))})))};ee.defaultProps={keywordInfo:[],transcriptArray:[]};var ae=ee,te=function(e){var a=e.audioAnalyzer,t=e.audioDataArray,r=e.audioDuration,o=e.audioSource,s=e.audioWaveContainerRef,i=e.isTranscribing,l=e.keywordInfo,d=e.transcriptArray;return n.a.createElement(w.a,{className:"output-container"},n.a.createElement("h3",{className:"container-title"},"Output"),n.a.createElement(B.a,{legendText:"Audio"},n.a.createElement(H,{audioWaveContainerRef:s,data:t,duration:r,isTranscribing:i,audioSource:o,audioAnalyzer:a})),n.a.createElement(B.a,{legendText:"Transcript"},n.a.createElement(ae,{keywordInfo:l,transcriptArray:d})))};te.defaultProps={audioDataArray:[],audioDuration:0,audioSource:"",isTranscribing:!1,keywordInfo:[],transcriptArray:[]};var re=te,ne=t(167),oe=function(e){var a=e.caption,t=e.children,o=e.className,s=e.hideAfterFirstDisplay,i=e.hideCloseButton,l=e.iconDescription,d=e.kind,u=e.lowContrast,c=e.onCloseButtonClick,p=e.role,m=e.subtitle,f=e.timeout,g=e.title,b=Object(r.useState)(),_=Object(x.a)(b,2),h=_[0],w=_[1],v=Object(r.useState)(!1),y=Object(x.a)(v,2),k=y[0],S=y[1];return Object(r.useEffect)((function(){w(Math.random().toString(36).substring(2,15)+Math.random().toString(36).substring(2,15))}),[]),Object(r.useEffect)((function(){var e=document.querySelector(".custom-toast-".concat(h));e&&(e.className+="enter")}),[h]),Object(r.useEffect)((function(){s&&void 0!==typeof window&&void 0!==typeof window.localStorage&&"true"===window.localStorage.getItem("notificationHasBeenSeen")&&S(!0)}),[s]),k?null:n.a.createElement(ne.a,{caption:a,className:"custom-toast-".concat(h," ").concat(o),hideCloseButton:i,iconDescription:l,kind:d,lowContrast:u,onCloseButtonClick:function(){s&&void 0!==typeof window&&void 0!==typeof window.localStorage&&window.localStorage.setItem("notificationHasBeenSeen","true"),c()},role:p,subtitle:m,timeout:f,title:g},t)};oe.defaultProps={caption:"",children:null,className:"",hideAfterFirstDisplay:!0,hideCloseButton:!1,iconDescription:"closes notification",kind:"error",lowContrast:!1,onCloseButtonClick:function(){},role:"alert",subtitle:"",timeout:0,title:""};var 
se=oe,ie="SET_AUDIO_ANALYZER",le="SET_AUDIO_CONTEXT",de="SET_AUDIO_SOURCE",ue="SET_AUDIO_STREAM",ce="SET_AUDIO_VISUALIZATION_DATA",pe="SET_ERROR",me="SET_SPEAKER_LABELS",fe="SET_IS_RECORDING",ge="SET_IS_SAMPLE_PLAYING",be="SET_IS_TRANSCRIBING",_e="SET_IS_UPLOAD_PLAYING",he="UPDATE_RESULTS",we={audioAnalyzer:{},audioContext:null,audioDataArray:[],audioDurationInMs:0,audioSource:"",audioStream:null,error:null,isRecording:!1,isSamplePlaying:!1,isTranscribing:!1,isUploadPlaying:!1,keywordInfo:[],speakerLabels:[],transcript:[]},ve=function(e,a){switch(a.type){case"SET_AUDIO_ANALYZER":return Object(k.a)({},e,{audioAnalyzer:a.audioAnalyzer});case"SET_AUDIO_CONTEXT":return Object(k.a)({},e,{audioContext:a.audioContext});case"SET_AUDIO_SOURCE":return Object(k.a)({},e,{audioSource:a.audioSource});case"SET_AUDIO_STREAM":return Object(k.a)({},e,{audioStream:a.audioStream});case"SET_AUDIO_VISUALIZATION_DATA":return Object(k.a)({},e,{audioDataArray:a.audioDataArray,audioDurationInMs:a.audioDurationInMs});case"SET_ERROR":return Object(k.a)({},e,{error:a.error});case"SET_IS_RECORDING":return Object(k.a)({},e,{isRecording:a.isRecording});case"SET_IS_SAMPLE_PLAYING":return Object(k.a)({},e,{isSamplePlaying:a.isSamplePlaying});case"SET_IS_TRANSCRIBING":return Object(k.a)({},e,{isTranscribing:a.isTranscribing});case"SET_IS_UPLOAD_PLAYING":return Object(k.a)({},e,{isUploadPlaying:a.isUploadPlaying});case"SET_SPEAKER_LABELS":return Object(k.a)({},e,{speakerLabels:a.speakerLabels});case"UPDATE_RESULTS":var t=Object(Q.a)(e.transcript);return 0===a.resultIndex?t=a.transcript:t[a.resultIndex]=a.transcript[0],Object(k.a)({},e,{keywordInfo:a.keywordInfo,transcript:t});default:throw new Error}},ye=2,ke=function(e){var a=new FileReader;return new Promise((function(t,r){a.onload=function(){var e=a.result;t(e)},a.onerror=function(){a.abort(),r(new Error("failed to process file"))},a.readAsArrayBuffer(e)}))},Se=function(e,a,t){var r,n;return E.a.async((function(o){for(;;)switch(o.prev=o.next){case 0:return o.next=2,E.a.awrap(ke(e));case 2:return r=o.sent,n=new Uint8Array(r.slice(0)),o.abrupt("return",new Promise((function(e,o){a.decodeAudioData(r,(function(a){for(var r=a.duration,o=t-2*ye,s=Math.floor(o/2),i=n.length/s,l=[],d=1;d (\n \n
\n

{title}

\n

{description}

\n
\n
\n
{links.map(link => link)}
\n
\n
\n);\n\nHeader.propTypes = {\n description: PropTypes.string,\n links: PropTypes.arrayOf(PropTypes.object),\n title: PropTypes.string,\n};\n\nHeader.defaultProps = {\n description: '',\n links: [],\n title: '',\n};\n\nexport default Header;\n","import Header from './Header';\nexport default Header;\n","import React, { useEffect, useState } from 'react';\nimport PropTypes from 'prop-types';\nimport { Button, FileUploaderButton } from 'carbon-components-react';\nimport fetch from 'isomorphic-fetch';\nimport models from '../../data/models.json';\n\nexport const SubmitContainer = ({\n isRecording,\n isSamplePlaying,\n isUploadPlaying,\n keywordText,\n modelName,\n onError,\n onStartPlayingFileUpload,\n onStopPlayingFileUpload,\n onStartPlayingSample,\n onStopPlayingSample,\n onStartRecording,\n onStopRecording,\n useSpeakerLabels,\n}) => {\n const [keywordList, setKeywordList] = useState([]);\n useEffect(() => {\n let newKeywordList = [];\n if (keywordText.length > 0) {\n newKeywordList = keywordText.split(',').map(k => k.trim());\n }\n setKeywordList(newKeywordList);\n }, [keywordText]);\n\n const sampleModelInfo = models.find(model => model.name === modelName);\n const sampleModelFilename = sampleModelInfo ? sampleModelInfo.filename : null;\n\n const getBaseAudioConfig = async () => {\n let authResponse;\n let authJson;\n authResponse = await fetch('/api/auth');\n authJson = await authResponse.json();\n if (!authResponse.ok) {\n onError(authJson);\n return {\n error: authJson,\n };\n }\n\n let options = {};\n\n // We'll lowercase these so that we can ignore cases when highlighting keyword\n // occurrences later on.\n const lowerCasedKeywords = keywordList.map(keyword =>\n keyword.toLowerCase(),\n );\n\n options = {\n ...options,\n url: authJson.url || undefined,\n accessToken: authJson.accessToken,\n format: true,\n keywords: keywordList.length > 0 ? lowerCasedKeywords : undefined,\n keywordsThreshold: keywordList.length > 0 ? 0.01 : undefined,\n model: modelName,\n objectMode: true,\n play: true,\n realtime: true,\n resultsBySpeaker: useSpeakerLabels,\n speakerlessInterim: true,\n timestamps: true,\n };\n\n return options;\n };\n\n const getSampleAudioConfig = async () => {\n const baseConfig = await getBaseAudioConfig();\n return {\n ...baseConfig,\n file: `audio/${sampleModelFilename}`,\n };\n };\n\n const getMicrophoneAudioConfig = async () => {\n const baseConfig = await getBaseAudioConfig();\n return {\n ...baseConfig,\n resultsBySpeaker: false,\n };\n };\n\n const getUploadAudioConfig = async file => {\n const baseConfig = await getBaseAudioConfig();\n return {\n ...baseConfig,\n file,\n resultsBySpeaker: false,\n };\n };\n\n return (\n
\n {isSamplePlaying ? (\n \n Stop audio sample\n \n ) : (\n {\n const config = await getSampleAudioConfig();\n if (!config.error) {\n onStartPlayingSample(config);\n }\n }}\n >\n Play audio sample\n \n )}\n {isRecording ? (\n \n Stop recording\n \n ) : (\n {\n const config = await getMicrophoneAudioConfig();\n if (!config.error) {\n onStartRecording(config);\n }\n }}\n >\n Record your own\n \n )}\n {isUploadPlaying ? (\n \n Stop playing\n \n ) : (\n {\n const uploadedFile = evt.currentTarget.files[0];\n const config = await getUploadAudioConfig(uploadedFile);\n if (!config.error) {\n onStartPlayingFileUpload(config);\n }\n }}\n />\n )}\n
\n );\n};\n\nSubmitContainer.propTypes = {\n isRecording: PropTypes.bool,\n isSamplePlaying: PropTypes.bool,\n isUploadPlaying: PropTypes.bool,\n keywordText: PropTypes.string,\n modelName: PropTypes.string,\n onError: PropTypes.func,\n onStartPlayingFileUpload: PropTypes.func,\n onStopPlayingFileUpload: PropTypes.func,\n onStartPlayingSample: PropTypes.func,\n onStopPlayingSample: PropTypes.func,\n onStartRecording: PropTypes.func,\n onStopRecording: PropTypes.func,\n useSpeakerLabels: PropTypes.bool,\n};\n\nSubmitContainer.defaultProps = {\n isRecording: false,\n isSamplePlaying: false,\n isUploadPlaying: false,\n keywordText: '',\n modelName: null,\n onError: () => {},\n onStartPlayingFileUpload: () => {},\n onStopPlayingFileUpload: () => {},\n onStartPlayingSample: () => {},\n onStopPlayingSample: () => {},\n onStartRecording: () => {},\n onStopRecording: () => {},\n useSpeakerLabels: false,\n};\n\nexport default SubmitContainer;\n","import SubmitContainer from './SubmitContainer';\nexport default SubmitContainer;\n","import React, { useState } from 'react';\nimport PropTypes from 'prop-types';\nimport {\n Dropdown,\n FormGroup,\n TextArea,\n Tile,\n ToggleSmall,\n} from 'carbon-components-react';\nimport SubmitContainer from '../SubmitContainer';\nimport models from '../../data/models.json';\n\nexport const ControlContainer = ({\n isRecording,\n isSamplePlaying,\n isUploadPlaying,\n onError,\n onSelectNewModel,\n onStartPlayingFileUpload,\n onStopPlayingFileUpload,\n onStartPlayingSample,\n onStopPlayingSample,\n onStartRecording,\n onStopRecording,\n}) => {\n const dropdownChoices = models.map(model => ({\n id: model.name,\n label: model.description,\n supportsSpeakerLabels: model.supported_features.speaker_labels,\n }));\n\n const [model, selectModel] = useState(dropdownChoices[0]);\n const [keywordText, setKeywordText] = useState(models[0].keywords);\n const [useSpeakerLabels, setUseSpeakerLabels] = useState(false);\n\n const onChangeLanguageModel = newModel => {\n selectModel(newModel.selectedItem);\n\n const newKeywordText = models.find(\n model => model.name === newModel.selectedItem.id,\n ).keywords;\n setKeywordText(newKeywordText);\n\n if (useSpeakerLabels && !newModel.selectedItem.supportsSpeakerLabels) {\n setUseSpeakerLabels(false);\n }\n\n onSelectNewModel();\n };\n\n return (\n \n

Input

\n \n \n \n \n {\n setKeywordText(evt.target.value);\n }}\n light\n />\n \n \n {\n setUseSpeakerLabels(!useSpeakerLabels);\n }}\n />\n \n \n
\n );\n};\n\nControlContainer.propTypes = {\n isRecording: PropTypes.bool,\n isSamplePlaying: PropTypes.bool,\n isUploadPlaying: PropTypes.bool,\n onError: PropTypes.func,\n onSelectNewModel: PropTypes.func,\n onStartPlayingFileUpload: PropTypes.func,\n onStopPlayingFileUpload: PropTypes.func,\n onStartPlayingSample: PropTypes.func,\n onStopPlayingSample: PropTypes.func,\n onStartRecording: PropTypes.func,\n onStopRecording: PropTypes.func,\n};\n\nControlContainer.defaultProps = {\n isRecording: false,\n isSamplePlaying: false,\n isUploadPlaying: false,\n onError: () => {},\n onSelectNewModel: () => {},\n onStartPlayingFileUpload: () => {},\n onStopPlayingFileUpload: () => {},\n onStartPlayingSample: () => {},\n onStopPlayingSample: () => {},\n onStartRecording: () => {},\n onStopRecording: () => {},\n};\n\nexport default ControlContainer;\n","import ControlContainer from './ControlContainer';\nexport default ControlContainer;\n","import React from 'react';\nimport PropTypes from 'prop-types';\n\nconst DATA_POINT_WIDTH = 1;\nconst DATA_POINT_HEIGHT = 50;\nconst DATA_POINT_MARGIN = 2;\nconst DATA_POINT_Y_OFFSET = 50;\n\nexport class AudioWave extends React.Component {\n constructor(props) {\n super(props);\n\n this.audioWaveCanvasRef = React.createRef();\n this.audioWaveCanvasCtx = null;\n this.animationFrameId = null;\n\n this.draw = this.draw.bind(this);\n\n this.state = {\n startingTimestamp: 0,\n microphoneData: new Uint8Array(1024),\n };\n }\n\n componentDidMount() {\n this.audioWaveCanvasCtx = this.audioWaveCanvasRef.current.getContext('2d');\n this.audioWaveCanvasCtx.lineCap = 'round';\n this.initializeCanvasDimensions();\n }\n\n componentDidUpdate(prevProps) {\n const [firstPrevValue] = prevProps.data;\n const [firstCurrentValue] = this.props.data;\n\n if (firstPrevValue !== firstCurrentValue) {\n this.drawInitialAudioWave();\n }\n\n if (\n prevProps.isTranscribing === false &&\n this.props.isTranscribing === true\n ) {\n this.setStartingTimestamp();\n this.draw();\n } else if (\n prevProps.isTranscribing === true &&\n this.props.isTranscribing === false\n ) {\n this.stopDrawing();\n }\n }\n\n setStartingTimestamp() {\n this.setState({ startingTimestamp: Date.now() });\n }\n\n initializeCanvasDimensions() {\n const canvasCtx = this.audioWaveCanvasRef.current;\n const audioWaveContainer = this.props.audioWaveContainerRef.current;\n\n canvasCtx.width = audioWaveContainer.clientWidth;\n canvasCtx.height = 100;\n }\n\n drawInitialAudioWave() {\n this.resetCanvasForNewFrame();\n this.drawEmptyDataPoints();\n }\n\n drawEmptyDataPoints() {\n const { data } = this.props;\n data.forEach((dataPoint, i) => {\n this.audioWaveCanvasCtx.beginPath();\n\n this.audioWaveCanvasCtx.fillStyle = 'rgba(0, 98, 255, 0.5)';\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n -DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.stroke();\n this.audioWaveCanvasCtx.closePath();\n });\n }\n\n drawMicrophoneDataPoints() {\n this.props.audioAnalyzer.getByteFrequencyData(this.state.microphoneData);\n\n const { microphoneData } = this.state;\n const arrayData = [].slice.call(microphoneData);\n const floatArray = arrayData.map(n => n / 255);\n\n floatArray.forEach((dataPoint, i) => {\n 
this.audioWaveCanvasCtx.beginPath();\n\n this.audioWaveCanvasCtx.fillStyle = 'rgba(0, 98, 255, 1)';\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n -DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.stroke();\n this.audioWaveCanvasCtx.closePath();\n });\n }\n\n drawAudioDataPoints() {\n const { data, duration } = this.props;\n\n // Make time calculations.\n const now = Date.now();\n const { startingTimestamp } = this.state;\n const timeElapsed = now - startingTimestamp;\n const audioProgressPercent = timeElapsed / duration;\n\n // Draw the audio lines.\n const numberOfDataPoints = data.length;\n const highlightIndex = numberOfDataPoints * audioProgressPercent;\n\n const wholeHighlightIndex = Math.floor(highlightIndex);\n let decimalOpacityIndex = highlightIndex - wholeHighlightIndex;\n\n if (decimalOpacityIndex > 1) {\n decimalOpacityIndex = 1;\n }\n\n if (decimalOpacityIndex < 0.5) {\n decimalOpacityIndex = 0.5;\n }\n\n data.forEach((dataPoint, i) => {\n this.audioWaveCanvasCtx.beginPath();\n\n let fillColor = 'rgba(0, 98, 255, 1)';\n if (i > wholeHighlightIndex) {\n fillColor = 'rgba(0, 98, 255, 0.5)';\n }\n\n if (i - 1 === wholeHighlightIndex) {\n fillColor = `rgba(0, 98, 255, ${decimalOpacityIndex.toFixed(2)})`;\n }\n\n this.audioWaveCanvasCtx.fillStyle = fillColor;\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.fillRect(\n i * DATA_POINT_MARGIN, // x position\n DATA_POINT_Y_OFFSET, // y position\n DATA_POINT_WIDTH, // rect width\n -DATA_POINT_HEIGHT * dataPoint, // rect height\n );\n this.audioWaveCanvasCtx.stroke();\n this.audioWaveCanvasCtx.closePath();\n });\n }\n\n draw() {\n this.resetCanvasForNewFrame();\n\n if (this.props.audioSource && this.props.audioSource === 'microphone') {\n this.drawMicrophoneDataPoints();\n } else {\n this.drawAudioDataPoints();\n }\n\n this.animationFrameId = requestAnimationFrame(this.draw);\n }\n\n resetCanvasForNewFrame() {\n const audioWaveCanvas = this.audioWaveCanvasRef.current;\n this.audioWaveCanvasCtx.clearRect(\n 0,\n 0,\n audioWaveCanvas.width,\n audioWaveCanvas.height,\n );\n }\n\n stopDrawing() {\n if (this.animationFrameId) {\n cancelAnimationFrame(this.animationFrameId);\n }\n }\n\n render() {\n return (\n
\n \n
\n );\n }\n}\n\nAudioWave.propTypes = {\n data: PropTypes.array.isRequired,\n duration: PropTypes.number.isRequired,\n isTranscribing: PropTypes.bool.isRequired,\n audioWaveContainerRef: PropTypes.object.isRequired,\n audioSource: PropTypes.string,\n audioAnalyzer: PropTypes.object,\n};\n\nAudioWave.defaultProps = {\n audioSource: null,\n audioAnalyzer: null,\n};\n\nexport default AudioWave;\n","import AudioWave from './AudioWave';\nexport default AudioWave;\n","import KeywordTooltip from './KeywordTooltip';\nexport default KeywordTooltip;\n","import React from 'react';\nimport PropTypes from 'prop-types';\n\nexport const KeywordTooltip = ({ confidence, startTime, endTime }) => (\n
\n

Confidence: {confidence}

\n

\n {startTime}s - {endTime}s\n

\n
\n);\n\nKeywordTooltip.propTypes = {\n confidence: PropTypes.number.isRequired,\n startTime: PropTypes.number.isRequired,\n endTime: PropTypes.number.isRequired,\n};\n\nexport default KeywordTooltip;\n","import React from 'react';\nimport PropTypes from 'prop-types';\nimport { TooltipDefinition } from 'carbon-components-react';\nimport KeywordTooltip from '../KeywordTooltip';\nimport { createWordRegex } from './utils';\n\nconst mapTranscriptTextToElements = (text, keywordInfo, totalIndex) => {\n let finalSentenceArray = [];\n let matches = [];\n\n if (keywordInfo.length > 0) {\n const regex = createWordRegex(keywordInfo);\n matches = text.split(regex);\n }\n\n // If we don't have words to find yet, just return the interim text.\n if (matches.length === 0) {\n return [\n {\n text,\n type: 'normal',\n },\n ];\n }\n\n const wordOccurences = {};\n finalSentenceArray = matches.map((sentenceFragment, index) => {\n // Use lowercased version when searching through keyword map.\n const fragmentToSearch = sentenceFragment.toLowerCase();\n\n if (index % 2 === 0) {\n return {\n text: sentenceFragment,\n type: 'normal',\n };\n }\n\n // Find keyword info object to use based on text from sentenceFragment and\n // current index in wordOccurences.\n const keywordInfoMatch =\n keywordInfo[totalIndex] && keywordInfo[totalIndex][fragmentToSearch];\n let keywordOccurenceIndex = 0;\n if (wordOccurences[fragmentToSearch]) {\n keywordOccurenceIndex = wordOccurences[fragmentToSearch];\n wordOccurences[fragmentToSearch] += 1;\n } else {\n wordOccurences[fragmentToSearch] = 1;\n }\n const infoForOccurence =\n keywordInfoMatch && keywordInfoMatch[keywordOccurenceIndex];\n\n // Bail in case we can't get the keyword info for whatever reason.\n if (!infoForOccurence) {\n return {};\n }\n\n return {\n text: sentenceFragment,\n type: 'keyword',\n startTime: infoForOccurence.start_time,\n endTime: infoForOccurence.end_time,\n confidence: infoForOccurence.confidence,\n };\n });\n\n return finalSentenceArray;\n};\n\nexport const TranscriptBox = ({ keywordInfo, transcriptArray }) => {\n return (\n
\n {transcriptArray.map((transcriptItem, overallIndex) => {\n const { speaker, text } = transcriptItem;\n const parsedTextElements = mapTranscriptTextToElements(\n text,\n keywordInfo,\n overallIndex,\n );\n\n return (\n
\n {speaker !== null && (\n \n {`Speaker ${speaker}: `}\n \n )}\n {parsedTextElements.map((element, elementIndex) => {\n if (!element) {\n return null;\n }\n\n if (element.type === 'normal') {\n return (\n {`${element.text}`}\n );\n } else if (element.type === 'keyword') {\n return (\n \n }\n triggerClassName=\"keyword-info-trigger\"\n >\n {element.text}\n \n );\n }\n\n return null;\n })}\n
\n );\n })}\n
\n );\n};\n\nTranscriptBox.propTypes = {\n keywordInfo: PropTypes.arrayOf(PropTypes.object),\n transcriptArray: PropTypes.arrayOf(PropTypes.object),\n};\n\nTranscriptBox.defaultProps = {\n keywordInfo: [],\n transcriptArray: [],\n};\n\nexport default TranscriptBox;\n","export const createWordRegex = keywordInfo => {\n let allKeywords = [];\n keywordInfo.forEach(sectionKeywords => {\n allKeywords = [...allKeywords, ...Object.keys(sectionKeywords)];\n });\n const regexArray = allKeywords.map((word, index) => {\n if (index !== allKeywords.length - 1) {\n return `${word}|`;\n }\n return word;\n });\n const regexWordSearch = regexArray.reduce((arr, str) => arr + str, '');\n const regex = new RegExp(`(${regexWordSearch})(?!')`, 'gi');\n return regex;\n};\n","import TranscriptBox from './TranscriptBox';\nexport default TranscriptBox;\n","import React from 'react';\nimport PropTypes from 'prop-types';\nimport { FormGroup, Tile } from 'carbon-components-react';\nimport AudioWave from '../AudioWave';\nimport TranscriptBox from '../TranscriptBox';\n\nexport const OutputContainer = ({\n audioAnalyzer,\n audioDataArray,\n audioDuration,\n audioSource,\n audioWaveContainerRef,\n isTranscribing,\n keywordInfo,\n transcriptArray,\n}) => (\n \n

Output

\n \n \n \n \n \n \n
\n);\n\nOutputContainer.propTypes = {\n  audioAnalyzer: PropTypes.object.isRequired,\n  audioDataArray: PropTypes.arrayOf(PropTypes.number),\n  audioDuration: PropTypes.number,\n  audioSource: PropTypes.string,\n  audioWaveContainerRef: PropTypes.object.isRequired,\n  isTranscribing: PropTypes.bool,\n  keywordInfo: PropTypes.arrayOf(PropTypes.object),\n  transcriptArray: PropTypes.arrayOf(PropTypes.object),\n};\n\nOutputContainer.defaultProps = {\n  audioDataArray: [],\n  audioDuration: 0,\n  audioSource: '',\n  isTranscribing: false,\n  keywordInfo: [],\n  transcriptArray: [],\n};\n\nexport default OutputContainer;\n","import OutputContainer from './OutputContainer';\nexport default OutputContainer;\n","import React, { useEffect, useState } from 'react';\nimport PropTypes from 'prop-types';\nimport { ToastNotification } from 'carbon-components-react';\n\nconst NOTIFICATION_HAS_BEEN_SEEN = 'notificationHasBeenSeen';\n\nexport const Toast = ({\n  caption,\n  children,\n  className,\n  hideAfterFirstDisplay,\n  hideCloseButton,\n  iconDescription,\n  kind,\n  lowContrast,\n  onCloseButtonClick,\n  role,\n  subtitle,\n  timeout,\n  title,\n}) => {\n  const [id, setId] = useState();\n  const [hideToast, setHideToast] = useState(false);\n\n  useEffect(() => {\n    setId(\n      Math.random()\n        .toString(36)\n        .substring(2, 15) +\n        Math.random()\n          .toString(36)\n          .substring(2, 15),\n    );\n  }, []);\n\n  useEffect(() => {\n    const element = document.querySelector(`.custom-toast-${id}`);\n    if (element) {\n      element.className += 'enter';\n    }\n  }, [id]);\n\n  useEffect(() => {\n    if (\n      hideAfterFirstDisplay &&\n      typeof window !== 'undefined' &&\n      typeof window.localStorage !== 'undefined' &&\n      window.localStorage.getItem(NOTIFICATION_HAS_BEEN_SEEN) === 'true'\n    ) {\n      setHideToast(true);\n    }\n  }, [hideAfterFirstDisplay]);\n\n  return hideToast ? 
null : (\n    {\n        if (\n          hideAfterFirstDisplay &&\n          typeof window !== 'undefined' &&\n          typeof window.localStorage !== 'undefined'\n        ) {\n          window.localStorage.setItem(NOTIFICATION_HAS_BEEN_SEEN, 'true');\n        }\n        onCloseButtonClick();\n      }}\n      role={role}\n      subtitle={subtitle}\n      timeout={timeout}\n      title={title}\n    >\n      {children}\n    \n  );\n};\n\nToast.propTypes = {\n  caption: PropTypes.string,\n  children: PropTypes.node,\n  className: PropTypes.string,\n  hideAfterFirstDisplay: PropTypes.bool,\n  hideCloseButton: PropTypes.bool,\n  iconDescription: PropTypes.string,\n  kind: PropTypes.string,\n  lowContrast: PropTypes.bool,\n  onCloseButtonClick: PropTypes.func,\n  role: PropTypes.string,\n  subtitle: PropTypes.string,\n  timeout: PropTypes.number,\n  title: PropTypes.string,\n};\n\nToast.defaultProps = {\n  caption: '',\n  children: null,\n  className: '',\n  hideAfterFirstDisplay: true,\n  hideCloseButton: false,\n  iconDescription: 'closes notification',\n  kind: 'error',\n  lowContrast: false,\n  onCloseButtonClick: () => {},\n  role: 'alert',\n  subtitle: '',\n  timeout: 0,\n  title: '',\n};\n\nexport default Toast;\n","import Toast from './Toast';\n\nexport default Toast;\n","export const actionTypes = {\n  setAudioAnalyzer: 'SET_AUDIO_ANALYZER',\n  setAudioContext: 'SET_AUDIO_CONTEXT',\n  setAudioSource: 'SET_AUDIO_SOURCE',\n  setAudioStream: 'SET_AUDIO_STREAM',\n  setAudioVisualizationData: 'SET_AUDIO_VISUALIZATION_DATA',\n  setError: 'SET_ERROR',\n  setSpeakerLabels: 'SET_SPEAKER_LABELS',\n  setIsRecording: 'SET_IS_RECORDING',\n  setIsSamplePlaying: 'SET_IS_SAMPLE_PLAYING',\n  setIsTranscribing: 'SET_IS_TRANSCRIBING',\n  setIsUploadPlaying: 'SET_IS_UPLOAD_PLAYING',\n  updateResults: 'UPDATE_RESULTS',\n};\n\nexport const initialState = {\n  audioAnalyzer: {},\n  audioContext: null,\n  audioDataArray: [],\n  audioDurationInMs: 0,\n  audioSource: '',\n  audioStream: null,\n  error: null,\n  isRecording: false,\n  isSamplePlaying: false,\n  isTranscribing: false,\n  isUploadPlaying: false,\n  keywordInfo: [],\n  speakerLabels: [],\n  transcript: [],\n};\n\nexport const reducer = (state, action) => {\n  switch (action.type) {\n    case 'SET_AUDIO_ANALYZER': {\n      return {\n        ...state,\n        audioAnalyzer: action.audioAnalyzer,\n      };\n    }\n    case 'SET_AUDIO_CONTEXT': {\n      return {\n        ...state,\n        audioContext: action.audioContext,\n      };\n    }\n    case 'SET_AUDIO_SOURCE': {\n      return {\n        ...state,\n        audioSource: action.audioSource,\n      };\n    }\n    case 'SET_AUDIO_STREAM': {\n      return {\n        ...state,\n        audioStream: action.audioStream,\n      };\n    }\n    case 'SET_AUDIO_VISUALIZATION_DATA': {\n      return {\n        ...state,\n        audioDataArray: action.audioDataArray,\n        audioDurationInMs: action.audioDurationInMs,\n      };\n    }\n    case 'SET_ERROR': {\n      return {\n        ...state,\n        error: action.error,\n      };\n    }\n    case 'SET_IS_RECORDING': {\n      return {\n        ...state,\n        isRecording: action.isRecording,\n      };\n    }\n    case 'SET_IS_SAMPLE_PLAYING': {\n      return {\n        ...state,\n        isSamplePlaying: action.isSamplePlaying,\n      };\n    }\n    case 'SET_IS_TRANSCRIBING': {\n      return {\n        ...state,\n        isTranscribing: action.isTranscribing,\n      };\n    }\n    case 'SET_IS_UPLOAD_PLAYING': {\n      return {\n        ...state,\n        isUploadPlaying: action.isUploadPlaying,\n      };\n    }\n    case 'SET_SPEAKER_LABELS': {\n      return {\n        ...state,\n        speakerLabels: action.speakerLabels,\n      };\n    }\n    case 'UPDATE_RESULTS': {\n      let updatedTranscript = [...state.transcript];\n      if (action.resultIndex === 0) {\n        updatedTranscript = action.transcript;\n      } else {\n        updatedTranscript[action.resultIndex] = action.transcript[0];\n      }\n\n      return {\n        ...state,\n        keywordInfo: action.keywordInfo,\n        
transcript: updatedTranscript,\n };\n }\n default: {\n throw new Error();\n }\n }\n};\n","const AUDIO_VISUALIZATION_DIMENSIONS = {\n DATA_POINT_WIDTH: 1,\n DATA_POINT_HEIGHT: 50,\n DATA_POINT_MARGIN: 2,\n DATA_POINT_X_OFFSET: 25,\n DATA_POINT_Y_OFFSET: 50,\n};\n\nconst readFileToArrayBuffer = fileData => {\n const fileReader = new FileReader();\n\n return new Promise((resolve, reject) => {\n fileReader.onload = () => {\n const arrayBuffer = fileReader.result;\n resolve(arrayBuffer);\n };\n\n fileReader.onerror = () => {\n fileReader.abort();\n reject(new Error('failed to process file'));\n };\n\n // Initiate the conversion.\n fileReader.readAsArrayBuffer(fileData);\n });\n};\n\nexport const formatStreamData = data => {\n const { results, result_index: resultIndex } = data;\n\n let finalKeywords = [];\n const finalTranscript = [];\n let isFinal = false;\n\n results.forEach(result => {\n const { final } = result;\n let alternatives = null;\n let speaker = null;\n let keywords_result = null;\n\n if (final) {\n ({ alternatives, speaker, keywords_result } = result);\n } else {\n ({ alternatives, speaker } = result);\n }\n\n // Extract the main alternative to get keywords.\n const [mainAlternative] = alternatives;\n const { transcript } = mainAlternative;\n\n if (speaker === undefined) {\n speaker = null;\n }\n\n // Push object to final transcript.\n finalTranscript.push({\n final,\n speaker,\n text: transcript,\n });\n\n isFinal = final;\n\n // Push keywords to final keyword list.\n if (keywords_result) {\n finalKeywords.push(keywords_result);\n }\n });\n\n return {\n transcript: finalTranscript,\n keywordInfo: finalKeywords,\n resultIndex,\n final: isFinal,\n };\n};\n\nexport const convertAudioBlobToVisualizationData = async (\n audioBlob,\n audioCtx,\n audioWaveContainerWidth,\n) => {\n const audioArrayBuffer = await readFileToArrayBuffer(audioBlob);\n const audioUint8Array = new Uint8Array(audioArrayBuffer.slice(0));\n\n // NOTE: BaseAudioContext.decodeAudioData has a promise syntax\n // which we are unable to use in order to be compatible with Safari.\n // Therefore, we wrap the callback syntax in a promise to give us the same\n // effect while ensuring compatibility\n // see more: https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/decodeAudioData#Browser_compatibility\n return new Promise((resolve, reject) => {\n audioCtx.decodeAudioData(\n audioArrayBuffer,\n audioDataBuffer => {\n const { duration } = audioDataBuffer;\n\n const { DATA_POINT_MARGIN } = AUDIO_VISUALIZATION_DIMENSIONS;\n const validContainerWidth =\n audioWaveContainerWidth - DATA_POINT_MARGIN * 2;\n const numberOfChunks = Math.floor(validContainerWidth / 2);\n const chunkSize = audioUint8Array.length / numberOfChunks;\n\n const chunkedAudioDataArray = [];\n for (let i = 1; i < numberOfChunks; i += 1) {\n let previousIndex = i - 1;\n if (previousIndex < 0) {\n previousIndex = 0;\n }\n\n chunkedAudioDataArray.push(\n audioUint8Array.slice(previousIndex * chunkSize, i * chunkSize),\n );\n }\n\n const reducedFloatArray = chunkedAudioDataArray.map(chunk => {\n const totalValue = chunk.reduce(\n (prevValue, currentValue) => prevValue + currentValue,\n );\n const floatValue = totalValue / (chunkSize * 255);\n return floatValue;\n });\n\n resolve({\n duration,\n reducedFloatArray,\n });\n },\n () => {\n reject(new Error('failed to chunk audio'));\n },\n );\n });\n};\n","export const createError = (title, description) => {\n return {\n title,\n description,\n };\n};\n","import ServiceContainer from 
'./ServiceContainer';\nexport default ServiceContainer;\n","import React, { useEffect, useReducer, useRef } from 'react';\nimport recognizeFile from 'watson-speech/speech-to-text/recognize-file';\nimport recognizeMicrophone from 'watson-speech/speech-to-text/recognize-microphone';\nimport ControlContainer from '../ControlContainer';\nimport OutputContainer from '../OutputContainer';\nimport Toast from '../Toast';\nimport { actionTypes, initialState, reducer } from './reducer';\nimport { convertAudioBlobToVisualizationData, formatStreamData } from './utils';\nimport { createError } from '../../utils';\n\nconst FILE_UPLOAD_ERROR_TITLE = 'File upload error';\nconst FILE_UPLOAD_ERROR_DESCRIPTION =\n 'There was a problem trying to read the file.';\nconst NO_MICROPHONE_TITLE = 'No microphone detected';\nconst NO_MICROPHONE_DESCRIPTION = 'Cannot transcribe from microphone.';\nconst AUDIO_TRANSCRIPTION_ERROR_TITLE = 'Audio transcription error';\nconst AUDIO_TRANSCRIPTION_ERROR_DESCRIPTION =\n 'There was an error trying to read the audio data. Please try again.';\nconst GDPR_DISCLAIMER =\n 'This system is for demonstration purposes only and is not intended to process Personal Data. No Personal Data is to be entered into this system as it may not have the necessary controls in place to meet the requirements of the General Data Protection Regulation (EU) 2016/679.';\n\nexport const ServiceContainer = () => {\n const [state, dispatch] = useReducer(reducer, initialState);\n const audioWaveContainerRef = useRef(null);\n\n useEffect(() => {\n const audioContext = new (window.AudioContext ||\n window.webkitAudioContext)();\n const audioAnalyzer = audioContext.createAnalyser();\n\n dispatch({\n audioAnalyzer,\n type: actionTypes.setAudioAnalyzer,\n });\n dispatch({\n audioContext,\n type: actionTypes.setAudioContext,\n });\n }, []);\n\n const parseResults = data => {\n if (data.speaker_labels) {\n dispatch({\n speakerLabels: data.speaker_labels,\n type: actionTypes.setSpeakerLabels,\n });\n } else {\n const { transcript, keywordInfo, resultIndex } = formatStreamData(data);\n\n dispatch({\n keywordInfo,\n resultIndex,\n transcript,\n type: actionTypes.updateResults,\n });\n }\n };\n\n const handleStreamEnd = () => {\n if (state.audioStream) {\n state.audioStream.stop();\n }\n\n dispatch({\n isTranscribing: false,\n type: actionTypes.setIsTranscribing,\n });\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n isSamplePlaying: false,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n };\n\n const readAudioFileForVisualization = async filename => {\n let containerClientWidth = null;\n if (\n audioWaveContainerRef &&\n audioWaveContainerRef.current &&\n audioWaveContainerRef.current.clientWidth\n ) {\n containerClientWidth = audioWaveContainerRef.current.clientWidth;\n }\n const audioVisualizationWidth = containerClientWidth || 300;\n\n const isFileType = filename instanceof File;\n try {\n let audioBlob = null;\n\n if (isFileType) {\n audioBlob = filename;\n } else {\n const audioRequest = await fetch(filename);\n audioBlob = await audioRequest.blob();\n }\n const {\n reducedFloatArray,\n duration,\n } = await convertAudioBlobToVisualizationData(\n audioBlob,\n state.audioContext,\n audioVisualizationWidth,\n );\n\n dispatch({\n audioDataArray: reducedFloatArray,\n audioDurationInMs: duration * 1000,\n type: actionTypes.setAudioVisualizationData,\n });\n } catch (err) {\n dispatch({\n 
error: createError(\n FILE_UPLOAD_ERROR_TITLE,\n FILE_UPLOAD_ERROR_DESCRIPTION,\n ),\n type: actionTypes.setError,\n });\n }\n };\n\n const captureAudioFromMicrophone = async recognizeOptions => {\n let mediaStream = null;\n try {\n mediaStream = await navigator.mediaDevices.getUserMedia({\n video: false,\n audio: true,\n });\n } catch (err) {\n dispatch({\n error: createError(NO_MICROPHONE_TITLE, NO_MICROPHONE_DESCRIPTION),\n type: actionTypes.setError,\n });\n }\n\n const recognizeMicrophoneStream = recognizeMicrophone({\n ...recognizeOptions,\n mediaStream,\n keepMic: true,\n });\n\n if (mediaStream) {\n const updatedAudioAnalyzer = state.audioAnalyzer;\n updatedAudioAnalyzer.fttSize = 2048;\n dispatch({\n audioAnalyzer: updatedAudioAnalyzer,\n type: actionTypes.setAudioAnalyzer,\n });\n const mediaStreamSource = state.audioContext.createMediaStreamSource(\n mediaStream,\n );\n mediaStreamSource.connect(state.audioAnalyzer);\n }\n\n return recognizeMicrophoneStream;\n };\n\n const onSubmit = stream => {\n stream\n .on('data', data => {\n parseResults(data);\n })\n .on('end', () => {\n handleStreamEnd();\n })\n .on('error', () => {\n dispatch({\n error: createError(\n AUDIO_TRANSCRIPTION_ERROR_TITLE,\n AUDIO_TRANSCRIPTION_ERROR_DESCRIPTION,\n ),\n type: actionTypes.setError,\n });\n\n handleStreamEnd();\n });\n\n dispatch({\n isTranscribing: true,\n type: actionTypes.setIsTranscribing,\n });\n };\n\n const cleanUpOldStreamIfNecessary = () => {\n if (state.audioStream) {\n state.audioStream.stop();\n state.audioStream.removeAllListeners();\n state.audioStream.recognizeStream.removeAllListeners();\n }\n\n if (state.audioContext && state.audioContext.state === 'suspended') {\n state.audioContext.resume();\n }\n };\n\n const onSelectNewModel = () => {\n dispatch({\n audioDataArray: [],\n audioDurationInMs: 0,\n type: actionTypes.setAudioVisualizationData,\n });\n dispatch({\n keywordInfo: [],\n resultIndex: 0,\n transcript: [],\n type: actionTypes.updateResults,\n });\n };\n\n const onStartPlayingFileUpload = async recognizeConfig => {\n cleanUpOldStreamIfNecessary();\n\n const stream = recognizeFile(recognizeConfig);\n await readAudioFileForVisualization(recognizeConfig.file);\n dispatch({\n isUploadPlaying: true,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n isSamplePlaying: false,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n dispatch({\n audioSource: 'upload',\n type: actionTypes.setAudioSource,\n });\n dispatch({\n audioStream: stream,\n type: actionTypes.setAudioStream,\n });\n\n onSubmit(stream);\n };\n\n const onStopPlayingFileUpload = () => {\n handleStreamEnd();\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n };\n\n const onStartPlayingSample = async recognizeConfig => {\n cleanUpOldStreamIfNecessary();\n\n const stream = recognizeFile(recognizeConfig);\n await readAudioFileForVisualization(recognizeConfig.file);\n dispatch({\n isSamplePlaying: true,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n dispatch({\n audioSource: 'sample',\n type: actionTypes.setAudioSource,\n });\n dispatch({\n audioStream: stream,\n type: actionTypes.setAudioStream,\n });\n\n onSubmit(stream);\n };\n\n const onStopPlayingSample = () => {\n handleStreamEnd();\n dispatch({\n isSamplePlaying: false,\n type: 
actionTypes.setIsSamplePlaying,\n });\n };\n\n const onStartRecording = async recognizeConfig => {\n cleanUpOldStreamIfNecessary();\n\n const stream = await captureAudioFromMicrophone(recognizeConfig);\n dispatch({\n isRecording: true,\n type: actionTypes.setIsRecording,\n });\n dispatch({\n isSamplePlaying: false,\n type: actionTypes.setIsSamplePlaying,\n });\n dispatch({\n isUploadPlaying: false,\n type: actionTypes.setIsUploadPlaying,\n });\n dispatch({\n audioSource: 'microphone',\n type: actionTypes.setAudioSource,\n });\n dispatch({\n audioStream: stream,\n type: actionTypes.setAudioStream,\n });\n\n onSubmit(stream);\n };\n\n const onStopRecording = () => {\n handleStreamEnd();\n dispatch({\n isRecording: false,\n type: actionTypes.setIsRecording,\n });\n };\n\n const onError = error => {\n dispatch({\n error,\n type: actionTypes.setError,\n });\n };\n\n return (\n
\n \n {state.error && (\n \n dispatch({ error: null, type: actionTypes.setError })\n }\n />\n )}\n \n \n
\n );\n};\n\nexport default ServiceContainer;\n","import { useEffect } from 'react';\n\nconst useScript = url => {\n useEffect(() => {\n const script = document.createElement('script');\n\n script.src = url;\n script.async = true;\n\n document.body.appendChild(script);\n\n return () => {\n document.body.removeChild(script);\n };\n }, [url]);\n};\n\nexport default useScript;\n","import React from 'react';\nimport { Button, Link } from 'carbon-components-react';\nimport { default as Api124 } from '@carbon/icons-react/lib/API--1/24';\nimport Document24 from '@carbon/icons-react/lib/document/24';\nimport IbmCloud24 from '@carbon/icons-react/lib/ibm-cloud/24';\nimport Launch16 from '@carbon/icons-react/lib/launch/16';\nimport LogoGithub24 from '@carbon/icons-react/lib/logo--github/24';\nimport Header from './components/Header';\nimport ServiceContainer from './components/ServiceContainer';\nimport useScript from './hooks/useScript';\n\nconst HEADER_TITLE = 'Watson Speech to Text';\nconst HEADER_DESCRIPTION =\n 'IBM Watson Speech to Text is a cloud-native API that transforms voice into written text.';\nconst HEADER_LINKS = [\n \n

API reference

\n \n ,\n \n

Documentation

\n \n ,\n \n

GitHub

\n \n ,\n \n \n \n ,\n];\n\nexport const App = () => {\n useScript(\n 'https://cdn.jsdelivr.net/gh/watson-developer-cloud/watson-developer-cloud.github.io@master/analytics.js',\n );\n\n return (\n
\n \n \n
\n );\n};\n\nexport default App;\n","// This optional code is used to register a service worker.\n// register() is not called by default.\n\n// This lets the app load faster on subsequent visits in production, and gives\n// it offline capabilities. However, it also means that developers (and users)\n// will only see deployed updates on subsequent visits to a page, after all the\n// existing tabs open on the page have been closed, since previously cached\n// resources are updated in the background.\n\n// To learn more about the benefits of this model and instructions on how to\n// opt-in, read https://bit.ly/CRA-PWA\n\nconst isLocalhost = Boolean(\n window.location.hostname === 'localhost' ||\n // [::1] is the IPv6 localhost address.\n window.location.hostname === '[::1]' ||\n // 127.0.0.0/8 are considered localhost for IPv4.\n window.location.hostname.match(\n /^127(?:\\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/\n )\n);\n\nexport function register(config) {\n if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {\n // The URL constructor is available in all browsers that support SW.\n const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);\n if (publicUrl.origin !== window.location.origin) {\n // Our service worker won't work if PUBLIC_URL is on a different origin\n // from what our page is served on. This might happen if a CDN is used to\n // serve assets; see https://github.com/facebook/create-react-app/issues/2374\n return;\n }\n\n window.addEventListener('load', () => {\n const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;\n\n if (isLocalhost) {\n // This is running on localhost. Let's check if a service worker still exists or not.\n checkValidServiceWorker(swUrl, config);\n\n // Add some additional logging to localhost, pointing developers to the\n // service worker/PWA documentation.\n navigator.serviceWorker.ready.then(() => {\n console.log(\n 'This web app is being served cache-first by a service ' +\n 'worker. To learn more, visit https://bit.ly/CRA-PWA'\n );\n });\n } else {\n // Is not localhost. Just register service worker\n registerValidSW(swUrl, config);\n }\n });\n }\n}\n\nfunction registerValidSW(swUrl, config) {\n navigator.serviceWorker\n .register(swUrl)\n .then(registration => {\n registration.onupdatefound = () => {\n const installingWorker = registration.installing;\n if (installingWorker == null) {\n return;\n }\n installingWorker.onstatechange = () => {\n if (installingWorker.state === 'installed') {\n if (navigator.serviceWorker.controller) {\n // At this point, the updated precached content has been fetched,\n // but the previous service worker will still serve the older\n // content until all client tabs are closed.\n console.log(\n 'New content is available and will be used when all ' +\n 'tabs for this page are closed. See https://bit.ly/CRA-PWA.'\n );\n\n // Execute callback\n if (config && config.onUpdate) {\n config.onUpdate(registration);\n }\n } else {\n // At this point, everything has been precached.\n // It's the perfect time to display a\n // \"Content is cached for offline use.\" message.\n console.log('Content is cached for offline use.');\n\n // Execute callback\n if (config && config.onSuccess) {\n config.onSuccess(registration);\n }\n }\n }\n };\n };\n })\n .catch(error => {\n console.error('Error during service worker registration:', error);\n });\n}\n\nfunction checkValidServiceWorker(swUrl, config) {\n // Check if the service worker can be found. 
If it can't reload the page.\n fetch(swUrl, {\n headers: { 'Service-Worker': 'script' }\n })\n .then(response => {\n // Ensure service worker exists, and that we really are getting a JS file.\n const contentType = response.headers.get('content-type');\n if (\n response.status === 404 ||\n (contentType != null && contentType.indexOf('javascript') === -1)\n ) {\n // No service worker found. Probably a different app. Reload the page.\n navigator.serviceWorker.ready.then(registration => {\n registration.unregister().then(() => {\n window.location.reload();\n });\n });\n } else {\n // Service worker found. Proceed as normal.\n registerValidSW(swUrl, config);\n }\n })\n .catch(() => {\n console.log(\n 'No internet connection found. App is running in offline mode.'\n );\n });\n}\n\nexport function unregister() {\n if ('serviceWorker' in navigator) {\n navigator.serviceWorker.ready.then(registration => {\n registration.unregister();\n });\n }\n}\n","import React from 'react';\nimport ReactDOM from 'react-dom';\nimport './styles/main.scss';\nimport App from './App';\nimport * as serviceWorker from './serviceWorker';\n\nReactDOM.render(, document.getElementById('root'));\n\n// If you want your app to work offline and load faster, you can change\n// unregister() to register() below. Note this comes with some pitfalls.\n// Learn more about service workers: https://bit.ly/CRA-PWA\nserviceWorker.unregister();\n"],"sourceRoot":""} \ No newline at end of file diff --git a/public/index.html b/public/index.html index eb0464f..249f5ba 100644 --- a/public/index.html +++ b/public/index.html @@ -38,8 +38,6 @@ To begin the development, run `npm start` or `yarn start`. To create a production bundle, use `npm run build` or `yarn build`. --> -