Skip to content

Commit

Permalink
Updates to frontend UI and NLP capabilities
Browse files Browse the repository at this point in the history
  • Loading branch information
amulchapla committed Sep 16, 2021
1 parent 7e3bc29 commit 50dc519
Show file tree
Hide file tree
Showing 4 changed files with 65 additions and 55 deletions.
6 changes: 4 additions & 2 deletions azure-speech-streaming-reactjs/speechexpressbackend/.env
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
SPEECH_KEY=<YOUR SPEECH SERVICE KEY>
SPEECH_REGION=<YOUR SPEECH SERVICE REGION - eastus2>
SPEECH_KEY='paste-your-speech-key-here'
SPEECH_REGION='paste-your-speech-region-here eg: eastus2'
TEXTANALYTICS_KEY='paste-your-text-analytics-key-here'
TEXTANALYTICS_ENDPOINT='paste-your-text-analytics-endpoint-here'
27 changes: 20 additions & 7 deletions azure-speech-streaming-reactjs/speechexpressbackend/serverapp.js
Original file line number Diff line number Diff line change
Expand Up @@ -56,29 +56,41 @@ app.post('/api/ta-key-phrases', async (req, res) => {
];
const textAnalyticsClient = new TextAnalyticsClient(textAnalyticsEndpoint, new AzureKeyCredential(textAnalyticsKey));

let keyPhrasesText = "KEY PHRASES: ";
const keyPhraseResult = await textAnalyticsClient.extractKeyPhrases(keyPhrasesInput);
keyPhraseResult.forEach(document => {
keyPhraseResponse = document.keyPhrases;
keyPhraseResponse = document.keyPhrases;
keyPhrasesText += document.keyPhrases;
});

let entityText = "ENTITIES: ";
const entityResults = await textAnalyticsClient.recognizeEntities(keyPhrasesInput);
entityResults.forEach(document => {
console.log(`Document ID: ${document.id}`);
//console.log(`Document ID: ${document.id}`);
document.entities.forEach(entity => {
if(entity.confidenceScore > 0.5){
console.log(`\tName: ${entity.text} \tCategory: ${entity.category} \tSubcategory: ${entity.subCategory ? entity.subCategory : "N/A"}`);
//console.log(`\tName: ${entity.text} \tCategory: ${entity.category} \tSubcategory: ${entity.subCategory ? entity.subCategory : "N/A"}`);
const currentEntity = entity.category + ": " + entity.text;
entityText += " " + currentEntity;
//console.log(`\tScore: ${entity.confidenceScore}`);
}
});
});

let piiText = "PII Redacted Text: ";
const piiResults = await textAnalyticsClient.recognizePiiEntities(keyPhrasesInput, "en");
for (const result of piiResults) {
if (result.error === undefined) {
console.log("Redacted Text: ", result.redactedText);
console.log(" -- Recognized PII entities for input", result.id, "--");
if(result.redactedText.indexOf('*') > -1){
//console.log("Redacted Text: ", result.redactedText);
piiText += result.redactedText;
//console.log(" -- Recognized PII entities for input", result.id, "--");
}

for (const entity of result.entities) {
console.log(entity.text, ":", entity.category, "(Score:", entity.confidenceScore, ")");
//console.log(entity.text, ":", entity.category, "(Score:", entity.confidenceScore, ")");
const currentEntity = entity.category + ": " + entity.text;
piiText += currentEntity;
}
} else {
console.error("Encountered an error:", result.error);
Expand All @@ -88,7 +100,8 @@ app.post('/api/ta-key-phrases', async (req, res) => {

const headers = { 'Content-Type': 'application/json' };
res.headers = headers;
res.send({ keyPhrasesExtracted: keyPhraseResponse, entityExtracted: entityResults, piiExtracted: piiResults });
//res.send({ keyPhrasesExtracted: keyPhraseResponse, entityExtracted: entityResults, piiExtracted: piiResults });
res.send({ keyPhrasesExtracted: keyPhrasesText, entityExtracted: entityText, piiExtracted: piiText });
} catch (err) {
console.log(err);
res.status(401).send('There was an error authorizing your text analytics key. Check your text analytics service key or endpoint to the .env file.');
Expand Down
87 changes: 41 additions & 46 deletions azure-speech-streaming-reactjs/speechreactfrontend/src/App.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,16 +11,16 @@ export default class App extends Component {
constructor(props) {
super(props);

this.state = {value: ''};
this.state = {color: 'white', value: '' };

this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);

this.state = {
displayText: 'INITIALIZED: ready to test speech...',
color: 'white',
displayText: 'INITIALIZED: ready to test speech...',
displayNLPOutput: 'NLP Output: ...'
};

}

handleChange(event) {
Expand Down Expand Up @@ -49,7 +49,7 @@ export default class App extends Component {
speechConfig.speechRecognitionLanguage = 'en-US';

//Setting below specifies custom speech model ID that is created using Speech Studio
speechConfig.endpointId = '5c0e6aec-f9b6-4da5-9228-a02b17d7a749';
speechConfig.endpointId = 'd26026b7-aaa0-40bf-84e7-35054451a3f4';

//Setting below allows specifying a custom GUID that can be used to correlate audio captured by Speech Logging
speechConfig.setServiceProperty("clientConnectionId", this.state.value, speechsdk.ServicePropertyChannel.UriQueryParameter);
Expand All @@ -62,7 +62,7 @@ export default class App extends Component {
});

let resultText = "";
let nlpText = "KEY PHRASES IDENTIFIED: ";
let nlpText = " ";
recognizer.sessionStarted = (s, e) => {
resultText = "Session ID: " + e.sessionId;

Expand All @@ -75,45 +75,38 @@ export default class App extends Component {
if(e.result.reason === ResultReason.RecognizedSpeech){

//Display continuous transcript
resultText += `\n${e.result.text}`;
this.setState({
displayText: resultText
});

//Perform continuous NLP
const nlpObj = await getKeyPhrases(e.result.text);

//Display extracted Key Phrases
if(nlpObj.keyPhrasesExtracted.length > 0){
nlpText += JSON.stringify(nlpObj.keyPhrasesExtracted);
this.setState({
displayNLPOutput: nlpText
});
}

/*//Display extracted entities
if(nlpObj.entityExtracted.length > 0){
nlpText += JSON.stringify(nlpObj.entityExtracted);
this.setState({
displayNLPOutput: nlpText
});
}*/

//Display PII Detected
nlpText += JSON.stringify(nlpObj.piiExtracted);
resultText += `\n${e.result.text}`;
this.setState({
displayNLPOutput: nlpText
});







displayText: resultText
});

//Perform continuous NLP
const nlpObj = await getKeyPhrases(e.result.text);

//Display extracted Key Phrases
const keyPhraseText = JSON.stringify(nlpObj.keyPhrasesExtracted);
if(keyPhraseText.length > 15){
//nlpText += "\n" + keyPhraseText;
//this.setState({ displayNLPOutput: nlpText });
}

//Display extracted entities
const entityText = JSON.stringify(nlpObj.entityExtracted);
if(entityText.length > 12){
nlpText += "\n" + entityText;
this.setState({ displayNLPOutput: nlpText.replace('<br/>', '\n') });
}

//Display PII Detected
const piiText = JSON.stringify(nlpObj.piiExtracted);
if(piiText.length > 21){
nlpText += "\n" + piiText;
this.setState({ displayNLPOutput: nlpText.replace('<br/>', '\n') });
}
}
else if (e.result.reason === ResultReason.NoMatch) {
resultText += `\nNo Match`
//resultText += `\nNo Match`
resultText += `\n`
}

};
Expand Down Expand Up @@ -166,22 +159,24 @@ export default class App extends Component {
</label>
<input type="submit" value="Submit" />
</form>
<div style={{ color: 'green', fontSize: 20, display: 'flex', justifyContent:'center', alignItems:'center' }}>-----------------------------------------------------------</div>


<div className="col-6">
<i className="fas fa-microphone fa-lg mr-2" onClick={() => this.sttFromMic()}></i>
STEP 2 - Click on Microphone and start talking for real-time insights..
</div>

<div style={{ color: 'blue', fontSize: 20, display: 'flex', justifyContent:'center', alignItems:'center' }}>----- Speech-to-text Output ---------------------------------------------------- AI-powered Call Insights ------</div>
<div className="row">
<div className="col-6 output-display rounded ">

<div className="row" style={{ height: 900}}>
<div className="col-6 output-display rounded" style={{ color: 'white', fontSize: 18, "borderWidth":"1px", 'borderColor':"black", 'borderStyle':'solid'}}>
<code>{this.state.displayText}</code>
</div>
<div className="col-6 nlpoutput-display rounded ">
<div className="col-6 nlpoutput-display rounded " style={{ color: 'green', fontSize: 18, "borderWidth":"1px", 'borderColor':"black", 'borderStyle':'solid'}}>
<code>{this.state.displayNLPOutput}</code>
</div>
</div>
</div>

</Container>
);
}
Expand Down
Binary file not shown.

0 comments on commit 50dc519

Please sign in to comment.