} else {
  config = BotFrameworkConfig.fromSubscription(subscriptionKey, region);
}
// Supported options can be found in DialogConnectorFactory.js.
// Set the language used for recognition.
config.setProperty(PropertyId.SpeechServiceConnection_RecoLanguage, speechRecognitionLanguage);
// The following code sets the output format.
// As advised by the Speech team, this API may be subject to future changes.
// We are not enabling the output format option because the detailed output format is not sent to the bot, which renders the option useless.
// config.setProperty(PropertyId.SpeechServiceResponse_OutputFormatOption, OutputFormat[OutputFormat.Detailed]);
// Set the user ID for starting the conversation.
userID && config.setProperty(PropertyId.Conversation_From_Id, userID);
// Set Custom Speech and Custom Voice.
// The following code was ported from the C# sample below, but it is not working yet.
// https://github.com/Azure-Samples/Cognitive-Services-Direct-Line-Speech-Client/blob/master/DLSpeechClient/MainWindow.xaml.cs
// speechRecognitionEndpointId && config.setServiceProperty('cid', speechRecognitionEndpointId, ServicePropertyChannel.UriQueryParameter);
// speechSynthesisDeploymentId && config.setProperty(PropertyId.Conversation_Custom_Voice_Deployment_Ids, speechSynthesisDeploymentId);
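// Untested sketch (an assumption, not part of the original sample): if the JavaScript Speech SDK
// honors the standard Custom Speech property for Direct Line Speech, the endpoint ID could instead
// be set through PropertyId.SpeechServiceConnection_EndpointId:
// speechRecognitionEndpointId && config.setProperty(PropertyId.SpeechServiceConnection_EndpointId, speechRecognitionEndpointId);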
const dialogServiceConnector = patchDialogServiceConnectorInline(new DialogServiceConnector(config, audioConfig));
dialogServiceConnector.connect();
// Renew token per interval.
if (authorizationToken) {
  const interval = setInterval(async () => {
    // #2660 If the connector has been disposed, we should stop renewing the token.