/*
 * @onereach/step-voice — Onereach.ai Voice Steps
 * (package listing metadata: Version: —, 440 lines (439 loc), 17 kB, JavaScript)
 */
"use strict";
/* eslint-disable @typescript-eslint/strict-boolean-expressions */
Object.defineProperty(exports, "__esModule", { value: true });
exports.VoiceStepError = void 0;
const tslib_1 = require("tslib");
const lodash_1 = tslib_1.__importDefault(require("lodash"));
const nanoid_1 = require("nanoid");
const uuid_1 = require("uuid");
// TODO: !!!!! import ConvStep from '@onereach/step-conversation/dst/step' !!!!!
const step_1 = tslib_1.__importDefault(require("./step"));
const base_1 = tslib_1.__importDefault(require("@onereach/flow-sdk/dst/errors/base"));
/**
 * Error type raised by voice steps (see `VoiceStep#throwError`). Extends the
 * flow-sdk base error so upstream handlers can distinguish voice-step
 * failures from other flow errors.
 */
class VoiceStepError extends base_1.default {
}
exports.VoiceStepError = VoiceStepError;
/**
 * Voice-channel step. Extends the local conversation step (./step) with
 * call-lifecycle helpers: heartbeat supervision, thread-based step exits,
 * voicer command dispatch, call-recording pause/resume, transcript
 * reporting (including barge-in / interruption handling) and prompt,
 * reprompt and choice-grammar building.
 *
 * NOTE(review): `call.vv` is used throughout as a voicer capability/version
 * gate (>= 1 heartbeat, >= 2 ack + native sensitive-data recording control)
 * — confirm against the voicer release notes.
 */
class VoiceStep extends step_1.default {
    // static Error = VoiceStepError
    /**
     * Pre-run hook. After the base hook, when a cached call exists:
     * re-links this thread to the data thread if they differ, and — unless
     * this is a `direct` (internally spawned) run — arms the heartbeat
     * watchdog for the cached call.
     */
    async runBefore() {
        await super.runBefore();
        if (this.cache != null) {
            if (this.thread.id !== this.dataThreadId)
                this.thread.refThread(this.dataThreadId);
            if (!this.state.direct)
                await this.handleHeartbeat(this.cache);
        }
    }
    /**
     * Exit the step, either directly (base behavior) or by spawning a
     * dedicated exit thread when `byThread` is true.
     */
    exitStep(exitId, data, byThread = false) {
        if (byThread)
            return this.exitStepByThread(exitId, data);
        return super.exitStep(exitId, data);
    }
    /**
     * Performs the exit on a freshly spawned process thread. The thread id is
     * built from the exit label (non-word characters stripped) plus a short
     * nanoid; `state.name` points at `exitToThread`, which the new thread
     * runs with the stashed exit id / result.
     */
    exitStepByThread(exitId, result) {
        this.log.debug('exitStepByThread', exitId, result);
        const exitLabel = lodash_1.default.replace(this.getExitStepLabel(exitId) ?? exitId, /\W+/g, '');
        return this.process.runThread({
            id: `${exitLabel}_${(0, nanoid_1.nanoid)(8)}`,
            state: {
                name: this.exitToThread.name,
                direct: true,
                result: {
                    conversation: this.conversation,
                    conversationThreadId: this.dataThreadId,
                    ...result
                },
                exitStep: exitId,
                step: this.currentStepId
            }
        });
    }
    // Entry point executed on the thread spawned by exitStepByThread:
    // replays the stashed exit against the step machinery.
    exitToThread() {
        this.thread.exitStep(this.state.exitStep, this.state.result);
    }
    /**
     * Sends a batch of commands to the voicer for the given call via the
     * event manager. No-op on an empty batch.
     *
     * Workers send `async` (fire-and-forget with a reply routed back to the
     * worker thread); non-workers send `cancel` instead — NOTE(review): the
     * exact semantics of `cancel`/`async`/`hbs: 99` are defined by the voicer
     * protocol, not visible here. Throws when the emit yields no result.
     */
    async sendCommands({ id, type, callback }, commands) {
        if (lodash_1.default.isEmpty(commands))
            return;
        const result = await this.thread.eventManager.emit({
            target: this.helpers.providersAccountId,
            name: `out/voice/${type}`,
            params: {
                id,
                cancel: this.isWorker ? undefined : true,
                async: this.isWorker ? true : undefined,
                hbs: 99,
                commands,
                step: { key: this.session.key, trd: this.isGlobal ? this.workerThreadId : this.thread.id } // response should be sent to this session
            },
            reporting: this.session.getSessionRef()
        }, {
            target: callback,
            invocationType: 'async',
            timeout: 5000
        });
        if (result == null)
            throw new Error(`failed to send command to call: ${id}`);
    }
    /**
     * Heartbeat supervision for a call.
     *
     * For heartbeat-capable voicers (vv >= 1): a background thread simply
     * drops its timeout wait; otherwise a deadline is armed 290000 ms out
     * (NOTE(review): ~4 min 50 s, presumably just under the voicer heartbeat
     * interval — confirm). On deadline expiry the thread is backgrounded and,
     * if the call already ended, the step ends; otherwise only an error is
     * logged (the call is presumed hung up).
     *
     * For older voicers with a non-zero reported version, the version is
     * reset to 0 and persisted so the downgrade is only logged once.
     */
    async handleHeartbeat(call) {
        const allowHeartbeat = this.canVoicerHearbeat(call);
        if (allowHeartbeat) {
            if (this.thread.background) {
                delete this.waits.timeout;
                return;
            }
            const expectHearbeatBefore = Date.now() + 290000;
            this.triggers.deadline(expectHearbeatBefore, () => {
                this.thread.background = true;
                if (call.ended) {
                    this.log.warn('missing heartbeat, call is ended');
                    this.end();
                }
                else {
                    this.log.error('missing heartbeat, call is probably already hangup');
                }
            });
        }
        else if (call.vv !== 0) {
            this.log.debug('voicer is older than expected, please consider voicer upgrade', { version: call.vv });
            call.vv = 0;
            await this.updateData();
        }
    }
    // Cancellation: route to the 'cancel' exit when the step opted in via
    // data.handleCancel, otherwise just end the step.
    handleCancel() {
        if (this.data.handleCancel === true)
            this.exitStep('cancel');
        else
            this.end();
    }
    /**
     * Joins section texts into one transcript message.
     *
     * With interruption metadata, only sections up to and including the
     * interrupted one are taken, and the interrupted section's text is
     * truncated to the portion estimated to have been played. Each section
     * text is capped at 4000 chars and HTML-like tags are replaced with
     * spaces.
     */
    extractSectionMessages(sections, interruptionMetadata) {
        const { sectionIndex } = interruptionMetadata ?? {};
        const slicedTo = sectionIndex != null ? sectionIndex + 1 : sections.length;
        return sections
            .slice(0, slicedTo)
            .map((s, index) => {
            // Should escape html, max length 4000 symbols
            let text = (s.text ?? '')
                .slice(0, 4000)
                .replace(/(<[^>]+>|<\/[^>]+>)/gi, ' ')
                .trim();
            // Extracts the portion of the section text that corresponds to the played time.
            if (interruptionMetadata) {
                const { playedTime, sectionDuration } = interruptionMetadata;
                if (sectionIndex === index) {
                    text = this.extractPlayedSectionMessage(text, sectionDuration, playedTime);
                }
            }
            return text;
        })
            .join(' ');
    }
    /**
     * Estimates the played prefix of a section's text by assuming words are
     * spoken at a uniform rate: wordsPlayed = words/sec * playedTime.
     * Durations are in milliseconds.
     */
    extractPlayedSectionMessage(text, sectionDuration, playedTime) {
        const words = text.split(' ');
        const totalWords = words.length;
        const sectionDurationSec = sectionDuration / 1000;
        const playedTimeSec = playedTime / 1000;
        const wordsPerSecond = totalWords / sectionDurationSec;
        const wordsPlayed = Math.floor(wordsPerSecond * playedTimeSec);
        return words.slice(0, wordsPlayed).join(' ').trim();
    }
    // Collects audio-file references (for transcript media) from sections
    // that carry a url.
    extractSectionFiles(sections) {
        return lodash_1.default.chain(sections)
            .filter(s => Boolean(s.url))
            .map(s => ({
            fileUrl: s.url ?? '',
            fileType: 'Audio'
        }))
            .value();
    }
    /**
     * Sends `command` to the call, pausing session recording around a
     * sensitive step when required (call is being recorded AND the step is
     * marked mute). Voicers with vv >= 2 handle pause/resume natively via
     * the command's `sensitiveData` param; older voicers get an explicit
     * 'stop-record-session' command prepended (order matters: stop first,
     * then the actual command).
     */
    async pauseRecording(call, command, sensitiveData) {
        const commands = [command];
        const stopRecording = (call.recordCall && sensitiveData?.muteStep);
        if (call.vv >= 2) {
            // newer voicer version automaically should stop/resume session recording
            command.params.sensitiveData = stopRecording ? sensitiveData : {};
        }
        else if (stopRecording) {
            commands.unshift({
                name: 'stop-record-session',
                params: {
                    muteUser: sensitiveData.muteUser,
                    muteBot: sensitiveData.muteBot
                }
            });
        }
        await this.sendCommands(call, commands);
    }
    /**
     * Counterpart to pauseRecording for legacy voicers (vv < 2) only:
     * resumes session recording after a muted sensitive step. Newer voicers
     * resume automatically, so this is a no-op for them.
     */
    async resumeRecording(call, sensitiveData) {
        if (!call.recordCall || !sensitiveData?.muteStep || call.vv >= 2)
            return;
        await this.sendCommands(call, [{
                name: 'resume-record-session',
                params: {
                    muteUser: sensitiveData.muteUser,
                    muteBot: sensitiveData.muteBot
                }
            }]);
    }
    /**
     * Builds and reports a transcript event for the call, returning the new
     * event id (also persisted on `call.lastTranscriptId`).
     *
     * - `sections` are flattened into a message (honoring interruption
     *   truncation) plus audio media attachments.
     * - An explicit `message` wins over sections; weighted phrase syntax
     *   ('text:weight|other:weight') is reduced to the first phrase text.
     * - `keyPress` takes precedence over `voiceProcessResult`.
     * - When the message amends an interrupted prompt
     *   (`interruptionMetadata.reporterTranscriptEventId`), an 'Augment'
     *   event is queued to the HITL cache (if any) instead of reporting a
     *   fresh transcript event.
     */
    async transcript(call, data = {}, interruptionMetadata) {
        const { previousTranscriptId = call.lastTranscriptId, action, keyPress, message, voiceProcessResult, reportingSettingsKey, sections, reprompt, recording, conferenceId, actionFromBot = false } = data;
        const eventId = (0, uuid_1.v4)();
        // Direction: bot-initiated actions report bot -> user, otherwise user -> bot.
        const fromIdentifier = actionFromBot ? call.botNumber : call.endUserNumber;
        const toIdentifier = actionFromBot ? call.endUserNumber : call.botNumber;
        const fromIsSip = (/^sip:/i).test(fromIdentifier);
        const toIsSip = (/^sip:/i).test(toIdentifier);
        const reportingObject = {
            botIdentifier: call.botNumber,
            channel: 'Phone',
            fromIdentifier,
            fromIdentifierType: fromIsSip ? 'SIP' : undefined,
            toIdentifier,
            toIdentifierType: toIsSip ? 'SIP' : undefined,
            reportingSettingsKey,
            conversationId: call.id,
            previousTranscriptId,
            reprompt,
            recording,
            conferenceId,
            action,
            eventId
        };
        if (sections != null) {
            const allMessages = this.extractSectionMessages(sections, interruptionMetadata);
            const allFiles = this.extractSectionFiles(sections);
            if (!lodash_1.default.isEmpty(allMessages))
                reportingObject.message = allMessages;
            if (!lodash_1.default.isEmpty(allFiles))
                reportingObject.media = allFiles;
        }
        // message might be : 'test:weight|anotherText:weight', we need only 'test'
        if (message) {
            reportingObject.message = lodash_1.default.chain([data.message])
                .split('|')
                .head()
                .split(':')
                .head()
                .value();
        }
        if (keyPress)
            reportingObject.keyPress = keyPress;
        else if (voiceProcessResult)
            reportingObject.voiceProcessResult = voiceProcessResult;
        /**
         * If we have the previous transcript event ID,
         * which was returned from the interrupted message,
         * try to update the reporting message.
         */
        if (interruptionMetadata?.reporterTranscriptEventId &&
            reportingObject.message) {
            const updateReportingObject = {
                EventId: eventId,
                Timestamp: new Date().toISOString(),
                Event: 'Augment',
                EventValue: {
                    eventId: interruptionMetadata.reporterTranscriptEventId,
                    eventValue: {
                        Message: reportingObject.message,
                    },
                },
            };
            this.log.debug(`Augment Transcript`, {
                interruptionMetadata,
                updateReportingObject,
                shouldBeSendToHitl: this.process.cache.hitl != null,
            });
            if (this.process.cache.hitl) {
                await this.process.cache.hitl.queueEvents([updateReportingObject]);
            }
        }
        else {
            await this.reporter.reportTranscriptEvent(reportingObject);
        }
        // Chain transcripts and persist the pointer before returning.
        call.lastTranscriptId = eventId;
        await this.updateData();
        return eventId;
    }
    // Wraps any error (or raw value) into a VoiceStepError, preserving the name.
    throwError(error = new Error('unknown')) {
        throw new VoiceStepError(error.message ?? error, null, { name: error?.name });
    }
    /**
     * Marks the call as ended (idempotent: returns early if already ended),
     * clears any conference, and reports a 'Call End' transcript attributed
     * to whichever party hung up.
     */
    async handleHangup(call) {
        if (call.ended)
            return;
        const isHangedUpByBot = call.sessionEndedBy === 'Bot';
        call.conference = undefined;
        call.ended = true;
        await this.transcript(call, {
            action: 'Call End',
            reportingSettingsKey: 'transcriptHangup',
            actionFromBot: isHangedUpByBot
        });
    }
    /**
     * Maps step-config sections to voicer playback sections. A section-level
     * keypad barge-in setting overrides the step-level default; TTS settings
     * are spread onto every section.
     */
    buildSections({ sections, textType, ttsSettings, allowKeypadBargeIn }) {
        return lodash_1.default.map(sections, section => ({
            text: section.voiceTextMsg,
            url: section.audioUrl,
            textType,
            bargeInVoice: section.allowVoiceBargeIn,
            bargeInKeypad: section.allowKeypadBargeIn ?? allowKeypadBargeIn,
            bargeInBeforeSpeechEndTime: section.bargeInBeforeSpeechEndTime,
            ...ttsSettings
        }));
    }
    // Maps step-config reprompt items to the reprompt list consumed by
    // rptsSend; `repeatMsg` flags that the original prompt should replay.
    buildReprompts({ prompts, allowKeypadBargeIn }) {
        return lodash_1.default.map(prompts, prompt => ({
            time: prompt.time,
            message: prompt.item.message,
            repeatMsg: prompt.item.repeat,
            fileName: prompt.item.audioUrl || '',
            allowVoiceBargeIn: prompt.item.allowVoiceBargeIn,
            allowKeypadBargeIn: prompt.item.allowKeypadBargeIn ?? allowKeypadBargeIn
        }));
    }
    // Maps configured choices to recognition entries: exit id, DTMF key,
    // optional raw GRXML grammar, and weighted phrase options.
    buildChoices({ choices }) {
        return lodash_1.default.map(choices, (choice) => ({
            exitId: choice.exitId,
            dtmf: choice.dtmf,
            grxml: choice.grammar?.grxmlValue,
            options: this.buildChoice(choice)
        }));
    }
    /**
     * Builds the weighted phrase list for one choice. Phrases come either
     * from `grammar.value` or by splitting `choice.text` on commas; each
     * phrase may carry a ':weight' suffix (weight defaults to 1 and stays a
     * string when provided).
     *
     * NOTE(review): the guard reads `choice.grxmlValue`, but buildChoices
     * reads the grxml from `choice.grammar?.grxmlValue` — this guard may
     * never fire; confirm the intended field.
     */
    buildChoice(choice) {
        if (choice.grammar && choice.grxmlValue) {
            throw new Error('grxml is not supported');
        }
        let parts;
        if (choice.grammar?.value) {
            parts = choice.grammar.value;
        }
        else {
            // Leaves a lodash wrapper; re-wrapping below is safe (lodash
            // returns an existing wrapper unchanged).
            parts = lodash_1.default.chain(choice.text).split(/\s*,\s*/);
        }
        parts = lodash_1.default.chain(parts).map(lodash_1.default.trim).compact().value();
        return lodash_1.default.chain(parts)
            .map((text) => lodash_1.default.split(text, ':'))
            .map(([text, weight]) => ({
            text: lodash_1.default.trim(text),
            weight: lodash_1.default.trim(weight) || 1
        }))
            .value();
    }
    /**
     * Normalizes voicer result params into the exit payload: tags/out
     * passthrough, pressed digit(s) as both `digit` and `value`, recognized
     * language, and — for phrase matches — the top phrase as `value` with
     * the full interpretation list (top phrases + alternates).
     */
    exitChoiceData(type, params = {}) {
        const digit = params.digit || params.digits;
        const data = {
            type
        };
        if (!lodash_1.default.isEmpty(params.tags)) {
            data.tags = params.tags;
        }
        if (!lodash_1.default.isEmpty(params.out)) {
            data.out = params.out;
        }
        if (!lodash_1.default.isEmpty(digit)) {
            data.digit = digit;
            data.value = digit;
        }
        if (!lodash_1.default.isEmpty(params.language)) {
            data.language = params.language;
        }
        if (!lodash_1.default.isEmpty(params.phrases)) {
            const phrases = params.phrases;
            const other = params.other ?? [];
            data.value = phrases[0].text;
            data.interpretation = [...phrases, ...other];
        }
        return data;
    }
    ;
    /**
     * Sends the current reprompt: appends the reprompt section (and, when
     * `repeatMsg` is set, the original speech sections) to the command,
     * advances the reprompt index, sets the next timeout, reports a
     * 'Call Prompt' transcript and stashes its event id on the command for
     * interruption augmentation.
     *
     * NOTE(review): for vv < 2 the command is sent directly, bypassing
     * pauseRecording's legacy stop-record-session prepend — confirm this is
     * intentional for reprompts.
     */
    async rptsSend(call, { command, reporting, repromptsList, noReplyDelay, speechSections, textType, ttsSettings, sensitiveData }) {
        const index = this.rptsIndex;
        const current = repromptsList[index];
        const params = command.params;
        const rptsTimeout = this.rptsTimeout({ noReplyDelay, repromptsList });
        const repromptSection = {
            textType,
            url: current.fileName,
            text: current.message,
            bargeInVoice: current.allowVoiceBargeIn,
            bargeInKeypad: current.allowKeypadBargeIn,
            ...ttsSettings
        };
        if (repromptSection.text || repromptSection.url) {
            params.sections.push(repromptSection);
        }
        if (current.repeatMsg) {
            params.sections.push(...speechSections);
        }
        this.state.repIdx = index + 1;
        params.timeout = rptsTimeout;
        params.firstDigitTimeout = rptsTimeout; // needed for request_digits command
        const reportingObject = {
            sections: command.params.sections,
            reprompt: {
                maxAttempts: repromptsList.length,
                attempt: index
            },
            action: 'Call Prompt',
            reportingSettingsKey: 'transcriptReprompt',
            actionFromBot: true
        };
        if (reporting) {
            lodash_1.default.assign(reportingObject, reporting);
        }
        const eventId = await this.transcript(call, reportingObject);
        params.reporterTranscriptEventId = eventId;
        if (call.vv >= 2)
            await this.pauseRecording(call, command, sensitiveData);
        else
            await this.sendCommands(call, [command]);
    }
    /**
     * Timeout (ms) until the next reprompt: the next entry's `time` (seconds),
     * falling back to `noReplyDelay`, times 1000; 60000 when that yields 0/NaN.
     */
    rptsTimeout({ noReplyDelay, repromptsList, initial = false }) {
        const next = repromptsList[initial ? 0 : this.rptsIndex + 1];
        return Number(lodash_1.default.get(next, 'time', noReplyDelay)) * 1000 || 60000;
    }
    // Resets the reprompt cursor back to the start.
    rptsRestart() {
        this.state.repIdx = undefined;
    }
    // True while there are reprompts left to send.
    rptsHasMore({ repromptsList }) {
        return this.rptsIndex < repromptsList.length;
    }
    // Current reprompt cursor (0 until the first reprompt is sent).
    get rptsIndex() {
        return this.state.repIdx ?? 0;
    }
    // True once at least one reprompt has been sent.
    get rptsStarted() {
        return this.rptsIndex !== 0;
    }
    // Voicer capability gates by protocol version (note: 'Hearbeat' typo is
    // part of the public method name and kept for compatibility).
    canVoicerHearbeat(call) {
        return call.vv >= 1;
    }
    canVoicerAck(call) {
        return call.vv >= 2;
    }
    // Interruption metadata may arrive at either params level depending on
    // the event shape; null when absent.
    getInterruptionMetadata(event) {
        return event.params?.interruptionMetadata
            ?? event.params?.result?.interruptionMetadata
            ?? null;
    }
    /**
     * On barge-in, reports a truncated 'Call Prompt' transcript for whatever
     * was playing: the last sent reprompt (cursor minus one) when a reprompt
     * was interrupted, otherwise the original speech sections. No-op when
     * the event carries no interruption metadata.
     */
    async handleInterruption(params) {
        const { call, event, speechSections, repromptsList = [], reportingSettingsKey = 'transcript' } = params;
        const interruptionMetadata = this.getInterruptionMetadata(event);
        this.log.debug('handleInterruption', { event, speechSections, repromptsList });
        if (!interruptionMetadata)
            return;
        const sections = [];
        if (repromptsList.length && reportingSettingsKey === 'transcriptReprompt') {
            const current = repromptsList[this.rptsIndex - 1];
            sections.push({
                url: current?.fileName,
                text: current?.message,
            });
        }
        else {
            sections.push(...speechSections);
        }
        await this.transcript(call, {
            action: 'Call Prompt',
            actionFromBot: true,
            sections,
            reportingSettingsKey,
        }, interruptionMetadata);
    }
}
exports.default = VoiceStep;
// Reassigning module.exports replaces the exports object populated above,
// which silently drops the `VoiceStepError` named export and the
// `__esModule`/`default` interop markers set by tslib. Re-attach them so
// both `require('...')` consumers (expecting the class directly) and
// ES-interop consumers (expecting `.default` / `.VoiceStepError`) work.
module.exports = VoiceStep;
module.exports.default = VoiceStep;
module.exports.VoiceStepError = VoiceStepError;
Object.defineProperty(module.exports, "__esModule", { value: true });