feat(Typebot): add splitMessages and timePerChar fields to Typebot models

- Introduced `splitMessages` and `timePerChar` fields in the Typebot and TypebotSetting models with default values.
- Created a migration script to update the database schema accordingly.
- Updated audio message handling to prepend `[audio]` to transcriptions for better clarity in message context.
Davidson Gomes 2025-06-12 13:24:25 -03:00
parent 1eb2c848f7
commit bc451e8493
12 changed files with 951 additions and 507 deletions

@@ -0,0 +1,7 @@
-- AlterTable
ALTER TABLE "Typebot" ADD COLUMN "splitMessages" BOOLEAN DEFAULT false,
ADD COLUMN "timePerChar" INTEGER DEFAULT 50;
-- AlterTable
ALTER TABLE "TypebotSetting" ADD COLUMN "splitMessages" BOOLEAN DEFAULT false,
ADD COLUMN "timePerChar" INTEGER DEFAULT 50;

@@ -357,6 +357,8 @@ model Typebot {
triggerType TriggerType?
triggerOperator TriggerOperator?
triggerValue String?
splitMessages Boolean? @default(false) @db.Boolean
timePerChar Int? @default(50) @db.Integer
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String
TypebotSetting TypebotSetting[]
@@ -374,6 +376,8 @@ model TypebotSetting {
debounceTime Int? @db.Integer
typebotIdFallback String? @db.VarChar(100)
ignoreJids Json?
splitMessages Boolean? @default(false) @db.Boolean
timePerChar Int? @default(50) @db.Integer
createdAt DateTime? @default(now()) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Fallback Typebot? @relation(fields: [typebotIdFallback], references: [id])
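
A minimal sketch of how the two new settings could be consumed when sending bot replies, under stated assumptions: the `sendSplitMessages` helper, the blank-line delimiter, and the delay logic are illustrative and not part of this commit; only the field names and defaults come from the schema above.

```typescript
// Hypothetical helper illustrating the intent of the new fields: split a long
// bot reply into chunks and simulate typing time proportional to chunk length.
async function sendSplitMessages(
  sendText: (text: string) => Promise<void>,
  reply: string,
  splitMessages: boolean | null,
  timePerChar: number | null,
): Promise<void> {
  // Fall back to the schema defaults when the settings are unset.
  const split = splitMessages ?? false;
  const msPerChar = timePerChar ?? 50;

  // Splitting on blank lines is an assumption; the real delimiter may differ.
  const chunks = split ? reply.split(/\n\s*\n/).filter(Boolean) : [reply];

  for (const chunk of chunks) {
    // Simulated typing delay before each chunk is sent.
    await new Promise((resolve) => setTimeout(resolve, chunk.length * msPerChar));
    await sendText(chunk);
  }
}
```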

@@ -165,7 +165,7 @@ export class EvolutionStartupService extends ChannelStartupService {
openAiDefaultSettings.speechToText &&
received?.message?.audioMessage
) {
messageRaw.message.speechToText = await this.openaiService.speechToText(received, this);
messageRaw.message.speechToText = `[audio] ${await this.openaiService.speechToText(received, this)}`;
}
}
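
The same `[audio]` prefix convention is applied in every transcription path below. A small, hypothetical helper sketches the idea; the helper name and the double-prefix guard are assumptions, not part of this commit.

```typescript
// Hypothetical helper (not part of this commit): tag a Whisper transcription so
// downstream bots can tell the text came from an audio message, without
// stacking the prefix if it was already applied upstream.
function tagAudioTranscription(transcription: string): string {
  const prefix = '[audio] ';
  return transcription.startsWith(prefix) ? transcription : `${prefix}${transcription}`;
}

// e.g. tagAudioTranscription('send the report tomorrow') -> '[audio] send the report tomorrow'
```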

@@ -520,7 +520,7 @@ export class BusinessStartupService extends ChannelStartupService {
openAiDefaultSettings.speechToText
) {
try {
messageRaw.message.speechToText = await this.openaiService.speechToText(
messageRaw.message.speechToText = `[audio] ${await this.openaiService.speechToText(
openAiDefaultSettings.OpenaiCreds,
{
message: {
@@ -528,7 +528,7 @@
...messageRaw,
},
},
);
)}`;
} catch (speechError) {
this.logger.error(`Error processing speech-to-text: ${speechError}`);
}
@@ -554,7 +554,7 @@ export class BusinessStartupService extends ChannelStartupService {
if (openAiDefaultSettings && openAiDefaultSettings.openaiCredsId && openAiDefaultSettings.speechToText) {
try {
messageRaw.message.speechToText = await this.openaiService.speechToText(
messageRaw.message.speechToText = `[audio] ${await this.openaiService.speechToText(
openAiDefaultSettings.OpenaiCreds,
{
message: {
@@ -562,7 +562,7 @@
...messageRaw,
},
},
);
)}`;
} catch (speechError) {
this.logger.error(`Error processing speech-to-text: ${speechError}`);
}

@@ -1188,7 +1188,7 @@ export class BaileysStartupService extends ChannelStartupService {
});
if (openAiDefaultSettings && openAiDefaultSettings.openaiCredsId && openAiDefaultSettings.speechToText) {
messageRaw.message.speechToText = await this.openaiService.speechToText(received, this);
messageRaw.message.speechToText = `[audio] ${await this.openaiService.speechToText(received, this)}`;
}
}
@@ -2111,7 +2111,7 @@
});
if (openAiDefaultSettings && openAiDefaultSettings.openaiCredsId && openAiDefaultSettings.speechToText) {
messageRaw.message.speechToText = await this.openaiService.speechToText(messageRaw, this);
messageRaw.message.speechToText = `[audio] ${await this.openaiService.speechToText(messageRaw, this)}`;
}
}

@@ -280,7 +280,7 @@ export abstract class BaseChatbotController<BotType = any, BotData extends BaseC
});
if (!bot) {
throw new Error(`${this.integrationName} not found`);
return null;
}
return bot;

@@ -55,7 +55,7 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
this.logger.debug(`[EvoAI] Downloading audio for Whisper transcription`);
const transcription = await this.openaiService.speechToText(msg, instance);
if (transcription) {
processedContent = transcription;
processedContent = `[audio] ${transcription}`;
}
} catch (err) {
this.logger.error(`[EvoAI] Failed to transcribe audio: ${err}`);

@@ -64,7 +64,7 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
this.logger.debug(`[EvolutionBot] Downloading audio for Whisper transcription`);
const transcription = await this.openaiService.speechToText(msg, instance);
if (transcription) {
payload.query = transcription;
payload.query = `[audio] ${transcription}`;
}
} catch (err) {
this.logger.error(`[EvolutionBot] Failed to transcribe audio: ${err}`);

@@ -72,7 +72,7 @@ export class FlowiseService extends BaseChatbotService<FlowiseModel> {
this.logger.debug(`[Flowise] Downloading audio for Whisper transcription`);
const transcription = await this.openaiService.speechToText(msg, instance);
if (transcription) {
payload.question = transcription;
payload.question = `[audio] ${transcription}`;
}
} catch (err) {
this.logger.error(`[Flowise] Failed to transcribe audio: ${err}`);

@@ -61,7 +61,7 @@ export class N8nService extends BaseChatbotService<N8n, N8nSetting> {
this.logger.debug(`[N8n] Downloading audio for Whisper transcription`);
const transcription = await this.openaiService.speechToText(msg, instance);
if (transcription) {
payload.chatInput = transcription;
payload.chatInput = `[audio] ${transcription}`;
}
} catch (err) {
this.logger.error(`[N8n] Failed to transcribe audio: ${err}`);

@@ -90,8 +90,25 @@ export class TypebotController extends BaseChatbotController<TypebotModel, Typeb
pushName?: string,
msg?: any,
) {
// Use the simplified service method that follows the base class pattern
await this.typebotService.processTypebot(instance, remoteJid, bot, session, settings, content, pushName, msg);
// Map to the original processTypebot method signature
await this.typebotService.processTypebot(
instance,
remoteJid,
msg,
session,
bot,
bot.url,
settings.expire,
bot.typebot,
settings.keywordFinish,
settings.delayMessage,
settings.unknownMessage,
settings.listeningFromMe,
settings.stopBotFromMe,
settings.keepOpen,
content,
{}, // prefilledVariables (optional)
);
}
// TypeBot specific method for starting a bot from API
@@ -211,25 +228,23 @@ export class TypebotController extends BaseChatbotController<TypebotModel, Typeb
},
});
// Use the simplified service method instead of the complex one
const settings = {
// Use the original processTypebot method with all parameters
await this.typebotService.processTypebot(
this.waMonitor.waInstances[instanceData.name],
remoteJid,
null, // msg
null, // session
findBot,
url,
expire,
typebot,
keywordFinish,
delayMessage,
unknownMessage,
listeningFromMe,
stopBotFromMe,
keepOpen,
};
await this.typebotService.processTypebot(
instanceData,
remoteJid,
findBot,
null, // session
settings,
'init',
null, // pushName
prefilledVariables,
);
} else {
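
For reference, the positional parameter order used in both `processTypebot` call sites above implies a service signature roughly like the following. This is inferred from the calls shown in this diff; the parameter types and the `Promise<void>` return type are assumptions and may not match the actual service.

```typescript
// Inferred from the two call sites above; names follow the arguments passed there,
// types are assumptions for illustration only.
type ProcessTypebot = (
  instance: any,                             // WA instance wrapper
  remoteJid: string,                         // chat the bot replies to
  msg: any | null,                           // triggering message (null when started via API)
  session: any | null,                       // existing session (null to open a new one)
  findTypebot: any,                          // Typebot record loaded from the database
  url: string,                               // Typebot server URL
  expire: number,
  typebot: string,                           // public typebot id/name
  keywordFinish: string,
  delayMessage: number,
  unknownMessage: string,
  listeningFromMe: boolean,
  stopBotFromMe: boolean,
  keepOpen: boolean,
  content: string,                           // text forwarded to the bot ('init' on start)
  prefilledVariables?: Record<string, any>,
) => Promise<void>;
```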