refactor: enhance Evoai integration with improved validation and message handling

This commit refines the Evoai integration by updating the service and controller logic for better functionality and maintainability. Key changes include:
- Added `openaiService` as a parameter of the `EvoaiService` constructor so the shared instance is injected instead of being created internally (see the wiring sketch after this summary).
- Enhanced the createBot method in EvoaiController to include EvoAI-specific validation and duplicate checks.
- Updated EvoaiDto and EvoaiSettingDto to remove unnecessary comments and add a fallback property.
- Reworked audio handling in EvoaiService so transcription happens inside `sendMessageToBot` rather than in a custom `process` override, and clarified the related logging.
- Adjusted the schema for Evoai settings to rename `evoaiIdFallback` to `botIdFallback` for better clarity.

These updates contribute to a more robust and maintainable Evoai integration.
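
As a reading aid, here is a minimal sketch of the constructor-injection change from the first bullet. Only the constructor signature and the final wiring line mirror the diff below; the stub types and placeholder values are assumptions for illustration.

```typescript
// Minimal sketch of the constructor-injection change. Only the constructor
// signature and the wiring line mirror the diff; stub types and placeholder
// values are illustrative assumptions.
type WAMonitoringService = object;
type PrismaRepository = object;
type ConfigService = object;

class OpenaiService {
  // Stub of the transcription entry point EvoaiService relies on.
  async speechToText(_msg: unknown, _instance: unknown): Promise<string | undefined> {
    return undefined;
  }
}

class EvoaiService {
  private openaiService: OpenaiService;

  constructor(
    private waMonitor: WAMonitoringService,
    private prismaRepository: PrismaRepository,
    private configService: ConfigService,
    openaiService: OpenaiService, // injected shared instance, no longer `new OpenaiService(...)` here
  ) {
    this.openaiService = openaiService;
  }
}

// Module wiring: a single OpenaiService instance is shared across integrations.
const openaiService = new OpenaiService();
const evoaiService = new EvoaiService({}, {}, {}, openaiService);
```

Sharing one `OpenaiService` instance avoids each integration constructing its own client, which is the point of passing it through the constructor.
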
Guilherme Gomes 2025-05-27 16:00:32 -03:00
parent 95bd85b6e3
commit 97ca23a7b0
5 changed files with 52 additions and 120 deletions

View File

@@ -77,7 +77,7 @@ export class EvoaiController extends BaseChatbotController<EvoaiModel, EvoaiDto>
}
}
// Bots
// Override createBot to add EvoAI-specific validation
public async createBot(instance: InstanceDto, data: EvoaiDto) {
if (!this.integrationEnabled) throw new BadRequestException('Evoai is disabled');
@@ -89,6 +89,7 @@ export class EvoaiController extends BaseChatbotController<EvoaiModel, EvoaiDto>
})
.then((instance) => instance.id);
// EvoAI-specific duplicate check
const checkDuplicate = await this.botRepository.findFirst({
where: {
instanceId: instanceId,
@@ -101,61 +102,10 @@ export class EvoaiController extends BaseChatbotController<EvoaiModel, EvoaiDto>
throw new Error('Evoai already exists');
}
// Let the base class handle the rest
return super.createBot(instance, data);
}
public async findBot(instance: InstanceDto) {
if (!this.integrationEnabled) throw new BadRequestException('Evoai is disabled');
const instanceId = await this.prismaRepository.instance
.findFirst({
where: {
name: instance.instanceName,
},
})
.then((instance) => instance.id);
const bots = await this.botRepository.findMany({
where: {
instanceId: instanceId,
},
});
if (!bots.length) {
return null;
}
return bots;
}
public async fetchBot(instance: InstanceDto, botId: string) {
if (!this.integrationEnabled) throw new BadRequestException('Evoai is disabled');
const instanceId = await this.prismaRepository.instance
.findFirst({
where: {
name: instance.instanceName,
},
})
.then((instance) => instance.id);
const bot = await this.botRepository.findFirst({
where: {
id: botId,
},
});
if (!bot) {
throw new Error('Evoai not found');
}
if (bot.instanceId !== instanceId) {
throw new Error('Evoai not found');
}
return bot;
}
// Process Evoai-specific bot logic
protected async processBot(
instance: any,

View File

@@ -1,11 +1,10 @@
import { BaseChatbotDto, BaseChatbotSettingDto } from '../../base-chatbot.dto';
export class EvoaiDto extends BaseChatbotDto {
// Evoai specific fields
agentUrl?: string;
apiKey?: string;
}
export class EvoaiSettingDto extends BaseChatbotSettingDto {
// Evoai specific fields
evoaiIdFallback?: string;
}

View File

@@ -1,8 +1,7 @@
import { InstanceDto } from '@api/dto/instance.dto';
import { PrismaRepository } from '@api/repository/repository.service';
import { WAMonitoringService } from '@api/services/monitor.service';
import { Integration } from '@api/types/wa.types';
import { ConfigService } from '@config/env.config';
import { ConfigService, HttpServer } from '@config/env.config';
import { Evoai, EvoaiSetting, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { downloadMediaMessage } from 'baileys';
@@ -10,12 +9,18 @@ import { v4 as uuidv4 } from 'uuid';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
private openaiService: OpenaiService;
constructor(waMonitor: WAMonitoringService, prismaRepository: PrismaRepository, configService: ConfigService) {
constructor(
waMonitor: WAMonitoringService,
prismaRepository: PrismaRepository,
configService: ConfigService,
openaiService: OpenaiService,
) {
super(waMonitor, prismaRepository, 'EvoaiService', configService);
this.openaiService = new OpenaiService(waMonitor, prismaRepository, configService);
this.openaiService = openaiService;
}
/**
@@ -25,52 +30,10 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
return 'evoai';
}
public async createNewSession(instance: InstanceDto, data: any) {
return super.createNewSession(instance, data, 'evoai');
}
/**
* Override the process method to directly handle audio messages
* Implement the abstract method to send message to EvoAI API
* Handles audio transcription, image processing, and complex JSON-RPC payload
*/
public async process(
instance: any,
remoteJid: string,
bot: Evoai,
session: IntegrationSession,
settings: EvoaiSetting,
content: string,
pushName?: string,
msg?: any,
): Promise<void> {
try {
this.logger.debug(`[EvoAI] Processing message with custom process method`);
let contentProcessed = content;
// Check if this is an audio message that we should try to transcribe
if (this.isAudioMessage(content) && msg) {
try {
this.logger.debug(`[EvoAI] Downloading audio for Whisper transcription`);
const transcription = await this.openaiService.speechToText(msg);
if (transcription) {
contentProcessed = transcription;
} else {
contentProcessed = '[Audio message could not be transcribed]';
}
} catch (err) {
this.logger.error(`[EvoAI] Failed to transcribe audio: ${err}`);
contentProcessed = '[Audio message could not be transcribed]';
}
}
// For non-audio messages or if transcription failed, proceed normally
return super.process(instance, remoteJid, bot, session, settings, contentProcessed, pushName, msg);
} catch (error) {
this.logger.error(`[EvoAI] Error in process: ${error}`);
return;
}
}
protected async sendMessageToBot(
instance: any,
session: IntegrationSession,
@@ -80,19 +43,40 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
pushName: string,
content: string,
msg?: any,
) {
): Promise<void> {
try {
this.logger.debug(`[EvoAI] Sending message to bot with content: ${content}`);
let processedContent = content;
// Handle audio messages - transcribe using OpenAI Whisper
if (this.isAudioMessage(content) && msg) {
try {
this.logger.debug(`[EvoAI] Downloading audio for Whisper transcription`);
const transcription = await this.openaiService.speechToText(msg, instance);
if (transcription) {
processedContent = transcription;
}
} catch (err) {
this.logger.error(`[EvoAI] Failed to transcribe audio: ${err}`);
}
}
const endpoint: string = evoai.agentUrl;
if (!endpoint) {
this.logger.error('No EvoAI endpoint defined');
return;
}
const callId = `req-${uuidv4().substring(0, 8)}`;
const messageId = uuidv4();
const messageId = msg?.key?.id || uuidv4();
// Prepare message parts
const parts = [
{
type: 'text',
text: content,
text: processedContent,
},
];
@@ -130,6 +114,17 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
role: 'user',
parts,
messageId: messageId,
metadata: {
messageKey: msg?.key,
},
},
metadata: {
remoteJid: remoteJid,
pushName: pushName,
fromMe: msg?.key?.fromMe,
instanceName: instance.instanceName,
serverUrl: this.configService.get<HttpServer>('SERVER').URL,
apiKey: instance.token,
},
},
};
@@ -177,22 +172,10 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
}
this.logger.debug(`[EvoAI] Extracted message to send: ${message}`);
const conversationId = session.sessionId;
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
}
await this.prismaRepository.integrationSession.update({
where: {
id: session.id,
},
data: {
status: 'opened',
awaitUser: true,
sessionId: conversationId,
},
});
} catch (error) {
this.logger.error(
`[EvoAI] Error sending message: ${error?.response?.data ? JSON.stringify(error.response.data) : error}`,
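
For readability, here is a hedged reconstruction of the request-body fragment assembled by the two payload hunks above. Only the fields visible in those hunks are reproduced; the helper name, parameter shapes, and the `message` wrapper key are assumptions, and the surrounding JSON-RPC envelope (id, method) is not shown in the diff.

```typescript
import { v4 as uuidv4 } from 'uuid';

// Hedged reconstruction of the payload fragment built in sendMessageToBot.
// Field names come from the hunks above; the helper name, parameter shapes,
// and the `message` wrapper key are illustrative assumptions.
function buildEvoaiParams(
  processedContent: string,
  msg: { key?: { id?: string; fromMe?: boolean } } | undefined,
  ctx: { remoteJid: string; pushName: string; instanceName: string; serverUrl: string; apiKey: string },
) {
  return {
    message: {
      role: 'user',
      parts: [{ type: 'text', text: processedContent }],
      messageId: msg?.key?.id || uuidv4(), // prefer the WhatsApp message id, fall back to a fresh UUID
      metadata: { messageKey: msg?.key },
    },
    metadata: {
      remoteJid: ctx.remoteJid,
      pushName: ctx.pushName,
      fromMe: msg?.key?.fromMe,
      instanceName: ctx.instanceName,
      serverUrl: ctx.serverUrl,
      apiKey: ctx.apiKey,
    },
  };
}
```
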

View File

@@ -71,7 +71,7 @@ export const evoaiSettingSchema: JSONSchema7 = {
keepOpen: { type: 'boolean' },
debounceTime: { type: 'integer' },
ignoreJids: { type: 'array', items: { type: 'string' } },
evoaiIdFallback: { type: 'string' },
botIdFallback: { type: 'string' },
splitMessages: { type: 'boolean' },
timePerChar: { type: 'integer' },
},
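
As a concrete illustration of the rename, a settings object matching the updated schema might look like this; the property names come from the schema hunk above, while all values are placeholders.

```typescript
// Hypothetical settings payload after the rename: the fallback bot id now
// travels as `botIdFallback` (formerly `evoaiIdFallback`). All values are
// placeholders for illustration.
const evoaiSettings = {
  keepOpen: true,
  debounceTime: 10,
  ignoreJids: [] as string[],
  botIdFallback: 'bot-id-placeholder',
  splitMessages: true,
  timePerChar: 50,
};
```
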

View File

@@ -135,7 +135,7 @@ export const flowiseController = new FlowiseController(flowiseService, prismaRep
const n8nService = new N8nService(waMonitor, prismaRepository, configService, openaiService);
export const n8nController = new N8nController(n8nService, prismaRepository, waMonitor);
const evoaiService = new EvoaiService(waMonitor, prismaRepository, configService);
const evoaiService = new EvoaiService(waMonitor, prismaRepository, configService, openaiService);
export const evoaiController = new EvoaiController(evoaiService, prismaRepository, waMonitor);
logger.info('Module - ON');