Compare commits


75 Commits
2.3.3 ... 2.3.5

Author SHA1 Message Date
Davidson Gomes
d48fbc3a4e Merge branch 'release/2.3.5'
2025-10-15 09:58:37 -03:00
Davidson Gomes
cdf06666a1 chore(workflows): update checkout step to include submodules
- Added 'submodules: recursive' option to the checkout step in multiple workflow files to ensure submodules are properly initialized during CI/CD processes.
2025-10-15 09:58:27 -03:00
Davidson Gomes
5254928887 Merge branch 'release/2.3.5' 2025-10-15 09:48:00 -03:00
Davidson Gomes
8468690d37 chore(manager): update asset files and install process
- Updated subproject reference in evolution-manager-v2 to the latest commit.
- Enhanced the manager_install.sh script to include npm install and build steps for the evolution-manager-v2.
- Replaced old JavaScript asset file with a new version for improved performance.
- Added a new CSS file for consistent styling across the application.
2025-10-15 09:47:32 -03:00
Willian Coqueiro
bdd9257c47 Merge branch 'develop' of https://github.com/KokeroO/evolution-api into develop 2025-10-15 09:47:32 -03:00
Willian Coqueiro
d6834c8741 fix(chatwoot): correct chatId extraction for non-group JIDs 2025-10-15 09:47:31 -03:00
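The exact patch isn't shown here; as a rough illustration (the helper name and JID handling are assumptions, not the actual fix), the change amounts to treating group and non-group JIDs differently when building the chat id:

```typescript
// Hypothetical helper: derive the chat id from a WhatsApp remote JID.
// Group JIDs keep their full form; individual JIDs (including @lid ones)
// are reduced to the user part only.
function extractChatId(remoteJid: string): string {
  if (remoteJid.endsWith('@g.us')) {
    return remoteJid; // group chats keep the full JID
  }
  // non-group JIDs, e.g. '5511999999999@s.whatsapp.net' or '123456@lid'
  return remoteJid.split('@')[0];
}

// extractChatId('120363025246125486@g.us')      -> '120363025246125486@g.us'
// extractChatId('5511999999999@s.whatsapp.net') -> '5511999999999'
```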
Davidson Gomes
164beddb39 chore(manager): update asset files and install process
- Updated subproject reference in evolution-manager-v2 to the latest commit.
- Enhanced the manager_install.sh script to include npm install and build steps for the evolution-manager-v2.
- Replaced old JavaScript asset file with a new version for improved performance.
- Added a new CSS file for consistent styling across the application.
2025-10-15 09:44:15 -03:00
Davidson Gomes
4991f1dc37 feat(telemetry): add message type telemetry logging in channel services
- Integrated telemetry logging for received messages in Evolution, WhatsApp Business, and Baileys services.
- Enhanced message tracking by sending the message type to the telemetry system for better observability.
2025-10-15 09:42:45 -03:00
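As a rough sketch of the idea only (this is not the project's actual telemetry client; the endpoint shape and payload fields are assumptions), reporting the message type can be as small as a fire-and-forget POST:

```typescript
import axios from 'axios';

// Hypothetical fire-and-forget telemetry call: report which message type was
// received so traffic can be observed per type without logging message content.
async function sendMessageTypeTelemetry(instanceName: string, messageType: string): Promise<void> {
  const url = process.env.TELEMETRY_URL; // same env vars that appear in env.example below
  if (!url || process.env.TELEMETRY_ENABLED === 'false') return;
  try {
    await axios.post(url, { event: 'message_received', instanceName, messageType });
  } catch {
    // telemetry must never break message handling
  }
}
```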
Davidson Gomes
1b1e3b3e9d chore(changelog): update CHANGELOG for version 2.3.5 release date
- Updated the release date for version 2.3.5 to 2025-10-15.
- Adjusted subproject reference in evolution-manager-v2 to the latest commit.
2025-10-15 09:42:44 -03:00
Davidson Gomes
563ca2dd22 chore(changelog): update CHANGELOG for version 2.3.5
- Added features for Chatwoot enhancements, participants data handling, and LID to phone number conversion.
- Updated Docker configurations to include Kafka and frontend services.
- Fixed PostgreSQL migration errors and improved message handling in Baileys and Chatwoot services.
- Refactored TypeScript build process and implemented exponential backoff patterns.
2025-10-15 09:42:44 -03:00
Davidson Gomes
4e44bfb222 chore(manager): update asset files and dependencies
- Updated subproject reference in evolution-manager-v2.
- Replaced old JavaScript and CSS asset files with new versions for improved performance and styling.
- Added new CSS file for consistent font styling across the application.
- Updated the evolution logo image to the latest version.
2025-10-15 09:42:44 -03:00
Davidson Gomes
9edd600513 Merge pull request #2083 from davidmnzs/main
fix: correct the error of hardcoded prisma/kafka schema
2025-10-15 09:40:15 -03:00
Davidson Gomes
501b06d133 Merge branch 'release/2.3.5' 2025-10-15 09:38:34 -03:00
Davidson Gomes
dc530285d5 feat(telemetry): add message type telemetry logging in channel services
- Integrated telemetry logging for received messages in Evolution, WhatsApp Business, and Baileys services.
- Enhanced message tracking by sending the message type to the telemetry system for better observability.
2025-10-15 09:38:06 -03:00
Davidson Gomes
8775cdf036 chore(changelog): update CHANGELOG for version 2.3.5 release date
- Updated the release date for version 2.3.5 to 2025-10-15.
- Adjusted subproject reference in evolution-manager-v2 to the latest commit.
2025-10-15 09:32:09 -03:00
Davidson Gomes
6ad33df879 chore(changelog): update CHANGELOG for version 2.3.5
- Added features for Chatwoot enhancements, participants data handling, and LID to phone number conversion.
- Updated Docker configurations to include Kafka and frontend services.
- Fixed PostgreSQL migration errors and improved message handling in Baileys and Chatwoot services.
- Refactored TypeScript build process and implemented exponential backoff patterns.
2025-10-15 09:31:45 -03:00
Davidson Gomes
633d0b4c45 Merge pull request #2085 from KokeroO/develop
Convert LIDs to PN by sending a call rejection message
2025-10-15 09:25:37 -03:00
Davidson Gomes
82c0eadf7c chore(manager): update asset files and dependencies
- Updated subproject reference in evolution-manager-v2.
- Replaced old JavaScript and CSS asset files with new versions for improved performance and styling.
- Added new CSS file for consistent font styling across the application.
- Updated the evolution logo image to the latest version.
2025-10-15 09:25:21 -03:00
Willian Coqueiro
1756abf1e6 Merge branch 'develop' of https://github.com/KokeroO/evolution-api into develop 2025-10-14 05:33:54 +00:00
Willian Coqueiro
a2f48030dc Merge branch 'develop' of https://github.com/KokeroO/evolution-api into develop 2025-10-14 05:33:33 +00:00
Willian Coqueiro
3214a9fb5b fix(chatwoot): correct chatId extraction for non-group JIDs 2025-10-14 05:25:36 +00:00
Willian Coqueiro
4b89e3f987 fix(chatwoot): correct chatId extraction for non-group JIDs 2025-10-14 02:16:22 +00:00
Willian Coqueiro
72622dca31 Merge upstream/develop into develop 2025-10-14 02:12:15 +00:00
davidmnzs
d73b72b67e fix: correct the error of hardcoded prisma/kafka schema 2025-10-13 20:28:17 -03:00
Davidson Gomes
20eef33df3 Merge pull request #2076 from KokeroO/fix/chatwoot
Implementations and corrections of previous commits in the chatwoot and baileys services
2025-10-13 12:19:58 -03:00
Davidson Gomes
37571c03b4 Merge pull request #2072 from nolramaf/fix/media-content-validation
fix/media content validation
2025-10-13 12:19:05 -03:00
Willian Coqueiro
017949458b refactor(baileys): simplify linkPreview handling in BaileysStartupService 2025-10-12 15:38:05 +00:00
Willian Coqueiro
2feaf1c74e fix(baileys): handle undefined status in update by defaulting to 'DELETED' 2025-10-12 15:29:48 +00:00
Willian Coqueiro
4b043cb4b8 refactor: update TypeScript build process and dependencies
- Changed the build command in package.json to use TypeScript compiler (tsc) with noEmit option.
- Added @swc/core and @swc/helpers as development dependencies for improved performance.

refactor: clean up WhatsApp Baileys service

- Removed unused properties and interfaces related to message keys.
- Simplified message handling logic by removing redundant checks and conditions.
- Updated message timestamp handling for consistency.
- Improved readability and maintainability by restructuring code and removing commented-out sections.

refactor: optimize Chatwoot service

- Streamlined database queries by reusing PostgreSQL client connection.
- Enhanced conversation creation logic with better cache handling.
- Removed unnecessary methods and improved existing ones for clarity.
- Updated message sending logic to handle file streams instead of buffers.

fix: improve translation loading mechanism

- Simplified translation file loading by removing environment variable checks.
- Ensured translations are loaded from a consistent path within the project structure.
2025-10-12 15:03:48 +00:00
Marlon Alves
b0d261b305 fix/media content validation 2025-10-11 04:13:12 -03:00
Willian Coqueiro
c041986e26 Merge upstream/develop into develop 2025-10-10 02:11:44 +00:00
Davidson Gomes
0976109d27 Merge pull request #2025 from guispiller/main
feat: convert LID to phoneNumber on GROUP_PARTICIPANTS_UPDATE webhook
2025-10-09 15:05:41 -03:00
Davidson Gomes
b808dda33b Merge pull request #2048 from dersonbsb2022/main
feat(chatwoot): comprehensive improvements to message handling, editing, deletion and i18n (translate messages)
2025-10-09 14:59:53 -03:00
Anderson Silva
98b7f15a43 fix(baileys): update to 7.0.0-rc.5 and fix assertSessions signature
Problem:
- GitHub Actions failing: Expected 1 arguments, but got 2
- Local had outdated Baileys 7.0.0-rc.3 in node_modules
- assertSessions signature changed between versions

Solution:
- Fresh npm install with Baileys 7.0.0-rc.5
- Updated assertSessions to pass only jids (no force param)
- Regenerated Prisma Client after reinstall
- Updated package-lock.json for version consistency

Changes:
- assertSessions now receives 1 argument (jids only)
- Kept force param in method signature for API compatibility
- Removed @ts-expect-error directives (no longer needed)

Tested:
- Server starts successfully
- Build passes without errors
- Lint passes
2025-10-06 19:30:13 -03:00
Anderson Silva
94ddc0dfbe fix(baileys): use type assertion for assertSessions compatibility
Problem:
- GitHub Actions shows: Expected 1 arguments, but got 2
- Local environment shows: Expected 2 arguments, but got 1
- Different Baileys versions/definitions between environments

Solution:
- Use 'as any' type assertion for force parameter
- Maintains compatibility with both signature variations
- Allows code to work in all environments

Technical notes:
- Local: baileys@7.0.0-rc.5 expects 2 arguments (jids, force)
- GitHub Actions: May have different version/cache expecting 1 argument
- Type assertion bypasses strict type checking for cross-version compatibility
2025-10-06 19:12:32 -03:00
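A sketch of the cross-version cast the commit describes (the wrapper and the exact call site are assumptions; only the two competing assertSessions signatures come from the commit text):

```typescript
import type { WASocket } from 'baileys';

// Baileys 7.0.0-rc.x changed assertSessions between release candidates
// (one argument vs. two). A loose cast keeps the call compiling against
// either type definition; the single-argument variant simply ignores the
// extra argument at runtime.
async function assertSessionsCompat(sock: WASocket, jids: string[], force = true): Promise<void> {
  await (sock.assertSessions as unknown as (jids: string[], force?: boolean) => Promise<unknown>)(jids, force);
}
```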
Anderson Silva
d4b0cfd2ba fix(chatwoot): resolve webhook timeout on deletion with 5+ images
Problem:
- Chatwoot shows red error when deleting messages with 5+ images
- Cause: Chatwoot webhook timeout of 5 seconds
- Processing 5 images takes ~9 seconds
- Duplicate webhooks arrive during processing

Solution:
- Implemented async processing with setImmediate()
- Webhook responds immediately (< 100ms)
- Deletion processes in background without blocking
- Maintains idempotency with cache (1 hour TTL)
- Maintains lock mechanism (60 seconds TTL)

Benefits:
- Scales infinitely (10, 20, 100+ images)
- No timeout regardless of quantity
- No error messages in Chatwoot
- Reliable background processing

Tested:
- 5 images: 9s background processing
- Webhook response: < 100ms
- No red error in Chatwoot
- Deletion completes successfully

BREAKING CHANGE: Fixed assertSessions signature to accept force parameter
2025-10-06 16:14:26 -03:00
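A condensed sketch of the pattern described above (acknowledge first, delete in the background); the handler shape, cache keys, and deletion helper are illustrative assumptions:

```typescript
import { Request, Response } from 'express';

const processedEvents = new Map<string, number>(); // idempotency cache (1 hour TTL in the commit)
const IDEMPOTENCY_TTL_MS = 60 * 60 * 1000;

function chatwootDeleteWebhook(req: Request, res: Response): void {
  const eventId = String(req.body?.id ?? '');

  // Duplicate webhooks that arrive while the first one is still processing
  // are acknowledged and dropped.
  const seenAt = processedEvents.get(eventId);
  if (seenAt && Date.now() - seenAt < IDEMPOTENCY_TTL_MS) {
    res.status(200).json({ status: 'duplicate-ignored' });
    return;
  }
  processedEvents.set(eventId, Date.now());

  // Respond inside Chatwoot's ~5 s webhook timeout, then do the slow work
  // (revoking N attachments on the WhatsApp side) off the request path.
  res.status(200).json({ status: 'accepted' });
  setImmediate(async () => {
    try {
      await deleteMessagesFromWhatsApp(req.body); // hypothetical stand-in for the real deletion logic
    } catch (error) {
      console.error('background deletion failed', error);
    }
  });
}

async function deleteMessagesFromWhatsApp(payload: unknown): Promise<void> {
  /* slow, multi-attachment deletion happens here */
}
```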
dersonbsb2022
a5a46dc72a Merge branch 'develop' into main 2025-10-06 15:21:10 -03:00
Anderson Silva
e13434804c refactor: implement exponential backoff patterns and extract magic numbers to constants
- Extract HTTP timeout constant (60s for large file downloads)
- Extract S3/MinIO retry configuration (3 retries, 1s-8s exponential backoff)
- Extract database polling retry configuration (5 retries, 100ms-2s exponential backoff)
- Extract webhook and lock polling delays to named constants
- Extract cache TTL values (5min for messages, 30min for updates) in Baileys service
- Implement exponential backoff for S3/MinIO downloads following webhook controller pattern
- Implement exponential backoff for database polling removing fixed delays
- Add deletion event lock to prevent race conditions with duplicate webhooks
- Process deletion events immediately (no delay) to fix Chatwoot local storage red error
- Make i18n translations path configurable via TRANSLATIONS_BASE_DIR env variable
- Add detailed logging for deletion events debugging

Addresses code review suggestions from Sourcery AI and Copilot AI:
- Magic numbers extracted to well-documented constants
- Retry configurations consolidated and clearly separated by use case
- S3/MinIO retry uses longer delays (external storage)
- Database polling uses shorter delays (internal operations)
- Fixes Chatwoot local storage deletion error (red message issue)
- Maintains full compatibility with S3/MinIO storage (tested)

Breaking changes: None - all changes are internal improvements
2025-10-06 15:10:38 -03:00
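The retry profiles above translate into a small generic helper; this is a sketch under the commit's stated numbers (3 retries, 1 s to 8 s for S3/MinIO; 5 retries, 100 ms to 2 s for database polling), with the constant and function names being illustrative:

```typescript
// Named retry profiles instead of magic numbers scattered through the code.
const S3_RETRY = { retries: 3, baseDelayMs: 1_000, maxDelayMs: 8_000 };    // external storage
const DB_POLL_RETRY = { retries: 5, baseDelayMs: 100, maxDelayMs: 2_000 }; // internal polling

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

async function withExponentialBackoff<T>(
  operation: () => Promise<T>,
  { retries, baseDelayMs, maxDelayMs }: { retries: number; baseDelayMs: number; maxDelayMs: number },
): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      return await operation();
    } catch (error) {
      lastError = error;
      if (attempt === retries) break;
      // 1x, 2x, 4x ... the base delay, capped at the profile's maximum.
      await sleep(Math.min(baseDelayMs * 2 ** attempt, maxDelayMs));
    }
  }
  throw lastError;
}

// usage: await withExponentialBackoff(() => downloadFromS3(key), S3_RETRY);
```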
Davidson Gomes
53cd7d5d13 chore(deps): update baileys package to version 7.0.0-rc.5
- Bumped baileys dependency version in package.json and package-lock.json to 7.0.0-rc.5 for improved functionality and bug fixes.
- Added p-queue and p-timeout packages for enhanced performance and timeout management.
2025-10-06 14:29:22 -03:00
Spiller
bedfb019aa fix lint 2025-10-06 11:53:50 -03:00
Anderson Silva
6e1d027750 feat(chatwoot): comprehensive improvements to message handling, editing, deletion and i18n
- Fix bidirectional message deletion between Chatwoot and WhatsApp
- Support deletion of multiple attachments sent together
- Implement proper message editing with 'Edited Message:' prefix format
- Enable deletion of edited messages by updating chatwootMessageId
- Skip cache for deleted messages (messageStubType === 1) to prevent duplicates
- Fix i18n translation path detection for production environment
- Add automatic dev/prod path resolution for translation files
- Improve error handling and logging for message operations

Technical improvements:
- Changed Chatwoot deletion query from findFirst to findMany for multiple attachments
- Fixed instanceId override issue in message deletion payload
- Added retry logic with Prisma MessageUpdate validation
- Implemented cache bypass for revoked messages to ensure proper processing
- Enhanced i18n to detect dist/ folder in production vs src/ in development

Resolves issues with:
- Message deletion not working from Chatwoot to WhatsApp
- Multiple attachments causing incomplete deletion
- Edited messages showing raw i18n keys instead of translated text
- Cache collision preventing deletion of edited messages
- Production environment not loading translation files correctly

Note: Tested and validated with Chatwoot v4.1 in production environment
2025-10-03 14:47:24 -03:00
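The findFirst to findMany change is the most mechanical part of this commit; a hedged sketch of what fetching every stored row for one Chatwoot message might look like (the Prisma model and field names are assumptions based on the commit text):

```typescript
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// One Chatwoot message with several attachments maps to several stored
// WhatsApp messages. Querying with findFirst deleted only one of them;
// fetching them all lets every attachment be revoked.
async function findMessagesForChatwootMessage(instanceId: string, chatwootMessageId: number) {
  return prisma.message.findMany({
    where: {
      instanceId,
      chatwootMessageId, // assumed correlation field, named after the commit text
    },
  });
}
```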
Spiller
fb1fa4d91a feat: add participantsData field maintaining backward compatibility
- Keep original participants array (string[]) for backward compatibility
- Add new participantsData field with resolved phone numbers and metadata
- Consumers can migrate gradually from participants to participantsData
- No breaking changes to existing webhook integrations

Payload structure:
- participants: string[] (original JID strings)
- participantsData: object[] (enhanced with phoneNumber, name, imgUrl)
2025-09-30 10:12:14 -03:00
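Written out as types, the payload structure described above looks roughly like this (the event fields beyond participants and participantsData are assumptions):

```typescript
// Original field, kept as-is for backward compatibility.
type Participants = string[]; // raw JID strings, e.g. '123456789@lid'

// New enriched entry added alongside it.
interface ParticipantData {
  jid: string;          // assumed: original JID kept for correlation
  phoneNumber?: string; // LID resolved to a phone number when possible
  name?: string;
  imgUrl?: string;
}

interface GroupParticipantsUpdatePayload {
  action: 'add' | 'remove' | 'promote' | 'demote';
  participants: Participants;            // unchanged, existing consumers keep working
  participantsData?: ParticipantData[];  // new, consumers can migrate gradually
}
```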
Spiller
57ea6707bc feat: convert LID to phoneNumber on GROUP_PARTICIPANTS_UPDATE
2025-09-29 20:50:39 -03:00
Davidson Gomes
ad8df44236 Merge pull request #2023 from Vitordotpy/fix/chatwoot-conversation-handling
fix(chatwoot): Fix Conversation Reopening and Connection Message Loop
2025-09-29 16:08:52 -03:00
Vitordotpy
c132379b3a fix(chatwoot): adjust conversation verification and cache logic
This commit changes the conversation verification logic in the Chatwoot service, ensuring that the search for active conversations takes priority over the cache. The cache check was removed at critical points so that stale conversations are not used, improving the accuracy of data retrieval. In addition, the conversation reopening logic was refined to ensure interactions are handled correctly, keeping the user experience smoother.
2025-09-29 15:26:24 -03:00
Vitordotpy
f7862637b1 fix(chatwoot): optimize conversation reopening logic and connection notification
This commit introduces improvements to the Chatwoot integration, focusing on conversation reopening and connection notification. The logic was refactored to centralize the lookup of open conversations and the reopening of resolved ones, ensuring interactions are not lost. A minimum interval between connection notifications was also implemented, avoiding excessive messages and improving the user experience.
2025-09-28 22:38:45 -03:00
Vitordotpy
0d8e8bc0fb fix(chatwoot): fix conversation reopening and connection loop
This commit addresses two critical issues in the Chatwoot integration to improve stability and the agent experience.

First, conversations already marked as "resolved" in Chatwoot were not automatically reopened when the customer sent a new message. This was fixed so that the system checks the conversation status and reopens it, ensuring no new interaction is lost.

Second, a bug in the connection event handling caused the status message "Connection established successfully" to be sent repeatedly, polluting the conversation history. The logic was adjusted so that this notification is sent only once per connection event.
2025-09-28 22:19:36 -03:00
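The reopening half of this fix maps onto Chatwoot's standard toggle_status endpoint; a minimal sketch (the surrounding client wiring and status check are assumptions):

```typescript
import axios from 'axios';

// Reopen a conversation that Chatwoot has already marked as resolved so a new
// inbound WhatsApp message is not left out of the agent's queue.
async function reopenIfResolved(
  baseUrl: string,
  accountId: number,
  conversationId: number,
  apiAccessToken: string,
  currentStatus: string,
): Promise<void> {
  if (currentStatus !== 'resolved') return;
  await axios.post(
    `${baseUrl}/api/v1/accounts/${accountId}/conversations/${conversationId}/toggle_status`,
    { status: 'open' },
    { headers: { api_access_token: apiAccessToken } },
  );
}
```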
Davidson Gomes
b62917e80f Merge pull request #2021 from Vitordotpy/fix/message-update-and-i18n-errors
fix(baileys): message update and i18n errors
2025-09-26 16:37:24 -03:00
Vitordotpy
eeb324227b fix(baileys): add warning log for messages not found
- Added a warning message in the Baileys service when the original message is not found during an update, improving error traceability.
- Adjusted the translations path check to ensure the correct directory is used, with error handling when it is not found.
2025-09-26 16:12:40 -03:00
Vitordotpy
c31b62fb3d fix(baileys): fix message check in the Baileys service
- Adjusted the check logic so that the message ID is only set when available, avoiding possible reference errors.
- Updated the translations path definition to support the production directory structure.
2025-09-26 16:00:39 -03:00
Davidson Gomes
22465c0a56 fix: fixed incompatibility between the Wavoip voice call feature and the new Baileys version 2025-09-26 13:00:52 -03:00
Davidson Gomes
da6f1bd540 chore(changelog): update CHANGELOG for Baileys v7.0.0-rc.4 and PostgreSQL connection improvements
- Added entry for Baileys version update to v7.0.0-rc.4.
- Refactored PostgreSQL connection handling and enhanced message processing capabilities.
2025-09-26 12:58:36 -03:00
Davidson Gomes
069786b9fe chore(deps): update baileys package to version 7.0.0-rc.4
- Bumped baileys dependency version in package.json and package-lock.json to 7.0.0-rc.4 for improved functionality and bug fixes.
2025-09-26 12:56:40 -03:00
Davidson Gomes
bd0c43feac Merge pull request #2017 from Vitordotpy/fix/enhanced-chatwoot-database-connection
Fix Chatwoot DB Connection Instability and Implement Stale Conversation Cache Handling
2025-09-26 07:35:26 -03:00
Vitordotpy
5dc1d02d0a refactor(chatbot): improve error handling for Chatwoot messages
- Implemented the `handleStaleConversationError` function to centralize the error-handling logic for conversations that are not found.
- Improved the retry logic for the `createMessage` and `sendData` functions, ensuring operations are reprocessed correctly on failure.
- Removed duplicated code and improved the readability of the Chatwoot service.
2025-09-25 17:38:10 -03:00
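A rough sketch of the centralized retry this commit describes (the error detection and function shape are assumptions; only the idea of a single retry after clearing the stale conversation comes from the commit text):

```typescript
// When Chatwoot reports that the cached conversation no longer exists, drop it
// from the cache and retry the operation once with a fresh conversation.
async function withStaleConversationRetry<T>(
  operation: () => Promise<T>,
  invalidateConversationCache: () => Promise<void>,
): Promise<T> {
  try {
    return await operation();
  } catch (error: any) {
    const conversationNotFound = error?.response?.status === 404; // assumed detection
    if (!conversationNotFound) throw error;
    await invalidateConversationCache(); // role of handleStaleConversationError in the commit
    return operation();                  // single retry against a freshly resolved conversation
  }
}
```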
Vitor Manoel Santos Moura
8697329f71 Update src/api/integrations/chatbot/chatwoot/services/chatwoot.service.ts
applied object destructuring, which is a TypeScript best practice

Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com>
2025-09-25 17:30:43 -03:00
Vitor Manoel Santos Moura
58b5561f72 Update src/api/integrations/chatbot/chatwoot/services/chatwoot.service.ts
applied object destructuring, which is a TypeScript best practice

Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com>
2025-09-25 17:30:30 -03:00
Vitordotpy
093515555d refactor(chatbot): refactor the PostgreSQL connection and improve message handling
- Changed the PostgreSQL connection retrieval method to be asynchronous, improving connection management.
- Implemented retry logic for message and conversation creation, providing more robustness on failure.
- Adjusted database query calls to use the new connection approach.
- Added a new `messageBodyForRetry` property to make it easier to resend messages on error.
2025-09-25 17:08:40 -03:00
Davidson Gomes
d8268b0eb1 fix(migration): resolve PostgreSQL migration error for Kafka integration
- Corrected table reference in migration SQL to align with naming conventions.
- Fixed foreign key constraint issue that caused migration failure.
- Ensured successful setup of Kafka integration by addressing database migration errors.
2025-09-24 13:59:23 -03:00
Davidson Gomes
4585850741 chore(release): bump version to 2.3.5 and update bug report template
- Updated package and lock files to version 2.3.5.
- Modified bug report template to reflect the new version number.
- Removed outdated Kafka Docker README file.
2025-09-23 18:42:07 -03:00
Davidson Gomes
6c150eed6d chore(docker): add Kafka and frontend services to Docker configurations
- Introduced Kafka and Zookeeper services in a new docker-compose file for better message handling.
- Added frontend service to both development and production docker-compose files for improved UI management.
- Updated evolution-manager-v2 submodule to the latest commit.
- Updated CHANGELOG for version 2.3.5 release.
2025-09-23 18:40:19 -03:00
Davidson Gomes
78c7b96f0f Merge branch 'release/2.3.4'
2025-09-23 11:42:25 -03:00
Davidson Gomes
dfea584aa7 chore(changelog): update CHANGELOG for version 2.3.4 release
- Enhanced EvolutionBot functionality with splitMessages and linkPreview support
- Centralized message splitting logic across chatbot services for consistency
- Improved message formatting and delivery capabilities
2025-09-23 11:41:51 -03:00
Davidson Gomes
6c5b056615 chore(changelog): remove empty line in CHANGELOG for consistency 2025-09-23 11:37:48 -03:00
Davidson Gomes
d8b4378163 Merge pull request #1986 from dersonbsb2022/main
fix(evolutionbot): use the correct message sending method so that messages are split.
2025-09-22 08:02:11 -03:00
Davidson Gomes
838cc14531 Merge pull request #1989 from JamsMendez/update-docker-compose
fix(docker): change private image to public image in docker-compose
2025-09-21 15:18:13 -03:00
Jose A. Mendez Santiago
878da12fa4 fix(docker): change private image to public image in docker-compose 2025-09-20 22:02:25 -06:00
Anderson Silva
10a2b60595 refactor(chatbot): centralize split logic and ensure linkPreview consistency
- Centralize double-line-break message splitting logic into dedicated helper methods
- Add targeted debug logs for better observability without clutter
- Ensure linkPreview parameter is consistently passed across all chatbot services
- Extract splitMessageByDoubleLineBreaks() and sendSingleMessage() helpers
- Update all chatbot services to explicitly pass linkPreview: true
- Improve code testability and maintainability

Services updated:
- BaseChatbotService: Refactored split logic and added debug logs
- TypebotService: Added linkPreview parameter to all sendMessageWhatsApp calls
- OpenAIService: Added linkPreview parameter to all sendMessageWhatsApp calls
- N8nService: Added linkPreview parameter to sendMessageWhatsApp call
- FlowiseService: Added linkPreview parameter to sendMessageWhatsApp call
- EvoaiService: Added linkPreview parameter to sendMessageWhatsApp call
- DifyService: Added linkPreview parameter to all sendMessageWhatsApp calls
2025-09-20 11:35:15 -03:00
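A sketch of the two helpers named above; the real methods live on BaseChatbotService, so the free-standing signatures here are assumptions:

```typescript
const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Split a chatbot reply on blank lines (double line breaks) so each paragraph
// is delivered as its own WhatsApp message.
function splitMessageByDoubleLineBreaks(text: string): string[] {
  return text
    .split(/\n\s*\n/)
    .map((part) => part.trim())
    .filter((part) => part.length > 0);
}

// Deliver one chunk, preserving the configured delay and the linkPreview flag.
async function sendSingleMessage(
  sendText: (text: string, linkPreview: boolean) => Promise<void>,
  text: string,
  options: { delayMs: number; linkPreview: boolean },
): Promise<void> {
  await sleep(options.delayMs);
  await sendText(text, options.linkPreview);
}

// usage:
// for (const part of splitMessageByDoubleLineBreaks(reply)) {
//   await sendSingleMessage(send, part, { delayMs: 1200, linkPreview: true });
// }
```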
Anderson Silva
b0ca79cd11 fix(evolutionbot): implement splitMessages and linkPreview functionality
- Replace instance.textMessage() with sendMessageWhatsApp() method
- Enable message splitting by double line breaks (\n\n)
- Add proper delay and typing indicators between split messages
- Fix linkPreview parameter passing to base class methods
- Support linkPreview: false/true from webhook response
- Remove unnecessary debug logs for cleaner output

Fixes: EvolutionBot was not respecting splitMessages and linkPreview configurations
2025-09-20 10:57:03 -03:00
Davidson Gomes
71eb189a6d chore(changelog): update CHANGELOG for recent enhancements and fixes
- Fixed `instanceName` field error in message creation, resolving Prisma validation issues.
- Enhanced media message processing across chatbot services, improving base64 conversion and media URL handling.
- Resolved ESLint configuration conflicts in Evolution Manager v2, updating rules and fixing code formatting issues.
- Streamlined media message handling and improved data consistency in database operations.
2025-09-18 17:47:54 -03:00
Davidson Gomes
407d254cf7 refactor(chatbot): streamline media message handling across chatbot services
- Removed redundant instance name references in EvolutionStartupService to enhance data consistency.
- Updated media message processing in various chatbot services to utilize base64 and mediaUrl more effectively, ensuring better handling of image messages.
- Improved overall code readability and maintainability by simplifying media handling logic.
2025-09-18 17:46:47 -03:00
Davidson Gomes
5f44da61fb feat(evolution-manager): add evolution-manager-v2 as a submodule and update changelog
- Introduced evolution-manager-v2 as a git submodule for easier access and integration.
- Updated CHANGELOG to reflect the addition of evolution-manager-v2 with details on its features and open-source setup.
- Adjusted media message handling in EvolutionStartupService to use null instead of undefined for base64 and mediaUrl properties, ensuring better data consistency.
2025-09-18 17:00:15 -03:00
Davidson Gomes
41a36bbb19 feat(changelog): update CHANGELOG for version 2.3.4
- Added Apache Kafka integration for real-time event streaming, including a new controller, router, and schema.
- Fixed MySQL schema issues related to default values and added missing relation fields in the Instance model.
- Introduced new environment variables for comprehensive Kafka configuration.
2025-09-18 15:48:27 -03:00
Davidson Gomes
8ab41fcfc9 feat(kafka): add Kafka integration for event streaming
- Introduced Kafka support in the Evolution API, allowing for real-time event streaming and processing.
- Updated environment configuration to include Kafka-related variables.
- Added KafkaController and KafkaRouter for managing Kafka events.
- Enhanced event management to support Kafka alongside existing integrations.
- Updated database schemas and migrations for Kafka integration in both MySQL and PostgreSQL.
- Documented Kafka integration in the README file.
2025-09-18 15:44:56 -03:00
Davidson Gomes
5e08628d89 refactor(eslint): change unused vars rule to error and update error handling in services
- Update ESLint configuration to set `@typescript-eslint/no-unused-vars` from 'warn' to 'error' for stricter linting.
- Refactor error handling in various services to omit error variable in catch blocks for cleaner code.
2025-09-18 14:59:33 -03:00
61 changed files with 5539 additions and 4074 deletions

View File

@@ -190,6 +190,60 @@ PUSHER_EVENTS_CALL=true
PUSHER_EVENTS_TYPEBOT_START=false
PUSHER_EVENTS_TYPEBOT_CHANGE_STATUS=false
# Kafka - Environment variables
KAFKA_ENABLED=false
KAFKA_CLIENT_ID=evolution-api
KAFKA_BROKERS=localhost:9092
KAFKA_CONNECTION_TIMEOUT=3000
KAFKA_REQUEST_TIMEOUT=30000
# Global events - By enabling this variable, events from all instances are sent to global Kafka topics.
KAFKA_GLOBAL_ENABLED=false
KAFKA_CONSUMER_GROUP_ID=evolution-api-consumers
KAFKA_TOPIC_PREFIX=evolution
KAFKA_NUM_PARTITIONS=1
KAFKA_REPLICATION_FACTOR=1
KAFKA_AUTO_CREATE_TOPICS=false
# Choose the events you want to send to Kafka
KAFKA_EVENTS_APPLICATION_STARTUP=false
KAFKA_EVENTS_INSTANCE_CREATE=false
KAFKA_EVENTS_INSTANCE_DELETE=false
KAFKA_EVENTS_QRCODE_UPDATED=false
KAFKA_EVENTS_MESSAGES_SET=false
KAFKA_EVENTS_MESSAGES_UPSERT=false
KAFKA_EVENTS_MESSAGES_EDITED=false
KAFKA_EVENTS_MESSAGES_UPDATE=false
KAFKA_EVENTS_MESSAGES_DELETE=false
KAFKA_EVENTS_SEND_MESSAGE=false
KAFKA_EVENTS_SEND_MESSAGE_UPDATE=false
KAFKA_EVENTS_CONTACTS_SET=false
KAFKA_EVENTS_CONTACTS_UPSERT=false
KAFKA_EVENTS_CONTACTS_UPDATE=false
KAFKA_EVENTS_PRESENCE_UPDATE=false
KAFKA_EVENTS_CHATS_SET=false
KAFKA_EVENTS_CHATS_UPSERT=false
KAFKA_EVENTS_CHATS_UPDATE=false
KAFKA_EVENTS_CHATS_DELETE=false
KAFKA_EVENTS_GROUPS_UPSERT=false
KAFKA_EVENTS_GROUPS_UPDATE=false
KAFKA_EVENTS_GROUP_PARTICIPANTS_UPDATE=false
KAFKA_EVENTS_CONNECTION_UPDATE=false
KAFKA_EVENTS_LABELS_EDIT=false
KAFKA_EVENTS_LABELS_ASSOCIATION=false
KAFKA_EVENTS_CALL=false
KAFKA_EVENTS_TYPEBOT_START=false
KAFKA_EVENTS_TYPEBOT_CHANGE_STATUS=false
# SASL Authentication (optional)
KAFKA_SASL_ENABLED=false
KAFKA_SASL_MECHANISM=plain
KAFKA_SASL_USERNAME=
KAFKA_SASL_PASSWORD=
# SSL Configuration (optional)
KAFKA_SSL_ENABLED=false
KAFKA_SSL_REJECT_UNAUTHORIZED=true
KAFKA_SSL_CA=
KAFKA_SSL_KEY=
KAFKA_SSL_CERT=
# WhatsApp Business API - Environment variables
# Token used to validate the webhook on the Facebook APP
WA_BUSINESS_TOKEN_WEBHOOK=evolution
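These variables map fairly directly onto a kafkajs client (kafkajs is added to package.json further down in this diff); a minimal sketch, with the topic naming and message shape assumed:

```typescript
import { Kafka, logLevel } from 'kafkajs';

// Build the client from the environment variables introduced above.
const kafka = new Kafka({
  clientId: process.env.KAFKA_CLIENT_ID ?? 'evolution-api',
  brokers: (process.env.KAFKA_BROKERS ?? 'localhost:9092').split(','),
  connectionTimeout: Number(process.env.KAFKA_CONNECTION_TIMEOUT ?? 3000),
  requestTimeout: Number(process.env.KAFKA_REQUEST_TIMEOUT ?? 30000),
  ssl: process.env.KAFKA_SSL_ENABLED === 'true',
  sasl:
    process.env.KAFKA_SASL_ENABLED === 'true'
      ? {
          mechanism: 'plain',
          username: process.env.KAFKA_SASL_USERNAME ?? '',
          password: process.env.KAFKA_SASL_PASSWORD ?? '',
        }
      : undefined,
  logLevel: logLevel.ERROR,
});

// Publish one event to a prefixed topic, e.g. 'evolution.messages-upsert'.
async function publishEvent(event: string, payload: unknown): Promise<void> {
  const producer = kafka.producer();
  await producer.connect();
  await producer.send({
    topic: `${process.env.KAFKA_TOPIC_PREFIX ?? 'evolution'}.${event}`,
    messages: [{ value: JSON.stringify(payload) }],
  });
  await producer.disconnect();
}
```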

View File

@@ -26,7 +26,7 @@ module.exports = {
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/no-unused-vars': 'warn',
'@typescript-eslint/no-unused-vars': 'error',
'import/first': 'error',
'import/no-duplicates': 'error',
'simple-import-sort/imports': 'error',

View File

@@ -59,7 +59,7 @@ body:
value: |
- OS: [e.g. Ubuntu 20.04, Windows 10, macOS 12.0]
- Node.js version: [e.g. 18.17.0]
- Evolution API version: [e.g. 2.3.3]
- Evolution API version: [e.g. 2.3.5]
- Database: [e.g. PostgreSQL 14, MySQL 8.0]
- Connection type: [e.g. Baileys, WhatsApp Business API]
validations:

View File

@@ -13,6 +13,8 @@ jobs:
steps:
- uses: actions/checkout@v5
with:
submodules: recursive
- name: Install Node
uses: actions/setup-node@v5

View File

@@ -15,6 +15,8 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v5
with:
submodules: recursive
- name: Docker meta
id: meta

View File

@@ -15,6 +15,8 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v5
with:
submodules: recursive
- name: Docker meta
id: meta

View File

@@ -15,6 +15,8 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v5
with:
submodules: recursive
- name: Docker meta
id: meta

View File

@@ -26,6 +26,8 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v5
with:
submodules: recursive
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
@@ -47,5 +49,7 @@ jobs:
steps:
- name: Checkout Repository
uses: actions/checkout@v5
with:
submodules: recursive
- name: Dependency Review
uses: actions/dependency-review-action@v4

3
.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "evolution-manager-v2"]
path = evolution-manager-v2
url = https://github.com/EvolutionAPI/evolution-manager-v2.git

View File

@@ -1,3 +1,117 @@
# 2.3.5 (2025-10-15)
### Features
* **Chatwoot Enhancements**: Comprehensive improvements to message handling, editing, deletion and i18n
* **Participants Data**: Add participantsData field maintaining backward compatibility for group participants
* **LID to Phone Number**: Convert LID to phoneNumber on group participants
* **Docker Configurations**: Add Kafka and frontend services to Docker configurations
### Fixed
* **Kafka Migration**: Fixed PostgreSQL migration error for Kafka integration
- Corrected table reference from `"public"."Instance"` to `"Instance"` in foreign key constraint
- Fixed `ERROR: relation "public.Instance" does not exist` issue in migration `20250918182355_add_kafka_integration`
- Aligned table naming convention with other Evolution API migrations for consistency
- Resolved database migration failure that prevented Kafka integration setup
* **Update Baileys Version**: v7.0.0-rc.5 with compatibility fixes
- Fixed assertSessions signature compatibility using type assertion
- Fixed incompatibility in voice call (wavoip) with new Baileys version
- Handle undefined status in update by defaulting to 'DELETED'
* **Chatwoot Improvements**: Multiple fixes for enhanced reliability
- Correct chatId extraction for non-group JIDs
- Resolve webhook timeout on deletion with 5+ images
- Improve error handling in Chatwoot messages
- Adjust conversation verification logic and cache
- Optimize conversation reopening logic and connection notification
- Fix conversation reopening and connection loop
* **Baileys Message Handling**: Enhanced message processing
- Add warning log for messages not found
- Fix message verification in Baileys service
- Simplify linkPreview handling in BaileysStartupService
* **Media Validation**: Fix media content validation
* **PostgreSQL Connection**: Refactor connection with PostgreSQL and improve message handling
### Code Quality & Refactoring
* **Exponential Backoff**: Implement exponential backoff patterns and extract magic numbers to constants
* **TypeScript Build**: Update TypeScript build process and dependencies
###
# 2.3.4 (2025-09-23)
### Features
* **Kafka Integration**: Added Apache Kafka event integration for real-time event streaming
- New Kafka controller, router, and schema for event publishing
- Support for instance-specific and global event topics
- Configurable SASL/SSL authentication and connection settings
- Auto-creation of topics with configurable partitions and replication
- Consumer group management for reliable event processing
- Integration with existing event manager for seamless event distribution
* **Evolution Manager v2 Open Source**: Evolution Manager v2 is now available as open source
- Added as git submodule with HTTPS URL for easy access
- Complete open source setup with Apache 2.0 license + Evolution API custom conditions
- GitHub templates for issues, pull requests, and workflows
- Comprehensive documentation and contribution guidelines
- Docker support for development and production environments
- CI/CD workflows for code quality, security audits, and automated builds
- Multi-language support (English, Portuguese, Spanish, French)
- Modern React + TypeScript + Vite frontend with Tailwind CSS
* **EvolutionBot Enhancements**: Improved EvolutionBot functionality and message handling
- Implemented splitMessages functionality for better message segmentation
- Added linkPreview support for enhanced message presentation
- Centralized split logic across chatbot services for consistency
- Enhanced message formatting and delivery capabilities
### Fixed
* **MySQL Schema**: Fixed invalid default value errors for `createdAt` fields in `Evoai` and `EvoaiSetting` models
- Changed `@default(now())` to `@default(dbgenerated("CURRENT_TIMESTAMP"))` for MySQL compatibility
- Added missing relation fields (`N8n`, `N8nSetting`, `Evoai`, `EvoaiSetting`) in Instance model
- Resolved Prisma schema validation errors for MySQL provider
* **Prisma Schema Validation**: Fixed `instanceName` field error in message creation
- Removed invalid `instanceName` field from message objects before database insertion
- Resolved `Unknown argument 'instanceName'` Prisma validation error
- Streamlined message data structure to match Prisma schema requirements
* **Media Message Processing**: Enhanced media handling across chatbot services
- Fixed base64 conversion in EvoAI service for proper image processing
- Converted ArrayBuffer to base64 string using `Buffer.from().toString('base64')`
- Improved media URL handling and base64 encoding for better chatbot integration
- Enhanced image message detection and processing workflow
* **Evolution Manager v2 Linting**: Resolved ESLint configuration conflicts
- Disabled conflicting Prettier rules in ESLint configuration
- Added comprehensive rule overrides for TypeScript and React patterns
- Fixed import ordering and code formatting issues
- Updated security vulnerabilities in dependencies (Vite, esbuild)
### Code Quality & Refactoring
* **Chatbot Services**: Streamlined media message handling across all chatbot integrations
- Standardized base64 and mediaUrl processing patterns
- Improved code readability and maintainability in media handling logic
- Enhanced error handling for media download and conversion processes
- Unified image message detection across different chatbot services
* **Database Operations**: Improved data consistency and validation
- Enhanced Prisma schema compliance across all message operations
- Removed redundant instance name references for better data integrity
- Optimized message creation workflow with proper field validation
### Environment Variables
* Added comprehensive Kafka configuration options:
- `KAFKA_ENABLED`, `KAFKA_CLIENT_ID`, `KAFKA_BROKERS`
- `KAFKA_CONSUMER_GROUP_ID`, `KAFKA_TOPIC_PREFIX`
- `KAFKA_SASL_*` and `KAFKA_SSL_*` for authentication
- `KAFKA_EVENTS_*` for event type configuration
# 2.3.3 (2025-09-18)
### Features

View File

@@ -0,0 +1,51 @@
version: '3.3'
services:
zookeeper:
container_name: zookeeper
image: confluentinc/cp-zookeeper:7.5.0
environment:
- ZOOKEEPER_CLIENT_PORT=2181
- ZOOKEEPER_TICK_TIME=2000
- ZOOKEEPER_SYNC_LIMIT=2
volumes:
- zookeeper_data:/var/lib/zookeeper/
ports:
- 2181:2181
kafka:
container_name: kafka
image: confluentinc/cp-kafka:7.5.0
depends_on:
- zookeeper
environment:
- KAFKA_BROKER_ID=1
- KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,OUTSIDE:PLAINTEXT
- KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092,OUTSIDE://host.docker.internal:9094
- KAFKA_INTER_BROKER_LISTENER_NAME=PLAINTEXT
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0
- KAFKA_AUTO_CREATE_TOPICS_ENABLE=true
- KAFKA_LOG_RETENTION_HOURS=168
- KAFKA_LOG_SEGMENT_BYTES=1073741824
- KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS=300000
- KAFKA_COMPRESSION_TYPE=gzip
ports:
- 29092:29092
- 9092:9092
- 9094:9094
volumes:
- kafka_data:/var/lib/kafka/data
volumes:
zookeeper_data:
kafka_data:
networks:
evolution-net:
name: evolution-net
driver: bridge

View File

@@ -2,7 +2,7 @@ version: "3.7"
services:
evolution_v2:
image: evoapicloud/evolution-api:v2.3.1
image: evoapicloud/evolution-api:v2.3.5
volumes:
- evolution_instances:/evolution/instances
networks:

View File

@@ -17,5 +17,5 @@ b. Your contributed code may be used for commercial purposes, including but not
Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0. Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0.
© 2024 Evolution API
© 2025 Evolution API

View File

@@ -55,6 +55,9 @@ Evolution API supports various integrations to enhance its functionality. Below
- [RabbitMQ](https://www.rabbitmq.com/):
- Receive events from the Evolution API via RabbitMQ.
- [Apache Kafka](https://kafka.apache.org/):
- Receive events from the Evolution API via Apache Kafka for real-time event streaming and processing.
- [Amazon SQS](https://aws.amazon.com/pt/sqs/):
- Receive events from the Evolution API via Amazon SQS.

View File

@@ -15,6 +15,16 @@ services:
expose:
- 8080
frontend:
container_name: evolution_frontend
image: evolution/manager:local
build: ./evolution-manager-v2
restart: always
ports:
- "3000:80"
networks:
- evolution-net
volumes:
evolution_instances:

View File

@@ -3,7 +3,7 @@ version: "3.8"
services:
api:
container_name: evolution_api
image: evolution/api:metrics
image: evoapicloud/evolution-api:latest
restart: always
depends_on:
- redis
@@ -20,6 +20,15 @@ services:
expose:
- "8080"
frontend:
container_name: evolution_frontend
image: evoapicloud/evolution-manager:latest
restart: always
ports:
- "3000:80"
networks:
- evolution-net
redis:
container_name: evolution_redis
image: redis:latest

302
env.example Normal file
View File

@@ -0,0 +1,302 @@
# ===========================================
# EVOLUTION API - CONFIGURAÇÃO DE AMBIENTE
# ===========================================
# ===========================================
# SERVIDOR
# ===========================================
SERVER_NAME=evolution
SERVER_TYPE=http
SERVER_PORT=8080
SERVER_URL=http://localhost:8080
SERVER_DISABLE_DOCS=false
SERVER_DISABLE_MANAGER=false
# ===========================================
# CORS
# ===========================================
CORS_ORIGIN=*
CORS_METHODS=POST,GET,PUT,DELETE
CORS_CREDENTIALS=true
# ===========================================
# SSL (opcional)
# ===========================================
SSL_CONF_PRIVKEY=
SSL_CONF_FULLCHAIN=
# ===========================================
# BANCO DE DADOS
# ===========================================
DATABASE_PROVIDER=postgresql
DATABASE_CONNECTION_URI=postgresql://username:password@localhost:5432/evolution_api
DATABASE_CONNECTION_CLIENT_NAME=evolution
# Configurações de salvamento de dados
DATABASE_SAVE_DATA_INSTANCE=true
DATABASE_SAVE_DATA_NEW_MESSAGE=true
DATABASE_SAVE_MESSAGE_UPDATE=true
DATABASE_SAVE_DATA_CONTACTS=true
DATABASE_SAVE_DATA_CHATS=true
DATABASE_SAVE_DATA_HISTORIC=true
DATABASE_SAVE_DATA_LABELS=true
DATABASE_SAVE_IS_ON_WHATSAPP=true
DATABASE_SAVE_IS_ON_WHATSAPP_DAYS=7
DATABASE_DELETE_MESSAGE=false
# ===========================================
# REDIS
# ===========================================
CACHE_REDIS_ENABLED=true
CACHE_REDIS_URI=redis://localhost:6379
CACHE_REDIS_PREFIX_KEY=evolution-cache
CACHE_REDIS_TTL=604800
CACHE_REDIS_SAVE_INSTANCES=true
# Cache local (fallback)
CACHE_LOCAL_ENABLED=true
CACHE_LOCAL_TTL=86400
# ===========================================
# AUTENTICAÇÃO
# ===========================================
AUTHENTICATION_API_KEY=BQYHJGJHJ
AUTHENTICATION_EXPOSE_IN_FETCH_INSTANCES=false
# ===========================================
# LOGS
# ===========================================
LOG_LEVEL=ERROR,WARN,DEBUG,INFO,LOG,VERBOSE,DARK,WEBHOOKS,WEBSOCKET
LOG_COLOR=true
LOG_BAILEYS=error
# ===========================================
# INSTÂNCIAS
# ===========================================
DEL_INSTANCE=false
DEL_TEMP_INSTANCES=true
# ===========================================
# IDIOMA
# ===========================================
LANGUAGE=pt-BR
# ===========================================
# WEBHOOK
# ===========================================
WEBHOOK_GLOBAL_URL=
WEBHOOK_GLOBAL_ENABLED=false
WEBHOOK_GLOBAL_WEBHOOK_BY_EVENTS=false
# Eventos de webhook
WEBHOOK_EVENTS_APPLICATION_STARTUP=false
WEBHOOK_EVENTS_INSTANCE_CREATE=false
WEBHOOK_EVENTS_INSTANCE_DELETE=false
WEBHOOK_EVENTS_QRCODE_UPDATED=false
WEBHOOK_EVENTS_MESSAGES_SET=false
WEBHOOK_EVENTS_MESSAGES_UPSERT=false
WEBHOOK_EVENTS_MESSAGES_EDITED=false
WEBHOOK_EVENTS_MESSAGES_UPDATE=false
WEBHOOK_EVENTS_MESSAGES_DELETE=false
WEBHOOK_EVENTS_SEND_MESSAGE=false
WEBHOOK_EVENTS_SEND_MESSAGE_UPDATE=false
WEBHOOK_EVENTS_CONTACTS_SET=false
WEBHOOK_EVENTS_CONTACTS_UPDATE=false
WEBHOOK_EVENTS_CONTACTS_UPSERT=false
WEBHOOK_EVENTS_PRESENCE_UPDATE=false
WEBHOOK_EVENTS_CHATS_SET=false
WEBHOOK_EVENTS_CHATS_UPDATE=false
WEBHOOK_EVENTS_CHATS_UPSERT=false
WEBHOOK_EVENTS_CHATS_DELETE=false
WEBHOOK_EVENTS_CONNECTION_UPDATE=false
WEBHOOK_EVENTS_LABELS_EDIT=false
WEBHOOK_EVENTS_LABELS_ASSOCIATION=false
WEBHOOK_EVENTS_GROUPS_UPSERT=false
WEBHOOK_EVENTS_GROUPS_UPDATE=false
WEBHOOK_EVENTS_GROUP_PARTICIPANTS_UPDATE=false
WEBHOOK_EVENTS_CALL=false
WEBHOOK_EVENTS_TYPEBOT_START=false
WEBHOOK_EVENTS_TYPEBOT_CHANGE_STATUS=false
WEBHOOK_EVENTS_ERRORS=false
WEBHOOK_EVENTS_ERRORS_WEBHOOK=
# Configurações de webhook
WEBHOOK_REQUEST_TIMEOUT_MS=30000
WEBHOOK_RETRY_MAX_ATTEMPTS=10
WEBHOOK_RETRY_INITIAL_DELAY_SECONDS=5
WEBHOOK_RETRY_USE_EXPONENTIAL_BACKOFF=true
WEBHOOK_RETRY_MAX_DELAY_SECONDS=300
WEBHOOK_RETRY_JITTER_FACTOR=0.2
WEBHOOK_RETRY_NON_RETRYABLE_STATUS_CODES=400,401,403,404,422
# ===========================================
# WEBSOCKET
# ===========================================
WEBSOCKET_ENABLED=true
WEBSOCKET_GLOBAL_EVENTS=true
WEBSOCKET_ALLOWED_HOSTS=
# ===========================================
# RABBITMQ
# ===========================================
RABBITMQ_ENABLED=false
RABBITMQ_GLOBAL_ENABLED=false
RABBITMQ_PREFIX_KEY=
RABBITMQ_EXCHANGE_NAME=evolution_exchange
RABBITMQ_URI=
RABBITMQ_FRAME_MAX=8192
# ===========================================
# NATS
# ===========================================
NATS_ENABLED=false
NATS_GLOBAL_ENABLED=false
NATS_PREFIX_KEY=
NATS_EXCHANGE_NAME=evolution_exchange
NATS_URI=
# ===========================================
# SQS
# ===========================================
SQS_ENABLED=false
SQS_GLOBAL_ENABLED=false
SQS_GLOBAL_FORCE_SINGLE_QUEUE=false
SQS_GLOBAL_PREFIX_NAME=global
SQS_ACCESS_KEY_ID=
SQS_SECRET_ACCESS_KEY=
SQS_ACCOUNT_ID=
SQS_REGION=
SQS_MAX_PAYLOAD_SIZE=1048576
# ===========================================
# PUSHER
# ===========================================
PUSHER_ENABLED=false
PUSHER_GLOBAL_ENABLED=false
PUSHER_GLOBAL_APP_ID=
PUSHER_GLOBAL_KEY=
PUSHER_GLOBAL_SECRET=
PUSHER_GLOBAL_CLUSTER=
PUSHER_GLOBAL_USE_TLS=false
# ===========================================
# WHATSAPP BUSINESS
# ===========================================
WA_BUSINESS_TOKEN_WEBHOOK=evolution
WA_BUSINESS_URL=https://graph.facebook.com
WA_BUSINESS_VERSION=v18.0
WA_BUSINESS_LANGUAGE=en
# ===========================================
# CONFIGURAÇÕES DE SESSÃO
# ===========================================
CONFIG_SESSION_PHONE_CLIENT=Evolution API
CONFIG_SESSION_PHONE_NAME=Chrome
# ===========================================
# QR CODE
# ===========================================
QRCODE_LIMIT=30
QRCODE_COLOR=#198754
# ===========================================
# INTEGRAÇÕES
# ===========================================
# Typebot
TYPEBOT_ENABLED=false
TYPEBOT_API_VERSION=old
TYPEBOT_SEND_MEDIA_BASE64=false
# Chatwoot
CHATWOOT_ENABLED=false
CHATWOOT_MESSAGE_DELETE=false
CHATWOOT_MESSAGE_READ=false
CHATWOOT_BOT_CONTACT=true
CHATWOOT_IMPORT_DATABASE_CONNECTION_URI=
CHATWOOT_IMPORT_PLACEHOLDER_MEDIA_MESSAGE=false
# OpenAI
OPENAI_ENABLED=false
OPENAI_API_KEY_GLOBAL=
# Dify
DIFY_ENABLED=false
# N8N
N8N_ENABLED=false
# EvoAI
EVOAI_ENABLED=false
# Flowise
FLOWISE_ENABLED=false
# ===========================================
# S3 / MINIO
# ===========================================
S3_ENABLED=false
S3_ACCESS_KEY=
S3_SECRET_KEY=
S3_ENDPOINT=
S3_BUCKET=
S3_PORT=9000
S3_USE_SSL=false
S3_REGION=
S3_SKIP_POLICY=false
S3_SAVE_VIDEO=false
# ===========================================
# MÉTRICAS
# ===========================================
PROMETHEUS_METRICS=false
METRICS_AUTH_REQUIRED=false
METRICS_USER=
METRICS_PASSWORD=
METRICS_ALLOWED_IPS=
# ===========================================
# TELEMETRIA
# ===========================================
TELEMETRY_ENABLED=true
TELEMETRY_URL=
# ===========================================
# PROXY
# ===========================================
PROXY_HOST=
PROXY_PORT=
PROXY_PROTOCOL=
PROXY_USERNAME=
PROXY_PASSWORD=
# ===========================================
# CONVERSOR DE ÁUDIO
# ===========================================
API_AUDIO_CONVERTER=
API_AUDIO_CONVERTER_KEY=
# ===========================================
# FACEBOOK
# ===========================================
FACEBOOK_APP_ID=
FACEBOOK_CONFIG_ID=
FACEBOOK_USER_TOKEN=
# ===========================================
# SENTRY
# ===========================================
SENTRY_DSN=
# ===========================================
# EVENT EMITTER
# ===========================================
EVENT_EMITTER_MAX_LISTENERS=50
# ===========================================
# PROVIDER
# ===========================================
PROVIDER_ENABLED=false
PROVIDER_HOST=
PROVIDER_PORT=5656
PROVIDER_PREFIX=evolution

1
evolution-manager-v2 Submodule

Submodule evolution-manager-v2 added at f054b9bc28

Binary file not shown.

Image updated (before: 17 KiB, after: 25 KiB)

485
manager/dist/assets/index-CO3NSIFj.js vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -5,8 +5,8 @@
<link rel="icon" type="image/png" href="https://evolution-api.com/files/evo/favicon.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Evolution Manager</title>
<script type="module" crossorigin src="/assets/index-D-oOjDYe.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-CXH2BdD4.css">
<script type="module" crossorigin src="/assets/index-CO3NSIFj.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-DsIrum0U.css">
</head>
<body>
<div id="root"></div>

8
manager_install.sh Executable file
View File

@@ -0,0 +1,8 @@
#! /bin/bash
cd evolution-manager-v2
npm install
npm run build
cd ..
rm -rf manager/dist
cp -r evolution-manager-v2/dist manager/dist

5930
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "evolution-api",
"version": "2.3.3",
"version": "2.3.5",
"description": "Rest api for communication with WhatsApp",
"main": "./dist/main.js",
"type": "commonjs",
@@ -56,7 +56,7 @@
"eslint --fix"
],
"src/**/*.ts": [
"sh -c 'npm run build'"
"sh -c 'tsc --noEmit'"
]
},
"config": {
@@ -77,7 +77,7 @@
"amqplib": "^0.10.5",
"audio-decode": "^2.2.3",
"axios": "^1.7.9",
"baileys": "^7.0.0-rc.3",
"baileys": "^7.0.0-rc.5",
"class-validator": "^0.14.1",
"compression": "^1.7.5",
"cors": "^2.8.5",
@@ -95,6 +95,7 @@
"json-schema": "^0.4.0",
"jsonschema": "^1.4.1",
"jsonwebtoken": "^9.0.2",
"kafkajs": "^2.2.4",
"link-preview-js": "^3.0.13",
"long": "^5.2.3",
"mediainfo.js": "^0.3.4",

View File

@@ -0,0 +1,231 @@
/*
Warnings:
- You are about to alter the column `createdAt` on the `Chat` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Chat` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Chatwoot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Chatwoot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Contact` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Contact` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Dify` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Dify` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `DifySetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `DifySetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Evoai` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Evoai` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `EvoaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `EvoaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `EvolutionBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `EvolutionBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `EvolutionBotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `EvolutionBotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Flowise` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Flowise` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `FlowiseSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `FlowiseSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `disconnectionAt` on the `Instance` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Instance` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Instance` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `IntegrationSession` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `IntegrationSession` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to drop the column `lid` on the `IsOnWhatsapp` table. All the data in the column will be lost.
- You are about to alter the column `createdAt` on the `IsOnWhatsapp` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `IsOnWhatsapp` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Label` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Label` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Media` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `N8n` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `N8n` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `N8nSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `N8nSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Nats` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Nats` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `OpenaiBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `OpenaiBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `OpenaiCreds` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `OpenaiCreds` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `OpenaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `OpenaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Proxy` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Proxy` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Pusher` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Pusher` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Rabbitmq` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Rabbitmq` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Session` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Setting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Setting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Sqs` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Sqs` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Template` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Template` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to drop the column `splitMessages` on the `Typebot` table. All the data in the column will be lost.
- You are about to drop the column `timePerChar` on the `Typebot` table. All the data in the column will be lost.
- You are about to alter the column `createdAt` on the `Typebot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Typebot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to drop the column `splitMessages` on the `TypebotSetting` table. All the data in the column will be lost.
- You are about to drop the column `timePerChar` on the `TypebotSetting` table. All the data in the column will be lost.
- You are about to alter the column `createdAt` on the `TypebotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `TypebotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Webhook` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Webhook` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Websocket` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Websocket` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
*/
-- DropIndex
DROP INDEX `unique_remote_instance` ON `Chat`;
-- AlterTable
ALTER TABLE `Chat` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `Chatwoot` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Contact` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `Dify` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `DifySetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Evoai` MODIFY `triggerType` ENUM('all', 'keyword', 'none', 'advanced') NULL,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `EvoaiSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `EvolutionBot` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `EvolutionBotSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Flowise` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `FlowiseSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Instance` MODIFY `disconnectionAt` TIMESTAMP NULL,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `IntegrationSession` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `IsOnWhatsapp` DROP COLUMN `lid`,
MODIFY `createdAt` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Label` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Media` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP;
-- AlterTable
ALTER TABLE `N8n` MODIFY `triggerType` ENUM('all', 'keyword', 'none', 'advanced') NULL,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `N8nSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Nats` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `OpenaiBot` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `OpenaiCreds` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `OpenaiSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Proxy` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Pusher` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Rabbitmq` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Session` MODIFY `createdAt` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP;
-- AlterTable
ALTER TABLE `Setting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Sqs` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Template` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Typebot` DROP COLUMN `splitMessages`,
DROP COLUMN `timePerChar`,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `TypebotSetting` DROP COLUMN `splitMessages`,
DROP COLUMN `timePerChar`,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Webhook` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Websocket` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- CreateTable
CREATE TABLE `Kafka` (
`id` VARCHAR(191) NOT NULL,
`enabled` BOOLEAN NOT NULL DEFAULT false,
`events` JSON NOT NULL,
`createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
`updatedAt` TIMESTAMP NOT NULL,
`instanceId` VARCHAR(191) NOT NULL,
UNIQUE INDEX `Kafka_instanceId_key`(`instanceId`),
PRIMARY KEY (`id`)
) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
-- AddForeignKey
ALTER TABLE `Kafka` ADD CONSTRAINT `Kafka_instanceId_fkey` FOREIGN KEY (`instanceId`) REFERENCES `Instance`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -88,6 +88,7 @@ model Instance {
Rabbitmq Rabbitmq?
Nats Nats?
Sqs Sqs?
Kafka Kafka?
Websocket Websocket?
Typebot Typebot[]
Session Session?
@@ -105,8 +106,11 @@ model Instance {
EvolutionBotSetting EvolutionBotSetting?
Flowise Flowise[]
FlowiseSetting FlowiseSetting?
Pusher Pusher?
N8n N8n[]
N8nSetting N8nSetting?
Evoai Evoai[]
EvoaiSetting EvoaiSetting?
Pusher Pusher?
}
model Session {
@@ -309,6 +313,16 @@ model Sqs {
instanceId String @unique
}
model Kafka {
id String @id @default(cuid())
enabled Boolean @default(false)
events Json @db.Json
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String @unique
}
model Websocket {
id String @id @default(cuid())
enabled Boolean @default(false)
@@ -647,7 +661,7 @@ model IsOnWhatsapp {
model N8n {
id String @id @default(cuid())
enabled Boolean @default(true) @db.TinyInt(1)
enabled Boolean @default(true) @db.TinyInt()
description String? @db.VarChar(255)
webhookUrl String? @db.VarChar(255)
basicAuthUser String? @db.VarChar(255)
@@ -666,7 +680,7 @@ model N8n {
triggerType TriggerType?
triggerOperator TriggerOperator?
triggerValue String?
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String
@@ -686,7 +700,7 @@ model N8nSetting {
ignoreJids Json?
splitMessages Boolean? @default(false)
timePerChar Int? @default(50) @db.Int
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Fallback N8n? @relation(fields: [n8nIdFallback], references: [id])
n8nIdFallback String? @db.VarChar(100)
@@ -696,7 +710,7 @@ model N8nSetting {
model Evoai {
id String @id @default(cuid())
enabled Boolean @default(true) @db.TinyInt(1)
enabled Boolean @default(true) @db.TinyInt()
description String? @db.VarChar(255)
agentUrl String? @db.VarChar(255)
apiKey String? @db.VarChar(255)
@@ -714,7 +728,7 @@ model Evoai {
triggerType TriggerType?
triggerOperator TriggerOperator?
triggerValue String?
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String
@@ -734,7 +748,7 @@ model EvoaiSetting {
ignoreJids Json?
splitMessages Boolean? @default(false)
timePerChar Int? @default(50) @db.Int
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Fallback Evoai? @relation(fields: [evoaiIdFallback], references: [id])
evoaiIdFallback String? @db.VarChar(100)

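With the Kafka model added to the schema, per-instance Kafka settings can be managed through the generated Prisma client. A sketch assuming the client exposes the model as prisma.kafka and that instanceId refers to an existing Instance record:

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Enable Kafka for an instance and store the list of events to forward.
export async function enableKafka(instanceId: string, events: string[]) {
  return prisma.kafka.upsert({
    where: { instanceId },
    create: { enabled: true, events, instanceId },
    update: { enabled: true, events },
  });
}
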
View File

@@ -0,0 +1,17 @@
-- CreateTable
CREATE TABLE "Kafka" (
"id" TEXT NOT NULL,
"enabled" BOOLEAN NOT NULL DEFAULT false,
"events" JSONB NOT NULL,
"createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP NOT NULL,
"instanceId" TEXT NOT NULL,
CONSTRAINT "Kafka_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "Kafka_instanceId_key" ON "Kafka"("instanceId");
-- AddForeignKey
ALTER TABLE "Kafka" ADD CONSTRAINT "Kafka_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "Instance"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -88,6 +88,7 @@ model Instance {
Rabbitmq Rabbitmq?
Nats Nats?
Sqs Sqs?
Kafka Kafka?
Websocket Websocket?
Typebot Typebot[]
Session Session?
@@ -312,6 +313,16 @@ model Sqs {
instanceId String @unique
}
model Kafka {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean
events Json @db.JsonB
createdAt DateTime? @default(now()) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String @unique
}
model Websocket {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean

View File

@@ -89,6 +89,7 @@ model Instance {
Rabbitmq Rabbitmq?
Nats Nats?
Sqs Sqs?
Kafka Kafka?
Websocket Websocket?
Typebot Typebot[]
Session Session?
@@ -313,6 +314,16 @@ model Sqs {
instanceId String @unique
}
model Kafka {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean
events Json @db.JsonB
createdAt DateTime? @default(now()) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String @unique
}
model Websocket {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean

View File

@@ -16,6 +16,7 @@ import { Events, wa } from '@api/types/wa.types';
import { AudioConverter, Chatwoot, ConfigService, Openai, S3 } from '@config/env.config';
import { BadRequestException, InternalServerErrorException } from '@exceptions';
import { createJid } from '@utils/createJid';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { isBase64, isURL } from 'class-validator';
import EventEmitter2 from 'eventemitter2';
@@ -171,6 +172,8 @@ export class EvolutionStartupService extends ChannelStartupService {
this.logger.log(messageRaw);
sendTelemetry(`received.message.${messageRaw.messageType ?? 'unknown'}`);
this.sendDataWebhook(Events.MESSAGES_UPSERT, messageRaw);
await chatbotController.emit({
@@ -323,8 +326,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'imageMessage',
@@ -337,8 +340,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'videoMessage',
@@ -351,8 +354,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'audioMessage',
@@ -372,8 +375,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'documentMessage',
@@ -449,7 +452,7 @@ export class EvolutionStartupService extends ChannelStartupService {
}
}
const base64 = messageRaw.message.base64;
const { base64 } = messageRaw.message;
delete messageRaw.message.base64;
if (base64 || file || audioFile) {

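The received.message.<type> telemetry calls are fire-and-forget. A generic sketch of that pattern, not the actual implementation in @utils/sendTelemetry; the collector URL and function name are hypothetical:

import axios from 'axios';

// Hypothetical collector endpoint; the real transport lives in @utils/sendTelemetry.
const TELEMETRY_URL = 'https://telemetry.example.com/collect';

export function trackEvent(route: string): void {
  // Not awaited and errors are swallowed, so telemetry can never delay or break message handling.
  axios.post(TELEMETRY_URL, { route, timestamp: Date.now() }).catch(() => undefined);
}
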
View File

@@ -24,6 +24,7 @@ import { AudioConverter, Chatwoot, ConfigService, Database, Openai, S3, WaBusine
import { BadRequestException, InternalServerErrorException } from '@exceptions';
import { createJid } from '@utils/createJid';
import { status } from '@utils/renderStatus';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { arrayUnique, isURL } from 'class-validator';
import EventEmitter2 from 'eventemitter2';
@@ -655,6 +656,8 @@ export class BusinessStartupService extends ChannelStartupService {
this.logger.log(messageRaw);
sendTelemetry(`received.message.${messageRaw.messageType ?? 'unknown'}`);
this.sendDataWebhook(Events.MESSAGES_UPSERT, messageRaw);
await chatbotController.emit({

View File

@@ -71,7 +71,7 @@ export const useVoiceCallsBaileys = async (
socket.on('assertSessions', async (jids, force, callback) => {
try {
const response = await baileys_sock.assertSessions(jids, force);
const response = await baileys_sock.assertSessions(jids);
callback(response);

View File

@@ -85,6 +85,7 @@ import { fetchLatestWaWebVersion } from '@utils/fetchLatestWaWebVersion';
import { makeProxyAgent } from '@utils/makeProxyAgent';
import { getOnWhatsappCache, saveOnWhatsappCache } from '@utils/onWhatsappCache';
import { status } from '@utils/renderStatus';
import { sendTelemetry } from '@utils/sendTelemetry';
import useMultiFileAuthStatePrisma from '@utils/use-multi-file-auth-state-prisma';
import { AuthStateProvider } from '@utils/use-multi-file-auth-state-provider-files';
import { useMultiFileAuthStateRedisDb } from '@utils/use-multi-file-auth-state-redis-db';
@@ -152,13 +153,7 @@ import { v4 } from 'uuid';
import { BaileysMessageProcessor } from './baileysMessage.processor';
import { useVoiceCallsBaileys } from './voiceCalls/useVoiceCallsBaileys';
export interface ExtendedMessageKey extends WAMessageKey {
senderPn?: string;
previousRemoteJid?: string | null;
}
export interface ExtendedIMessageKey extends proto.IMessageKey {
senderPn?: string;
remoteJidAlt?: string;
participantAlt?: string;
server_id?: string;
@@ -254,6 +249,10 @@ export class BaileysStartupService extends ChannelStartupService {
private endSession = false;
private logBaileys = this.configService.get<Log>('LOG').BAILEYS;
// Cache TTL constants (in seconds)
private readonly MESSAGE_CACHE_TTL_SECONDS = 5 * 60; // 5 minutes - avoid duplicate message processing
private readonly UPDATE_CACHE_TTL_SECONDS = 30 * 60; // 30 minutes - avoid duplicate status updates
public stateConnection: wa.StateConnection = { state: 'close' };
public phoneNumber: string;
@@ -445,7 +444,7 @@ export class BaileysStartupService extends ChannelStartupService {
try {
const profilePic = await this.profilePicture(this.instance.wuid);
this.instance.profilePictureUrl = profilePic.profilePictureUrl;
} catch (error) {
} catch {
this.instance.profilePictureUrl = null;
}
const formattedWuid = this.instance.wuid.split('@')[0].padEnd(30, ' ');
@@ -500,8 +499,8 @@ export class BaileysStartupService extends ChannelStartupService {
try {
// Use raw SQL to avoid JSON path issues
const webMessageInfo = (await this.prismaRepository.$queryRaw`
SELECT * FROM "Message"
WHERE "instanceId" = ${this.instanceId}
SELECT * FROM "Message"
WHERE "instanceId" = ${this.instanceId}
AND "key"->>'id' = ${key.id}
`) as proto.IWebMessageInfo[];
@@ -524,7 +523,7 @@ export class BaileysStartupService extends ChannelStartupService {
}
return webMessageInfo[0].message;
} catch (error) {
} catch {
return { conversation: '' };
}
}
@@ -597,7 +596,7 @@ export class BaileysStartupService extends ChannelStartupService {
const rand = Math.floor(Math.random() * Math.floor(proxyUrls.length));
const proxyUrl = 'http://' + proxyUrls[rand];
options = { agent: makeProxyAgent(proxyUrl), fetchAgent: makeProxyAgent(proxyUrl) };
} catch (error) {
} catch {
this.localProxy.enabled = false;
}
} else {
@@ -1000,10 +999,6 @@ export class BaileysStartupService extends ChannelStartupService {
continue;
}
if (m.key.remoteJid?.includes('@lid') && (m.key as ExtendedIMessageKey).senderPn) {
m.key.remoteJid = (m.key as ExtendedIMessageKey).senderPn;
}
if (Long.isLong(m?.messageTimestamp)) {
m.messageTimestamp = m.messageTimestamp?.toNumber();
}
@@ -1066,10 +1061,6 @@ export class BaileysStartupService extends ChannelStartupService {
) => {
try {
for (const received of messages) {
if (received.key.remoteJid?.includes('@lid') && (received.key as ExtendedMessageKey).senderPn) {
(received.key as ExtendedMessageKey).previousRemoteJid = received.key.remoteJid;
received.key.remoteJid = (received.key as ExtendedMessageKey).senderPn;
}
if (
received?.messageStubParameters?.some?.((param) =>
[
@@ -1117,9 +1108,9 @@ export class BaileysStartupService extends ChannelStartupService {
await this.sendDataWebhook(Events.MESSAGES_EDITED, editedMessage);
const oldMessage = await this.getMessage(editedMessage.key, true);
if ((oldMessage as any)?.id) {
const editedMessageTimestamp = Long.isLong(editedMessage?.timestampMs)
? Math.floor(editedMessage.timestampMs.toNumber() / 1000)
: Math.floor((editedMessage.timestampMs as number) / 1000);
const editedMessageTimestamp = Long.isLong(received?.messageTimestamp)
? Math.floor(received?.messageTimestamp.toNumber())
: Math.floor(received?.messageTimestamp as number);
await this.prismaRepository.message.update({
where: { id: (oldMessage as any).id },
@@ -1150,7 +1141,7 @@ export class BaileysStartupService extends ChannelStartupService {
continue;
}
await this.baileysCache.set(messageKey, true, 5 * 60);
await this.baileysCache.set(messageKey, true, this.MESSAGE_CACHE_TTL_SECONDS);
if (
(type !== 'notify' && type !== 'append') ||
@@ -1189,7 +1180,7 @@ export class BaileysStartupService extends ChannelStartupService {
where: { id: existingChat.id },
data: { name: received.pushName },
});
} catch (error) {
} catch {
console.log(`Chat insert record ignored: ${received.key.remoteJid} - ${this.instanceId}`);
}
}
@@ -1270,7 +1261,7 @@ export class BaileysStartupService extends ChannelStartupService {
await this.updateMessagesReadedByTimestamp(remoteJid, timestamp);
}
await this.baileysCache.set(messageKey, true, 5 * 60);
await this.baileysCache.set(messageKey, true, this.MESSAGE_CACHE_TTL_SECONDS);
} else {
this.logger.info(`Update readed messages duplicated ignored [avoid deadlock]: ${messageKey}`);
}
@@ -1358,12 +1349,10 @@ export class BaileysStartupService extends ChannelStartupService {
}
}
if (messageRaw.key.remoteJid?.includes('@lid') && messageRaw.key.remoteJidAlt) {
messageRaw.key.remoteJid = messageRaw.key.remoteJidAlt;
}
this.logger.log(messageRaw);
sendTelemetry(`received.message.${messageRaw.messageType ?? 'unknown'}`);
this.sendDataWebhook(Events.MESSAGES_UPSERT, messageRaw);
await chatbotController.emit({
@@ -1437,9 +1426,7 @@ export class BaileysStartupService extends ChannelStartupService {
continue;
}
if (key.remoteJid?.includes('@lid') && key.remoteJidAlt) {
key.remoteJid = key.remoteJidAlt;
}
if (update.message !== null && update.status === undefined) continue;
const updateKey = `${this.instance.id}_${key.id}_${update.status}`;
@@ -1480,7 +1467,7 @@ export class BaileysStartupService extends ChannelStartupService {
keyId: key.id,
remoteJid: key?.remoteJid,
fromMe: key.fromMe,
participant: key?.remoteJid,
participant: key?.participant,
status: status[update.status] ?? 'DELETED',
pollUpdates,
instanceId: this.instanceId,
@@ -1491,14 +1478,18 @@ export class BaileysStartupService extends ChannelStartupService {
if (configDatabaseData.HISTORIC || configDatabaseData.NEW_MESSAGE) {
// Use raw SQL to avoid JSON path issues
const messages = (await this.prismaRepository.$queryRaw`
SELECT * FROM "Message"
WHERE "instanceId" = ${this.instanceId}
SELECT * FROM "Message"
WHERE "instanceId" = ${this.instanceId}
AND "key"->>'id' = ${key.id}
LIMIT 1
`) as any[];
findMessage = messages[0] || null;
if (findMessage) message.messageId = findMessage.id;
if (!findMessage?.id) {
this.logger.warn(`Original message not found for update. Skipping. Key: ${JSON.stringify(key)}`);
continue;
}
message.messageId = findMessage.id;
}
if (update.message === null && update.status === undefined) {
@@ -1533,7 +1524,7 @@ export class BaileysStartupService extends ChannelStartupService {
if (status[update.status] === status[4]) {
this.logger.log(`Update as read in message.update ${remoteJid} - ${timestamp}`);
await this.updateMessagesReadedByTimestamp(remoteJid, timestamp);
await this.baileysCache.set(messageKey, true, 5 * 60);
await this.baileysCache.set(messageKey, true, this.MESSAGE_CACHE_TTL_SECONDS);
}
await this.prismaRepository.message.update({
@@ -1564,7 +1555,7 @@ export class BaileysStartupService extends ChannelStartupService {
if (this.configService.get<Database>('DATABASE').SAVE_DATA.CHATS) {
try {
await this.prismaRepository.chat.update({ where: { id: existingChat.id }, data: chatToInsert });
} catch (error) {
} catch {
console.log(`Chat insert record ignored: ${chatToInsert.remoteJid} - ${chatToInsert.instanceId}`);
}
}
@@ -1591,12 +1582,66 @@ export class BaileysStartupService extends ChannelStartupService {
});
},
'group-participants.update': (participantsUpdate: {
'group-participants.update': async (participantsUpdate: {
id: string;
participants: string[];
action: ParticipantAction;
}) => {
this.sendDataWebhook(Events.GROUP_PARTICIPANTS_UPDATE, participantsUpdate);
// ENHANCEMENT: Adds participantsData field while maintaining backward compatibility
// MAINTAINS: participants: string[] (original JID strings)
// ADDS: participantsData: { jid: string, phoneNumber: string, name?: string, imgUrl?: string }[]
// This enables LID to phoneNumber conversion without breaking existing webhook consumers
// Helper to normalize participantId as phone number
const normalizePhoneNumber = (id: string): string => {
// Remove @lid, @s.whatsapp.net suffixes and extract just the number part
return id.split('@')[0];
};
try {
// Use the same method as the /group/participants endpoint
const groupParticipants = await this.findParticipants({ groupJid: participantsUpdate.id });
// Validate that the returned participant data is usable
if (!groupParticipants?.participants || !Array.isArray(groupParticipants.participants)) {
throw new Error('Invalid participant data received from findParticipants');
}
// Keep only the participants referenced in this event
const resolvedParticipants = participantsUpdate.participants.map((participantId) => {
const participantData = groupParticipants.participants.find((p) => p.id === participantId);
let phoneNumber: string;
if (participantData?.phoneNumber) {
phoneNumber = participantData.phoneNumber;
} else {
phoneNumber = normalizePhoneNumber(participantId);
}
return {
jid: participantId,
phoneNumber,
name: participantData?.name,
imgUrl: participantData?.imgUrl,
};
});
// Keep the original format and add the resolved data
const enhancedParticipantsUpdate = {
...participantsUpdate,
participants: participantsUpdate.participants, // keep the original array of JID strings
// Add the resolved data in a separate field
participantsData: resolvedParticipants,
};
this.sendDataWebhook(Events.GROUP_PARTICIPANTS_UPDATE, enhancedParticipantsUpdate);
} catch (error) {
this.logger.error(
`Failed to resolve participant data for GROUP_PARTICIPANTS_UPDATE webhook: ${error.message} | Group: ${participantsUpdate.id} | Participants: ${participantsUpdate.participants.length}`,
);
// Fallback: send the event without conversion
this.sendDataWebhook(Events.GROUP_PARTICIPANTS_UPDATE, participantsUpdate);
}
this.updateGroupMetadataCache(participantsUpdate.id);
},
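
For webhook consumers, the enhanced GROUP_PARTICIPANTS_UPDATE event keeps the original participants array and adds participantsData alongside it. An illustrative type and sample payload; the values below are made up:

// Shape of the enhanced event; sample values are hypothetical.
interface GroupParticipantsUpdatePayload {
  id: string;              // group JID
  participants: string[];  // original JID strings, unchanged
  action: string;          // ParticipantAction from baileys ('add', 'remove', 'promote', 'demote', ...)
  participantsData: { jid: string; phoneNumber: string; name?: string; imgUrl?: string }[];
}

const example: GroupParticipantsUpdatePayload = {
  id: '120363000000000000@g.us',
  participants: ['123456789012345@lid'],
  action: 'add',
  participantsData: [{ jid: '123456789012345@lid', phoneNumber: '5511999999999', name: 'Alice' }],
};
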
@@ -1678,6 +1723,9 @@ export class BaileysStartupService extends ChannelStartupService {
}
if (settings?.msgCall?.trim().length > 0 && call.status == 'offer') {
if (call.from.endsWith('@lid')) {
call.from = await this.client.signalRepository.lidMapping.getPNForLID(call.from as string);
}
const msg = await this.client.sendMessage(call.from, { text: settings.msgCall });
this.client.ev.emit('messages.upsert', { messages: [msg], type: 'notify' });
@@ -1832,7 +1880,7 @@ export class BaileysStartupService extends ChannelStartupService {
const profilePictureUrl = await this.client.profilePictureUrl(jid, 'image');
return { wuid: jid, profilePictureUrl };
} catch (error) {
} catch {
return { wuid: jid, profilePictureUrl: null };
}
}
@@ -1842,7 +1890,7 @@ export class BaileysStartupService extends ChannelStartupService {
try {
return { wuid: jid, status: (await this.client.fetchStatus(jid))[0]?.status };
} catch (error) {
} catch {
return { wuid: jid, status: null };
}
}
@@ -1891,7 +1939,7 @@ export class BaileysStartupService extends ChannelStartupService {
website: business?.website?.shift(),
};
}
} catch (error) {
} catch {
return { wuid: jid, name: null, picture: null, status: null, os: null, isBusiness: false };
}
}
@@ -2131,7 +2179,7 @@ export class BaileysStartupService extends ChannelStartupService {
if (!cache.REDIS.ENABLED && !cache.LOCAL.ENABLED) group = await this.findGroup({ groupJid: sender }, 'inner');
else group = await this.getGroupMetadataCache(sender);
// group = await this.findGroup({ groupJid: sender }, 'inner');
} catch (error) {
} catch {
throw new NotFoundException('Group not found');
}
@@ -3367,18 +3415,13 @@ export class BaileysStartupService extends ChannelStartupService {
}
const numberJid = numberVerified?.jid || user.jid;
const lid =
typeof numberVerified?.lid === 'string'
? numberVerified.lid
: numberJid.includes('@lid')
? numberJid.split('@')[1]
: undefined;
return new OnWhatsAppDto(
numberJid,
!!numberVerified?.exists,
user.number,
contacts.find((c) => c.remoteJid === numberJid)?.pushName,
lid,
undefined,
);
}),
);
@@ -3530,7 +3573,7 @@ export class BaileysStartupService extends ChannelStartupService {
keyId: messageId,
remoteJid: response.key.remoteJid,
fromMe: response.key.fromMe,
participant: response.key?.remoteJid,
participant: response.key?.participant,
status: 'DELETED',
instanceId: this.instanceId,
};
@@ -3590,7 +3633,10 @@ export class BaileysStartupService extends ChannelStartupService {
}
}
if ('messageContextInfo' in msg.message && Object.keys(msg.message).length === 1) {
if (
Object.keys(msg.message).length === 1 &&
Object.prototype.hasOwnProperty.call(msg.message, 'messageContextInfo')
) {
throw 'The message is messageContextInfo';
}
@@ -3640,7 +3686,7 @@ export class BaileysStartupService extends ChannelStartupService {
{},
{ logger: P({ level: 'error' }) as any, reuploadRequest: this.client.updateMediaMessage },
);
} catch (err) {
} catch {
this.logger.error('Download Media failed, trying to retry in 5 seconds...');
await new Promise((resolve) => setTimeout(resolve, 5000));
const mediaType = Object.keys(msg.message).find((key) => key.endsWith('Message'));
@@ -3965,7 +4011,7 @@ export class BaileysStartupService extends ChannelStartupService {
keyId: messageId,
remoteJid: messageSent.key.remoteJid,
fromMe: messageSent.key.fromMe,
participant: messageSent.key?.remoteJid,
participant: messageSent.key?.participant,
status: 'EDITED',
instanceId: this.instanceId,
};
@@ -4230,7 +4276,7 @@ export class BaileysStartupService extends ChannelStartupService {
public async inviteInfo(id: GroupInvite) {
try {
return await this.client.groupGetInviteInfo(id.inviteCode);
} catch (error) {
} catch {
throw new NotFoundException('No invite info', id.inviteCode);
}
}
@@ -4253,7 +4299,7 @@ export class BaileysStartupService extends ChannelStartupService {
}
return { send: true, inviteUrl };
} catch (error) {
} catch {
throw new NotFoundException('No send invite');
}
}
@@ -4461,7 +4507,7 @@ export class BaileysStartupService extends ChannelStartupService {
// Use raw SQL to avoid JSON path issues
const result = await this.prismaRepository.$executeRaw`
UPDATE "Message"
UPDATE "Message"
SET "status" = ${status[4]}
WHERE "instanceId" = ${this.instanceId}
AND "key"->>'remoteJid' = ${remoteJid}
@@ -4486,7 +4532,7 @@ export class BaileysStartupService extends ChannelStartupService {
this.prismaRepository.chat.findFirst({ where: { remoteJid } }),
// Use raw SQL to avoid JSON path issues
this.prismaRepository.$queryRaw`
SELECT COUNT(*)::int as count FROM "Message"
SELECT COUNT(*)::int as count FROM "Message"
WHERE "instanceId" = ${this.instanceId}
AND "key"->>'remoteJid' = ${remoteJid}
AND ("key"->>'fromMe')::boolean = false
@@ -4561,8 +4607,8 @@ export class BaileysStartupService extends ChannelStartupService {
return response;
}
public async baileysAssertSessions(jids: string[], force: boolean) {
const response = await this.client.assertSessions(jids, force);
public async baileysAssertSessions(jids: string[]) {
const response = await this.client.assertSessions(jids);
return response;
}
@@ -4717,7 +4763,7 @@ export class BaileysStartupService extends ChannelStartupService {
collectionsLength: collections?.length,
collections: collections,
};
} catch (error) {
} catch {
return { wuid: jid, name: null, isBusiness: false };
}
}
@@ -4766,7 +4812,7 @@ export class BaileysStartupService extends ChannelStartupService {
{
OR: [
keyFilters?.remoteJid ? { key: { path: ['remoteJid'], equals: keyFilters?.remoteJid } } : {},
keyFilters?.senderPn ? { key: { path: ['senderPn'], equals: keyFilters?.senderPn } } : {},
keyFilters?.remoteJidAlt ? { key: { path: ['remoteJidAlt'], equals: keyFilters?.remoteJidAlt } } : {},
],
},
],
@@ -4796,7 +4842,7 @@ export class BaileysStartupService extends ChannelStartupService {
{
OR: [
keyFilters?.remoteJid ? { key: { path: ['remoteJid'], equals: keyFilters?.remoteJid } } : {},
keyFilters?.senderPn ? { key: { path: ['senderPn'], equals: keyFilters?.senderPn } } : {},
keyFilters?.remoteJidAlt ? { key: { path: ['remoteJidAlt'], equals: keyFilters?.remoteJidAlt } } : {},
],
},
],

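Message key filters now match on remoteJidAlt instead of the dropped senderPn field. A standalone sketch of the same Prisma JSON-path filter on Postgres; the function name and signature are illustrative:

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Find messages whose stored key matches either the primary JID or its alternate (LID) form.
export async function findMessagesByJid(instanceId: string, remoteJid?: string, remoteJidAlt?: string) {
  return prisma.message.findMany({
    where: {
      instanceId,
      OR: [
        remoteJid ? { key: { path: ['remoteJid'], equals: remoteJid } } : {},
        remoteJidAlt ? { key: { path: ['remoteJidAlt'], equals: remoteJidAlt } } : {},
      ],
    },
  });
}
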
View File

@@ -49,7 +49,7 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
try {
JSON.parse(str);
return true;
} catch (e) {
} catch {
return false;
}
}
@@ -180,6 +180,7 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
remoteJid: string,
message: string,
settings: SettingsType,
linkPreview: boolean = true,
): Promise<void> {
if (!message) return;
@@ -202,7 +203,7 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
if (mediaType) {
// Send accumulated text before sending media
if (textBuffer.trim()) {
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages);
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages, linkPreview);
textBuffer = '';
}
@@ -252,7 +253,56 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
// Send any remaining text
if (textBuffer.trim()) {
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages);
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages, linkPreview);
}
}
/**
* Split message by double line breaks and return array of message parts
*/
private splitMessageByDoubleLineBreaks(message: string): string[] {
return message.split('\n\n').filter((part) => part.trim().length > 0);
}
/**
* Send a single message with proper typing indicators and delays
*/
private async sendSingleMessage(
instance: any,
remoteJid: string,
message: string,
settings: any,
linkPreview: boolean = true,
): Promise<void> {
const timePerChar = settings?.timePerChar ?? 0;
const minDelay = 1000;
const maxDelay = 20000;
const delay = Math.min(Math.max(message.length * timePerChar, minDelay), maxDelay);
this.logger.debug(`[BaseChatbot] Sending single message with linkPreview: ${linkPreview}`);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.presenceSubscribe(remoteJid);
await instance.client.sendPresenceUpdate('composing', remoteJid);
}
await new Promise<void>((resolve) => {
setTimeout(async () => {
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: message,
linkPreview,
},
false,
);
resolve();
}, delay);
});
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
}
@@ -265,67 +315,24 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
text: string,
settings: any,
splitMessages: boolean,
linkPreview: boolean = true,
): Promise<void> {
const timePerChar = settings?.timePerChar ?? 0;
const minDelay = 1000;
const maxDelay = 20000;
if (splitMessages) {
const multipleMessages = text.split('\n\n');
for (let index = 0; index < multipleMessages.length; index++) {
const message = multipleMessages[index];
if (!message.trim()) continue;
const messageParts = this.splitMessageByDoubleLineBreaks(text);
const delay = Math.min(Math.max(message.length * timePerChar, minDelay), maxDelay);
this.logger.debug(`[BaseChatbot] Splitting message into ${messageParts.length} parts`);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.presenceSubscribe(remoteJid);
await instance.client.sendPresenceUpdate('composing', remoteJid);
}
for (let index = 0; index < messageParts.length; index++) {
const message = messageParts[index];
await new Promise<void>((resolve) => {
setTimeout(async () => {
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: message,
},
false,
);
resolve();
}, delay);
});
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
this.logger.debug(`[BaseChatbot] Sending message part ${index + 1}/${messageParts.length}`);
await this.sendSingleMessage(instance, remoteJid, message, settings, linkPreview);
}
this.logger.debug(`[BaseChatbot] All message parts sent successfully`);
} else {
const delay = Math.min(Math.max(text.length * timePerChar, minDelay), maxDelay);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.presenceSubscribe(remoteJid);
await instance.client.sendPresenceUpdate('composing', remoteJid);
}
await new Promise<void>((resolve) => {
setTimeout(async () => {
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: text,
},
false,
);
resolve();
}, delay);
});
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
this.logger.debug(`[BaseChatbot] Sending single message`);
await this.sendSingleMessage(instance, remoteJid, text, settings, linkPreview);
}
}

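The typing delay used by sendSingleMessage and sendFormattedText is the character count times timePerChar, clamped between 1 and 20 seconds. A quick standalone check of that clamp:

// delay = clamp(length * timePerChar, 1000 ms, 20000 ms)
function typingDelayMs(length: number, timePerChar: number): number {
  return Math.min(Math.max(length * timePerChar, 1000), 20000);
}

console.log(typingDelayMs(40, 50));  // 2000  -> 2 s for a short reply
console.log(typingDelayMs(800, 50)); // 20000 -> long replies are capped at 20 s
console.log(typingDelayMs(10, 0));   // 1000  -> timePerChar of 0 still waits the 1 s floor
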
View File

@@ -91,19 +91,19 @@ export class ChatbotController {
pushName,
isIntegration,
};
await evolutionBotController.emit(emitData);
evolutionBotController.emit(emitData);
await typebotController.emit(emitData);
typebotController.emit(emitData);
await openaiController.emit(emitData);
openaiController.emit(emitData);
await difyController.emit(emitData);
difyController.emit(emitData);
await n8nController.emit(emitData);
n8nController.emit(emitData);
await evoaiController.emit(emitData);
evoaiController.emit(emitData);
await flowiseController.emit(emitData);
flowiseController.emit(emitData);
}
public processDebounce(

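Dropping the awaits makes the integration emits fire-and-forget, so one slow or failing integration no longer blocks the others. If failures still need to be observed, a Promise.allSettled variant keeps that concurrency while logging rejections; this is only a sketch, not what the controller does:

const results = await Promise.allSettled([
  evolutionBotController.emit(emitData),
  typebotController.emit(emitData),
  openaiController.emit(emitData),
  difyController.emit(emitData),
  n8nController.emit(emitData),
  evoaiController.emit(emitData),
  flowiseController.emit(emitData),
]);

for (const result of results) {
  if (result.status === 'rejected') {
    console.error('chatbot emit failed:', result.reason);
  }
}
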
View File

@@ -1,6 +1,5 @@
import { InstanceDto } from '@api/dto/instance.dto';
import { Options, Quoted, SendAudioDto, SendMediaDto, SendTextDto } from '@api/dto/sendMessage.dto';
import { ExtendedMessageKey } from '@api/integrations/channel/whatsapp/whatsapp.baileys.service';
import { ChatwootDto } from '@api/integrations/chatbot/chatwoot/dto/chatwoot.dto';
import { postgresClient } from '@api/integrations/chatbot/chatwoot/libs/postgres.client';
import { chatwootImport } from '@api/integrations/chatbot/chatwoot/utils/chatwoot-import-helper';
@@ -24,7 +23,7 @@ import { Chatwoot as ChatwootModel, Contact as ContactModel, Message as MessageM
import i18next from '@utils/i18n';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { proto } from 'baileys';
import { proto, WAMessageKey } from 'baileys';
import dayjs from 'dayjs';
import FormData from 'form-data';
import { Jimp, JimpMime } from 'jimp';
@@ -33,6 +32,8 @@ import mimeTypes from 'mime-types';
import path from 'path';
import { Readable } from 'stream';
const MIN_CONNECTION_NOTIFICATION_INTERVAL_MS = 30000; // 30 seconds
interface ChatwootMessage {
messageId?: number;
inboxId?: number;
@@ -44,6 +45,25 @@ interface ChatwootMessage {
export class ChatwootService {
private readonly logger = new Logger('ChatwootService');
// HTTP timeout constants
private readonly MEDIA_DOWNLOAD_TIMEOUT_MS = 60000; // 60 seconds for large files
// S3/MinIO retry configuration (external storage - longer delays, fewer retries)
private readonly S3_MAX_RETRIES = 3;
private readonly S3_BASE_DELAY_MS = 1000; // Base delay: 1 second
private readonly S3_MAX_DELAY_MS = 8000; // Max delay: 8 seconds
// Database polling retry configuration (internal DB - shorter delays, more retries)
private readonly DB_POLLING_MAX_RETRIES = 5;
private readonly DB_POLLING_BASE_DELAY_MS = 100; // Base delay: 100ms
private readonly DB_POLLING_MAX_DELAY_MS = 2000; // Max delay: 2 seconds
// Webhook processing delay
private readonly WEBHOOK_INITIAL_DELAY_MS = 500; // Initial delay before processing webhook
// Lock polling delay
private readonly LOCK_POLLING_DELAY_MS = 300; // Delay between lock status checks
private provider: any;
constructor(
@@ -130,7 +150,7 @@ export class ChatwootService {
public async find(instance: InstanceDto): Promise<ChatwootDto> {
try {
return await this.waMonitor.waInstances[instance.instanceName].findChatwoot();
} catch (error) {
} catch {
this.logger.error('chatwoot not found');
return { enabled: null, url: '' };
}
@@ -370,7 +390,7 @@ export class ChatwootService {
});
return contact;
} catch (error) {
} catch {
return null;
}
}
@@ -407,7 +427,7 @@ export class ChatwootService {
}
return true;
} catch (error) {
} catch {
return false;
}
}
@@ -568,27 +588,29 @@ export class ChatwootService {
}
public async createConversation(instance: InstanceDto, body: any) {
const isLid = body.key.previousRemoteJid?.includes('@lid') && body.key.senderPn;
const remoteJid = body.key.remoteJid;
const isLid = body.key.addressingMode === 'lid' && body.key.remoteJidAlt;
const remoteJid = isLid ? body.key.remoteJidAlt : body.key.remoteJid;
const cacheKey = `${instance.instanceName}:createConversation-${remoteJid}`;
const lockKey = `${instance.instanceName}:lock:createConversation-${remoteJid}`;
const maxWaitTime = 5000; // 5 secounds
const maxWaitTime = 5000; // 5 seconds
const client = await this.clientCw(instance);
if (!client) return null;
try {
// Handle identifier updates for contacts already created via @lid
if (isLid && body.key.senderPn !== body.key.previousRemoteJid) {
if (isLid && body.key.remoteJidAlt !== body.key.remoteJid) {
const contact = await this.findContact(instance, body.key.remoteJid.split('@')[0]);
if (contact && contact.identifier !== body.key.senderPn) {
if (contact && contact.identifier !== body.key.remoteJidAlt) {
this.logger.verbose(
`Identifier needs update: (contact.identifier: ${contact.identifier}, body.key.remoteJid: ${body.key.remoteJid}, body.key.senderPn: ${body.key.senderPn}`,
`Identifier needs update: (contact.identifier: ${contact.identifier}, body.key.remoteJid: ${body.key.remoteJid}, body.key.remoteJidAlt: ${body.key.remoteJidAlt}`,
);
const updateContact = await this.updateContact(instance, contact.id, {
identifier: body.key.senderPn,
phone_number: `+${body.key.senderPn.split('@')[0]}`,
identifier: body.key.remoteJidAlt,
phone_number: `+${body.key.remoteJidAlt.split('@')[0]}`,
});
if (updateContact === null) {
const baseContact = await this.findContact(instance, body.key.senderPn.split('@')[0]);
const baseContact = await this.findContact(instance, body.key.remoteJidAlt.split('@')[0]);
if (baseContact) {
await this.mergeContacts(baseContact.id, contact.id);
this.logger.verbose(
@@ -605,6 +627,22 @@ export class ChatwootService {
if (await this.cache.has(cacheKey)) {
const conversationId = (await this.cache.get(cacheKey)) as number;
this.logger.verbose(`Found conversation to: ${remoteJid}, conversation ID: ${conversationId}`);
let conversationExists: conversation | boolean;
try {
conversationExists = await client.conversations.get({
accountId: this.provider.accountId,
conversationId: conversationId,
});
this.logger.verbose(`Conversation exists: ${JSON.stringify(conversationExists)}`);
} catch (error) {
this.logger.error(`Error getting conversation: ${error}`);
conversationExists = false;
}
if (!conversationExists) {
this.logger.verbose('Conversation does not exist, re-calling createConversation');
this.cache.delete(cacheKey);
return await this.createConversation(instance, body);
}
return conversationId;
}
@@ -617,7 +655,7 @@ export class ChatwootService {
this.logger.warn(`Timeout aguardando lock para ${remoteJid}`);
break;
}
await new Promise((res) => setTimeout(res, 300));
await new Promise((res) => setTimeout(res, this.LOCK_POLLING_DELAY_MS));
if (await this.cache.has(cacheKey)) {
const conversationId = (await this.cache.get(cacheKey)) as number;
this.logger.verbose(`Resolves creation of: ${remoteJid}, conversation ID: ${conversationId}`);
@@ -639,11 +677,8 @@ export class ChatwootService {
return (await this.cache.get(cacheKey)) as number;
}
const client = await this.clientCw(instance);
if (!client) return null;
const isGroup = remoteJid.includes('@g.us');
const chatId = isGroup ? remoteJid : remoteJid.split('@')[0];
const chatId = isGroup ? remoteJid : remoteJid.split('@')[0].split(':')[0];
let nameContact = !body.key.fromMe ? body.pushName : chatId;
const filterInbox = await this.getInbox(instance);
if (!filterInbox) return null;
@@ -769,7 +804,7 @@ export class ChatwootService {
if (inboxConversation) {
this.logger.verbose(`Returning existing conversation ID: ${inboxConversation.id}`);
this.cache.set(cacheKey, inboxConversation.id);
this.cache.set(cacheKey, inboxConversation.id, 8 * 3600);
return inboxConversation.id;
}
}
@@ -802,7 +837,7 @@ export class ChatwootService {
}
this.logger.verbose(`New conversation created of ${remoteJid} with ID: ${conversation.id}`);
this.cache.set(cacheKey, conversation.id);
this.cache.set(cacheKey, conversation.id, 8 * 3600);
return conversation.id;
} finally {
await this.cache.delete(lockKey);
@@ -1123,20 +1158,140 @@ export class ChatwootService {
public async sendAttachment(waInstance: any, number: string, media: any, caption?: string, options?: Options) {
try {
const parsedMedia = path.parse(decodeURIComponent(media));
let mimeType = mimeTypes.lookup(parsedMedia?.ext) || '';
let fileName = parsedMedia?.name + parsedMedia?.ext;
// Always download the file from MinIO/S3 before sending
// Presigned URLs can expire, so we convert to base64
let mediaBuffer: Buffer;
let mimeType: string;
let fileName: string;
if (!mimeType) {
const parts = media.split('/');
fileName = decodeURIComponent(parts[parts.length - 1]);
try {
this.logger.verbose(`Downloading media from: ${media}`);
// Try to download the file using Chatwoot authentication
// maxRedirects: 0 so redirects are not followed automatically
const response = await axios.get(media, {
responseType: 'arraybuffer',
timeout: this.MEDIA_DOWNLOAD_TIMEOUT_MS,
headers: {
api_access_token: this.provider.token,
},
maxRedirects: 0, // Do not follow redirects automatically
validateStatus: (status) => status < 500, // Accept redirects (301, 302, 307)
});
mimeType = response.headers['content-type'];
this.logger.verbose(`Initial response status: ${response.status}`);
// If the response is a redirect, grab the target URL and issue a new request
if (response.status >= 300 && response.status < 400) {
const redirectUrl = response.headers.location;
this.logger.verbose(`Redirect to: ${redirectUrl}`);
if (redirectUrl) {
// Make a new request to the S3/MinIO URL (no auth, since it is a presigned URL)
// IMPORTANT: Chatwoot may generate the presigned URL BEFORE the upload finishes
// Retry with exponential backoff if we get a 404 (file not yet available)
this.logger.verbose('Downloading from S3/MinIO...');
let s3Response;
let retryCount = 0;
const maxRetries = this.S3_MAX_RETRIES;
const baseDelay = this.S3_BASE_DELAY_MS;
const maxDelay = this.S3_MAX_DELAY_MS;
while (retryCount <= maxRetries) {
s3Response = await axios.get(redirectUrl, {
responseType: 'arraybuffer',
timeout: this.MEDIA_DOWNLOAD_TIMEOUT_MS,
validateStatus: (status) => status < 500,
});
this.logger.verbose(
`S3 response status: ${s3Response.status}, size: ${s3Response.data?.byteLength || 0} bytes (attempt ${retryCount + 1}/${maxRetries + 1})`,
);
// If it is not a 404, exit the loop
if (s3Response.status !== 404) {
break;
}
// If it is a 404 and attempts remain, wait with exponential backoff and try again
if (retryCount < maxRetries) {
// Exponential backoff with max delay (following the webhook controller pattern)
const backoffDelay = Math.min(baseDelay * Math.pow(2, retryCount), maxDelay);
const errorBody = s3Response.data?.toString ? s3Response.data.toString('utf-8') : s3Response.data;
this.logger.warn(
`File not yet available in S3/MinIO (attempt ${retryCount + 1}/${maxRetries + 1}). Retrying in ${backoffDelay}ms with exponential backoff...`,
);
this.logger.verbose(`MinIO Response: ${errorBody}`);
await new Promise((resolve) => setTimeout(resolve, backoffDelay));
retryCount++;
} else {
// Última tentativa falhou
break;
}
}
// After all attempts, check the final status
if (s3Response.status === 404) {
const errorBody = s3Response.data?.toString ? s3Response.data.toString('utf-8') : s3Response.data;
this.logger.error(`File not found in S3/MinIO after ${maxRetries + 1} attempts. URL: ${redirectUrl}`);
this.logger.error(`MinIO Error Response: ${errorBody}`);
throw new Error(
'File not found in S3/MinIO (404). The file may have been deleted, the URL is incorrect, or Chatwoot has not finished uploading yet.',
);
}
if (s3Response.status === 403) {
this.logger.error(`Access denied to S3/MinIO. URL may have expired: ${redirectUrl}`);
throw new Error(
'Access denied to S3/MinIO (403). Presigned URL may have expired. Check S3_PRESIGNED_EXPIRATION setting.',
);
}
if (s3Response.status >= 400) {
this.logger.error(`S3/MinIO error ${s3Response.status}: ${s3Response.statusText}`);
throw new Error(`S3/MinIO error ${s3Response.status}: ${s3Response.statusText}`);
}
mediaBuffer = Buffer.from(s3Response.data);
mimeType = s3Response.headers['content-type'] || 'application/octet-stream';
this.logger.verbose(`Downloaded ${mediaBuffer.length} bytes from S3, type: ${mimeType}`);
} else {
this.logger.error('Redirect response without Location header');
throw new Error('Redirect without Location header');
}
} else if (response.status === 404) {
this.logger.error(`File not found (404) at: ${media}`);
throw new Error('File not found (404). The attachment may not exist in Chatwoot storage.');
} else if (response.status >= 400) {
this.logger.error(`HTTP ${response.status}: ${response.statusText} for URL: ${media}`);
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
} else {
// Direct download, no redirect
mediaBuffer = Buffer.from(response.data);
mimeType = response.headers['content-type'] || 'application/octet-stream';
this.logger.verbose(`Downloaded ${mediaBuffer.length} bytes directly, type: ${mimeType}`);
}
// Extract the file name from the URL or use the content-disposition header
const parsedMedia = path.parse(decodeURIComponent(media));
if (parsedMedia?.name && parsedMedia?.ext) {
fileName = parsedMedia.name + parsedMedia.ext;
} else {
const parts = media.split('/');
fileName = decodeURIComponent(parts[parts.length - 1].split('?')[0]);
}
this.logger.verbose(`File name: ${fileName}, size: ${mediaBuffer.length} bytes`);
} catch (downloadError) {
this.logger.error('[MEDIA DOWNLOAD] ❌ Error downloading media from: ' + media);
this.logger.error(`[MEDIA DOWNLOAD] Error message: ${downloadError.message}`);
this.logger.error(`[MEDIA DOWNLOAD] Error stack: ${downloadError.stack}`);
this.logger.error(`[MEDIA DOWNLOAD] Full error: ${JSON.stringify(downloadError, null, 2)}`);
throw new Error(`Failed to download media: ${downloadError.message}`);
}
// Determine the media type from the mime type
let type = 'document';
switch (mimeType.split('/')[0]) {
@@ -1154,10 +1309,12 @@ export class ChatwootService {
break;
}
// For audio, use base64 with a data URI
if (type === 'audio') {
const base64Audio = `data:${mimeType};base64,${mediaBuffer.toString('base64')}`;
const data: SendAudioDto = {
number: number,
audio: media,
audio: base64Audio,
delay: 1200,
quoted: options?.quoted,
};
@@ -1169,8 +1326,12 @@ export class ChatwootService {
return messageSent;
}
const documentExtensions = ['.gif', '.svg', '.tiff', '.tif'];
if (type === 'image' && parsedMedia && documentExtensions.includes(parsedMedia?.ext)) {
// For other types, convert to plain base64 (no data URI prefix)
const base64Media = mediaBuffer.toString('base64');
const documentExtensions = ['.gif', '.svg', '.tiff', '.tif', '.dxf', '.dwg'];
const parsedExt = path.parse(fileName)?.ext;
if (type === 'image' && parsedExt && documentExtensions.includes(parsedExt)) {
type = 'document';
}
@@ -1178,7 +1339,7 @@ export class ChatwootService {
number: number,
mediatype: type as any,
fileName: fileName,
media: media,
media: base64Media, // Plain base64, no prefix
delay: 1200,
quoted: options?.quoted,
};
@@ -1194,6 +1355,7 @@ export class ChatwootService {
return messageSent;
} catch (error) {
this.logger.error(error);
throw error; // Re-throw so the error is handled by the caller
}
}
@@ -1233,9 +1395,87 @@ export class ChatwootService {
});
}
/**
* Processes a message deletion in the background
* Async method invoked via setImmediate so the webhook response is not blocked
*/
private async processDeletion(instance: InstanceDto, body: any, deleteLockKey: string) {
this.logger.warn(`[DELETE] 🗑️ Processing deletion - messageId: ${body.id}`);
const waInstance = this.waMonitor.waInstances[instance.instanceName];
// Fetch ALL messages with this chatwootMessageId (there may be multiple attachments)
const messages = await this.prismaRepository.message.findMany({
where: {
chatwootMessageId: body.id,
instanceId: instance.instanceId,
},
});
if (messages && messages.length > 0) {
this.logger.warn(`[DELETE] Found ${messages.length} message(s) to delete from Chatwoot message ${body.id}`);
this.logger.verbose(`[DELETE] Messages keys: ${messages.map((m) => (m.key as any)?.id).join(', ')}`);
// Delete each message on WhatsApp
for (const message of messages) {
const key = message.key as WAMessageKey;
this.logger.warn(
`[DELETE] Attempting to delete WhatsApp message - keyId: ${key?.id}, remoteJid: ${key?.remoteJid}`,
);
try {
await waInstance?.client.sendMessage(key.remoteJid, { delete: key });
this.logger.warn(`[DELETE] ✅ Message ${key.id} deleted in WhatsApp successfully`);
} catch (error) {
this.logger.error(`[DELETE] ❌ Error deleting message ${key.id} in WhatsApp: ${error}`);
this.logger.error(`[DELETE] Error details: ${JSON.stringify(error, null, 2)}`);
}
}
// Remove all messages from the database
await this.prismaRepository.message.deleteMany({
where: {
instanceId: instance.instanceId,
chatwootMessageId: body.id,
},
});
this.logger.warn(`[DELETE] ✅ SUCCESS: ${messages.length} message(s) deleted from WhatsApp and database`);
} else {
// Message not found - it may be an old message that was replaced by an edit
this.logger.warn(`[DELETE] ⚠️ WARNING: Message not found in DB - chatwootMessageId: ${body.id}`);
}
// Release the lock after processing
await this.cache.delete(deleteLockKey);
}
public async receiveWebhook(instance: InstanceDto, body: any) {
try {
await new Promise((resolve) => setTimeout(resolve, 500));
// IMPORTANT: Check the deletion lock BEFORE the initial delay
// to avoid a race condition with duplicated webhooks
let isDeletionEvent = false;
if (body.event === 'message_updated' && body.content_attributes?.deleted) {
isDeletionEvent = true;
const deleteLockKey = `${instance.instanceName}:deleteMessage-${body.id}`;
// Check whether this deletion is already being processed
if (await this.cache.has(deleteLockKey)) {
this.logger.warn(`[DELETE] ⏭️ SKIPPING: Deletion already in progress for messageId: ${body.id}`);
return { message: 'already_processing' };
}
// Acquire the lock IMMEDIATELY for 30 seconds
await this.cache.set(deleteLockKey, true, 30);
this.logger.warn(
`[WEBHOOK-DELETE] Event: ${body.event}, messageId: ${body.id}, conversation: ${body.conversation?.id}`,
);
}
// For deletions, process IMMEDIATELY (no delay)
// For other events, wait for the initial delay
if (!isDeletionEvent) {
await new Promise((resolve) => setTimeout(resolve, this.WEBHOOK_INITIAL_DELAY_MS));
}
const client = await this.clientCw(instance);
@@ -1254,6 +1494,39 @@ export class ChatwootService {
this.cache.delete(keyToDelete);
}
// Debug log for deleted messages
if (body.event === 'message_updated') {
this.logger.verbose(
`Message updated event - deleted: ${body.content_attributes?.deleted}, messageId: ${body.id}`,
);
}
// Process message deletion BEFORE the other validations
if (body.event === 'message_updated' && body.content_attributes?.deleted) {
// The lock was already acquired at the start of the method (before the delay)
const deleteLockKey = `${instance.instanceName}:deleteMessage-${body.id}`;
// STRATEGY: Process in the background and respond IMMEDIATELY
// This avoids the Chatwoot timeout (5s) when there are many images (> 5s of processing)
this.logger.warn(`[DELETE] 🚀 Starting background deletion - messageId: ${body.id}`);
// Run in the background (no await) - does not block the webhook response
setImmediate(async () => {
try {
await this.processDeletion(instance, body, deleteLockKey);
} catch (error) {
this.logger.error(`[DELETE] ❌ Background deletion failed for messageId ${body.id}: ${error}`);
}
});
// RESPOND IMMEDIATELY to Chatwoot (< 50ms)
return {
message: 'deletion_accepted',
messageId: body.id,
note: 'Deletion is being processed in background',
};
}
if (
!body?.conversation ||
body.private ||
@@ -1285,7 +1558,7 @@ export class ChatwootService {
});
if (message) {
const key = message.key as ExtendedMessageKey;
const key = message.key as WAMessageKey;
await waInstance?.client.sendMessage(key.remoteJid, { delete: key });
@@ -1370,7 +1643,10 @@ export class ChatwootService {
}
if (body.message_type === 'outgoing' && body?.conversation?.messages?.length && chatId !== '123456') {
if (body?.conversation?.messages[0]?.source_id?.substring(0, 5) === 'WAID:') {
if (
body?.conversation?.messages[0]?.source_id?.substring(0, 5) === 'WAID:' &&
body?.conversation?.messages[0]?.id === body?.id
) {
return { message: 'bot' };
}
@@ -1394,40 +1670,58 @@ export class ChatwootService {
for (const message of body.conversation.messages) {
if (message.attachments && message.attachments.length > 0) {
for (const attachment of message.attachments) {
if (!messageReceived) {
formatText = null;
// Process attachments asynchronously so the webhook is not blocked
const processAttachments = async () => {
for (const attachment of message.attachments) {
if (!messageReceived) {
formatText = null;
}
const options: Options = {
quoted: await this.getQuotedMessage(body, instance),
};
try {
const messageSent = await this.sendAttachment(
waInstance,
chatId,
attachment.data_url,
formatText,
options,
);
if (!messageSent && body.conversation?.id) {
this.onSendMessageError(instance, body.conversation?.id);
}
if (messageSent) {
await this.updateChatwootMessageId(
{
...messageSent,
owner: instance.instanceName,
},
{
messageId: body.id,
inboxId: body.inbox?.id,
conversationId: body.conversation?.id,
contactInboxSourceId: body.conversation?.contact_inbox?.source_id,
},
instance,
);
}
} catch (error) {
this.logger.error(error);
if (body.conversation?.id) {
this.onSendMessageError(instance, body.conversation?.id, error);
}
}
}
};
const options: Options = {
quoted: await this.getQuotedMessage(body, instance),
};
const messageSent = await this.sendAttachment(
waInstance,
chatId,
attachment.data_url,
formatText,
options,
);
if (!messageSent && body.conversation?.id) {
this.onSendMessageError(instance, body.conversation?.id);
}
await this.updateChatwootMessageId(
{
...messageSent,
owner: instance.instanceName,
},
{
messageId: body.id,
inboxId: body.inbox?.id,
conversationId: body.conversation?.id,
contactInboxSourceId: body.conversation?.contact_inbox?.source_id,
},
instance,
);
}
// Run in the background without blocking
processAttachments().catch((error) => {
this.logger.error(error);
});
} else {
const data: SendTextDto = {
number: chatId,
@@ -1450,10 +1744,7 @@ export class ChatwootService {
}
await this.updateChatwootMessageId(
{
...messageSent,
instanceId: instance.instanceId,
},
messageSent, // Already carries instanceId
{
messageId: body.id,
inboxId: body.inbox?.id,
@@ -1483,7 +1774,7 @@ export class ChatwootService {
},
});
if (lastMessage && !lastMessage.chatwootIsRead) {
const key = lastMessage.key as ExtendedMessageKey;
const key = lastMessage.key as WAMessageKey;
waInstance?.markMessageAsRead({
readMessages: [
@@ -1541,14 +1832,63 @@ export class ChatwootService {
chatwootMessageIds: ChatwootMessage,
instance: InstanceDto,
) {
const key = message.key as ExtendedMessageKey;
const key = message.key as WAMessageKey;
if (!chatwootMessageIds.messageId || !key?.id) {
this.logger.verbose(
`Skipping updateChatwootMessageId - messageId: ${chatwootMessageIds.messageId}, keyId: ${key?.id}`,
);
return;
}
// Use instanceId from the message or fall back to the instance
const instanceId = message.instanceId || instance.instanceId;
this.logger.verbose(
`Updating message with chatwootMessageId: ${chatwootMessageIds.messageId}, keyId: ${key.id}, instanceId: ${instanceId}`,
);
// Check whether the message exists before updating, using polling with exponential backoff
let retries = 0;
const maxRetries = this.DB_POLLING_MAX_RETRIES;
const baseDelay = this.DB_POLLING_BASE_DELAY_MS;
const maxDelay = this.DB_POLLING_MAX_DELAY_MS;
let messageExists = false;
while (retries < maxRetries && !messageExists) {
const existingMessage = await this.prismaRepository.message.findFirst({
where: {
instanceId: instanceId,
key: {
path: ['id'],
equals: key.id,
},
},
});
if (existingMessage) {
messageExists = true;
this.logger.verbose(`Message found in database after ${retries} retries`);
} else {
retries++;
if (retries < maxRetries) {
// Exponential backoff with max delay (following the system-wide pattern)
const backoffDelay = Math.min(baseDelay * Math.pow(2, retries - 1), maxDelay);
this.logger.verbose(`Message not found, retry ${retries}/${maxRetries} in ${backoffDelay}ms`);
await new Promise((resolve) => setTimeout(resolve, backoffDelay));
} else {
this.logger.verbose(`Message not found after ${retries} attempts`);
}
}
}
if (!messageExists) {
this.logger.warn(`Message not found in database after ${maxRetries} retries, keyId: ${key.id}`);
return;
}
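Both this database polling loop and the S3/MinIO download retry earlier in this file use the same capped exponential backoff. A minimal generic helper capturing that shared pattern might look like the sketch below (hypothetical, for illustration only; retryWithBackoff and shouldRetry are not names from this changeset):

// Hypothetical helper: capped exponential backoff, delay = min(base * 2^attempt, maxDelay),
// matching the pattern used in the retries above.
async function retryWithBackoff<T>(
  fn: () => Promise<T>,
  shouldRetry: (result: T) => boolean,
  opts: { maxRetries: number; baseDelayMs: number; maxDelayMs: number },
): Promise<T> {
  let result = await fn();
  for (let attempt = 0; attempt < opts.maxRetries && shouldRetry(result); attempt++) {
    const delay = Math.min(opts.baseDelayMs * Math.pow(2, attempt), opts.maxDelayMs);
    await new Promise((resolve) => setTimeout(resolve, delay));
    result = await fn();
  }
  return result;
}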
// Use raw SQL to avoid JSON path issues
await this.prismaRepository.$executeRaw`
const result = await this.prismaRepository.$executeRaw`
UPDATE "Message"
SET
"chatwootMessageId" = ${chatwootMessageIds.messageId},
@@ -1556,10 +1896,12 @@ export class ChatwootService {
"chatwootInboxId" = ${chatwootMessageIds.inboxId},
"chatwootContactInboxSourceId" = ${chatwootMessageIds.contactInboxSourceId},
"chatwootIsRead" = ${chatwootMessageIds.isRead || false}
WHERE "instanceId" = ${instance.instanceId}
WHERE "instanceId" = ${instanceId}
AND "key"->>'id' = ${key.id}
`;
this.logger.verbose(`Update result: ${result} rows affected`);
if (this.isImportHistoryAvailable()) {
chatwootImport.updateMessageSourceID(chatwootMessageIds.messageId, key.id);
}
@@ -1609,7 +1951,7 @@ export class ChatwootService {
},
});
const key = message?.key as ExtendedMessageKey;
const key = message?.key as WAMessageKey;
if (message && key?.id) {
return {
@@ -1913,6 +2255,7 @@ export class ChatwootService {
}
if (event === 'messages.upsert' || event === 'send.message') {
this.logger.info(`[${event}] New message received - Instance: ${JSON.stringify(body, null, 2)}`);
if (body.key.remoteJid === 'status@broadcast') {
return;
}
@@ -2235,9 +2578,8 @@ export class ChatwootService {
}
if (event === 'messages.edit' || event === 'send.message.update') {
const editedText = `${
body?.editedMessage?.conversation || body?.editedMessage?.extendedTextMessage?.text
}\n\n_\`${i18next.t('cw.message.edited')}.\`_`;
const editedMessageContent =
body?.editedMessage?.conversation || body?.editedMessage?.extendedTextMessage?.text;
const message = await this.getMessageByKeyId(instance, body?.key?.id);
if (!message) {
@@ -2245,11 +2587,14 @@ export class ChatwootService {
return;
}
const key = message.key as ExtendedMessageKey;
const key = message.key as WAMessageKey;
const messageType = key?.fromMe ? 'outgoing' : 'incoming';
if (message && message.chatwootConversationId) {
if (message && message.chatwootConversationId && message.chatwootMessageId) {
// Create a new message in the format: "Message edited:\n\nteste1"
const editedText = `\n\n\`${i18next.t('cw.message.edited')}:\`\n\n${editedMessageContent}`;
const send = await this.createMessage(
instance,
message.chatwootConversationId,
@@ -2327,15 +2672,30 @@ export class ChatwootService {
await this.createBotMessage(instance, msgStatus, 'incoming');
}
if (event === 'connection.update') {
if (body.status === 'open') {
// if we have qrcode count then we understand that a new connection was established
if (this.waMonitor.waInstances[instance.instanceName].qrCode.count > 0) {
const msgConnection = i18next.t('cw.inbox.connected');
await this.createBotMessage(instance, msgConnection, 'incoming');
this.waMonitor.waInstances[instance.instanceName].qrCode.count = 0;
chatwootImport.clearAll(instance);
}
if (event === 'connection.update' && body.status === 'open') {
const waInstance = this.waMonitor.waInstances[instance.instanceName];
if (!waInstance) return;
const now = Date.now();
const timeSinceLastNotification = now - (waInstance.lastConnectionNotification || 0);
// If the connection was established via QR code, notify immediately.
if (waInstance.qrCode && waInstance.qrCode.count > 0) {
const msgConnection = i18next.t('cw.inbox.connected');
await this.createBotMessage(instance, msgConnection, 'incoming');
waInstance.qrCode.count = 0;
waInstance.lastConnectionNotification = now;
chatwootImport.clearAll(instance);
}
// If it was not via QR code, apply the notification throttling.
else if (timeSinceLastNotification >= MIN_CONNECTION_NOTIFICATION_INTERVAL_MS) {
const msgConnection = i18next.t('cw.inbox.connected');
await this.createBotMessage(instance, msgConnection, 'incoming');
waInstance.lastConnectionNotification = now;
} else {
this.logger.warn(
`Connection notification skipped for ${instance.instanceName} - too frequent (${timeSinceLastNotification}ms since last)`,
);
}
}
@@ -2552,7 +2912,7 @@ export class ChatwootService {
await chatwootImport.importHistoryMessages(instance, this, inbox, this.provider);
const waInstance = this.waMonitor.waInstances[instance.instanceName];
waInstance.clearCacheChatwoot();
} catch (error) {
} catch {
return;
}
}

View File

@@ -112,12 +112,19 @@ class ChatwootImport {
const bindInsert = [provider.accountId];
for (const contact of contactsChunk) {
bindInsert.push(contact.pushName);
const isGroup = this.isIgnorePhoneNumber(contact.remoteJid);
const contactName = isGroup ? `${contact.pushName} (GROUP)` : contact.pushName;
bindInsert.push(contactName);
const bindName = `$${bindInsert.length}`;
bindInsert.push(`+${contact.remoteJid.split('@')[0]}`);
const bindPhoneNumber = `$${bindInsert.length}`;
let bindPhoneNumber: string;
if (!isGroup) {
bindInsert.push(`+${contact.remoteJid.split('@')[0]}`);
bindPhoneNumber = `$${bindInsert.length}`;
} else {
bindPhoneNumber = 'NULL';
}
bindInsert.push(contact.remoteJid);
const bindIdentifier = `$${bindInsert.length}`;

View File

@@ -4,6 +4,7 @@ import { Integration } from '@api/types/wa.types';
import { ConfigService, HttpServer } from '@config/env.config';
import { Dify, DifySetting, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { isURL } from 'class-validator';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
@@ -78,15 +79,35 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: contentSplit[1].split('?')[0],
},
];
payload.query = contentSplit[2] || content;
const media = content.split('|');
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: mediaBase64,
},
];
}
} else {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: media[1].split('?')[0],
},
];
}
payload.query = media[2] || content;
}
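The same pattern (prefer msg.message.base64, otherwise download msg.message.mediaUrl and convert it to base64) recurs across the Dify, EvoAI, OpenAI, and Flowise services in this changeset. A small hypothetical helper sketching that conversion, assuming only the axios and class-validator calls already used above:

// Hypothetical sketch, not part of this diff: resolve media to base64, preferring an
// inline base64 payload and falling back to downloading the URL.
import axios from 'axios';
import { isURL } from 'class-validator';

async function resolveMediaBase64(message: { base64?: string; mediaUrl?: string }): Promise<string | null> {
  if (message.base64) return message.base64;
  if (message.mediaUrl && isURL(message.mediaUrl)) {
    const result = await axios.get(message.mediaUrl, { responseType: 'arraybuffer' });
    return Buffer.from(result.data).toString('base64');
  }
  return null;
}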
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@@ -107,7 +128,7 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
const conversationId = response?.data?.conversation_id;
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
await this.prismaRepository.integrationSession.update({
@@ -140,15 +161,35 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: contentSplit[1].split('?')[0],
},
];
payload.inputs.query = contentSplit[2] || content;
const media = content.split('|');
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: mediaBase64,
},
];
}
} else {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: media[1].split('?')[0],
},
];
payload.inputs.query = media[2] || content;
}
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@@ -169,7 +210,7 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
const conversationId = response?.data?.conversation_id;
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
await this.prismaRepository.integrationSession.update({
@@ -202,15 +243,26 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: contentSplit[1].split('?')[0],
},
];
payload.query = contentSplit[2] || content;
const media = content.split('|');
if (msg.message.mediaUrl || msg.message.base64) {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: msg.message.mediaUrl || msg.message.base64,
},
];
} else {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: media[1].split('?')[0],
},
];
payload.query = media[2] || content;
}
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@@ -246,7 +298,7 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
await instance.client.sendPresenceUpdate('paused', remoteJid);
if (answer) {
await this.sendMessageWhatsApp(instance, remoteJid, answer, settings);
await this.sendMessageWhatsApp(instance, remoteJid, answer, settings, true);
}
await this.prismaRepository.integrationSession.update({

View File

@@ -5,6 +5,7 @@ import { ConfigService, HttpServer } from '@config/env.config';
import { Evoai, EvoaiSetting, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { downloadMediaMessage } from 'baileys';
import { isURL } from 'class-validator';
import { v4 as uuidv4 } from 'uuid';
import { BaseChatbotService } from '../../base-chatbot.service';
@@ -82,23 +83,43 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
// Handle image message if present
if (this.isImageMessage(content) && msg) {
const contentSplit = content.split('|');
parts[0].text = contentSplit[2] || content;
const media = content.split('|');
parts[0].text = media[2] || content;
try {
// Download the image
const mediaBuffer = await downloadMediaMessage(msg, 'buffer', {});
const fileContent = Buffer.from(mediaBuffer).toString('base64');
const fileName = contentSplit[2] || `${msg.key?.id || 'image'}.jpg`;
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
parts.push({
type: 'file',
file: {
name: fileName,
mimeType: 'image/jpeg',
bytes: fileContent,
},
} as any);
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
parts.push({
type: 'file',
file: {
name: msg.key.id + '.jpeg',
mimeType: 'image/jpeg',
bytes: mediaBase64,
},
} as any);
}
} else {
// Download the image
const mediaBuffer = await downloadMediaMessage(msg, 'buffer', {});
const fileContent = Buffer.from(mediaBuffer).toString('base64');
const fileName = media[2] || `${msg.key?.id || 'image'}.jpg`;
parts.push({
type: 'file',
file: {
name: fileName,
mimeType: 'image/jpeg',
bytes: fileContent,
},
} as any);
}
} catch (fileErr) {
this.logger.error(`[EvoAI] Failed to process image: ${fileErr}`);
}
@@ -174,7 +195,7 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
this.logger.debug(`[EvoAI] Extracted message to send: ${message}`);
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
} catch (error) {
this.logger.error(

View File

@@ -6,6 +6,7 @@ import { ConfigService, HttpServer } from '@config/env.config';
import { EvolutionBot, EvolutionBotSetting, IntegrationSession } from '@prisma/client';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { isURL } from 'class-validator';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
@@ -71,16 +72,26 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
}
}
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
if (this.isImageMessage(content) && msg) {
const media = content.split('|');
payload.files = [
{
type: 'image',
url: contentSplit[1].split('?')[0],
},
];
payload.query = contentSplit[2] || content;
if (msg.message.mediaUrl || msg.message.base64) {
payload.files = [
{
type: 'image',
url: msg.message.base64 || msg.message.mediaUrl,
},
];
} else {
payload.files = [
{
type: 'image',
url: media[1].split('?')[0],
},
];
}
payload.query = media[2] || content;
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@@ -115,15 +126,10 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
},
};
this.logger.debug(`[EvolutionBot] Sending request to endpoint: ${endpoint}`);
this.logger.debug(`[EvolutionBot] Request payload: ${JSON.stringify(sanitizedPayload, null, 2)}`);
const response = await axios.post(endpoint, payload, {
headers,
});
this.logger.debug(`[EvolutionBot] Response received - Status: ${response.status}`);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
@@ -134,10 +140,6 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
// Validate linkPreview is boolean and default to true for backward compatibility
const linkPreview = typeof rawLinkPreview === 'boolean' ? rawLinkPreview : true;
this.logger.debug(
`[EvolutionBot] Processing response - Message length: ${message?.length || 0}, LinkPreview: ${linkPreview}`,
);
if (message && typeof message === 'string' && message.startsWith("'") && message.endsWith("'")) {
const innerContent = message.slice(1, -1);
if (!innerContent.includes("'")) {
@@ -146,17 +148,8 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
}
if (message) {
// Send message directly with validated linkPreview option
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: message,
linkPreview, // Always boolean, defaults to true
},
false,
);
this.logger.debug(`[EvolutionBot] Message sent successfully with linkPreview: ${linkPreview}`);
// Use the base class method that handles splitMessages functionality
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, linkPreview);
} else {
this.logger.warn(`[EvolutionBot] No message content received from bot response`);
}

View File

@@ -5,6 +5,7 @@ import { Integration } from '@api/types/wa.types';
import { ConfigService, HttpServer } from '@config/env.config';
import { Flowise as FlowiseModel, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { isURL } from 'class-validator';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
@@ -82,17 +83,28 @@ export class FlowiseService extends BaseChatbotService<FlowiseModel> {
}
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
const media = content.split('|');
payload.uploads = [
{
data: contentSplit[1].split('?')[0],
type: 'url',
name: 'Flowise.png',
mime: 'image/png',
},
];
payload.question = contentSplit[2] || content;
if (msg.message.mediaUrl || msg.message.base64) {
payload.uploads = [
{
data: msg.message.base64 || msg.message.mediaUrl,
type: 'url',
name: 'Flowise.png',
mime: 'image/png',
},
];
} else {
payload.uploads = [
{
data: media[1].split('?')[0],
type: 'url',
name: 'Flowise.png',
mime: 'image/png',
},
];
payload.question = media[2] || content;
}
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@@ -130,7 +142,7 @@ export class FlowiseService extends BaseChatbotService<FlowiseModel> {
if (message) {
// Use the base class method to send the message to WhatsApp
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
}

View File

@@ -78,7 +78,7 @@ export class N8nService extends BaseChatbotService<N8n, N8nSetting> {
const message = response?.data?.output || response?.data?.answer;
// Use base class method instead of custom implementation
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
await this.prismaRepository.integrationSession.update({
where: {

View File

@@ -6,6 +6,7 @@ import { IntegrationSession, OpenaiBot, OpenaiSetting } from '@prisma/client';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { downloadMediaMessage } from 'baileys';
import { isURL } from 'class-validator';
import FormData from 'form-data';
import OpenAI from 'openai';
import P from 'pino';
@@ -85,6 +86,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
remoteJid,
"Sorry, I couldn't transcribe your audio message. Could you please type your message instead?",
settings,
true,
);
return;
}
@@ -173,7 +175,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
}
// Process with the appropriate API based on bot type
await this.sendMessageToBot(instance, session, settings, openaiBot, remoteJid, pushName || '', content);
await this.sendMessageToBot(instance, session, settings, openaiBot, remoteJid, pushName || '', content, msg);
} catch (error) {
this.logger.error(`Error in process: ${error.message || JSON.stringify(error)}`);
return;
@@ -191,6 +193,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
remoteJid: string,
pushName: string,
content: string,
msg?: any,
): Promise<void> {
this.logger.log(`Sending message to bot for remoteJid: ${remoteJid}, bot type: ${openaiBot.botType}`);
@@ -222,10 +225,11 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
pushName,
false, // Not fromMe
content,
msg,
);
} else {
this.logger.log('Processing with ChatCompletion API');
message = await this.processChatCompletionMessage(instance, openaiBot, remoteJid, content);
message = await this.processChatCompletionMessage(instance, openaiBot, remoteJid, content, msg);
}
this.logger.log(`Got response from OpenAI: ${message?.substring(0, 50)}${message?.length > 50 ? '...' : ''}`);
@@ -233,7 +237,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
// Send the response
if (message) {
this.logger.log('Sending message to WhatsApp');
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
} else {
this.logger.error('No message to send to WhatsApp');
}
@@ -268,6 +272,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
pushName: string,
fromMe: boolean,
content: string,
msg?: any,
): Promise<string> {
const messageData: any = {
role: fromMe ? 'assistant' : 'user',
@@ -276,18 +281,35 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
const url = contentSplit[1].split('?')[0];
const media = content.split('|');
messageData.content = [
{ type: 'text', text: contentSplit[2] || content },
{
type: 'image_url',
image_url: {
url: url,
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
messageData.content = [
{ type: 'text', text: media[2] || content },
{ type: 'image_url', image_url: { url: mediaBase64 } },
];
}
} else {
const url = media[1].split('?')[0];
messageData.content = [
{ type: 'text', text: media[2] || content },
{
type: 'image_url',
image_url: {
url: url,
},
},
},
];
];
}
}
// Get thread ID from session or create new thread
@@ -376,6 +398,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
openaiBot: OpenaiBot,
remoteJid: string,
content: string,
msg?: any,
): Promise<string> {
this.logger.log('Starting processChatCompletionMessage');
@@ -468,18 +491,26 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
// Handle image messages
if (this.isImageMessage(content)) {
this.logger.log('Found image message');
const contentSplit = content.split('|');
const url = contentSplit[1].split('?')[0];
const media = content.split('|');
messageData.content = [
{ type: 'text', text: contentSplit[2] || content },
{
type: 'image_url',
image_url: {
url: url,
if (msg.message.mediaUrl || msg.message.base64) {
messageData.content = [
{ type: 'text', text: media[2] || content },
{ type: 'image_url', image_url: { url: msg.message.base64 || msg.message.mediaUrl } },
];
} else {
const url = media[1].split('?')[0];
messageData.content = [
{ type: 'text', text: media[2] || content },
{
type: 'image_url',
image_url: {
url: url,
},
},
},
];
];
}
}
// Combine all messages: system messages, pre-defined messages, conversation history, and current message

View File

@@ -318,7 +318,7 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
} else if (formattedText.includes('[buttons]')) {
await this.processButtonMessage(instance, formattedText, session.remoteJid);
} else {
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings);
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings, true);
}
sendTelemetry('/message/sendText');
@@ -393,7 +393,7 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
} else if (formattedText.includes('[buttons]')) {
await this.processButtonMessage(instance, formattedText, session.remoteJid);
} else {
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings);
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings, true);
}
sendTelemetry('/message/sendText');
@@ -642,15 +642,21 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
if (!content) {
if (unknownMessage) {
await this.sendMessageWhatsApp(waInstance, remoteJid, unknownMessage, {
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
await this.sendMessageWhatsApp(
waInstance,
remoteJid,
unknownMessage,
});
{
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
unknownMessage,
},
true,
);
sendTelemetry('/message/sendText');
}
return;
@@ -801,15 +807,21 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
if (!data?.messages || data.messages.length === 0) {
if (!content) {
if (unknownMessage) {
await this.sendMessageWhatsApp(waInstance, remoteJid, unknownMessage, {
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
await this.sendMessageWhatsApp(
waInstance,
remoteJid,
unknownMessage,
});
{
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
unknownMessage,
},
true,
);
sendTelemetry('/message/sendText');
}
return;
@@ -903,15 +915,21 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
if (!content) {
if (unknownMessage) {
await this.sendMessageWhatsApp(waInstance, remoteJid, unknownMessage, {
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
await this.sendMessageWhatsApp(
waInstance,
remoteJid,
unknownMessage,
});
{
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
unknownMessage,
},
true,
);
sendTelemetry('/message/sendText');
}
return;

View File

@@ -40,6 +40,11 @@ export class EventDto {
useTLS?: boolean;
events?: string[];
};
kafka?: {
enabled?: boolean;
events?: string[];
};
}
export function EventInstanceMixin<TBase extends Constructor>(Base: TBase) {
@@ -82,5 +87,10 @@ export function EventInstanceMixin<TBase extends Constructor>(Base: TBase) {
useTLS?: boolean;
events?: string[];
};
kafka?: {
enabled?: boolean;
events?: string[];
};
};
}

View File

@@ -1,3 +1,4 @@
import { KafkaController } from '@api/integrations/event/kafka/kafka.controller';
import { NatsController } from '@api/integrations/event/nats/nats.controller';
import { PusherController } from '@api/integrations/event/pusher/pusher.controller';
import { RabbitmqController } from '@api/integrations/event/rabbitmq/rabbitmq.controller';
@@ -17,6 +18,7 @@ export class EventManager {
private natsController: NatsController;
private sqsController: SqsController;
private pusherController: PusherController;
private kafkaController: KafkaController;
constructor(prismaRepository: PrismaRepository, waMonitor: WAMonitoringService) {
this.prisma = prismaRepository;
@@ -28,6 +30,7 @@ export class EventManager {
this.nats = new NatsController(prismaRepository, waMonitor);
this.sqs = new SqsController(prismaRepository, waMonitor);
this.pusher = new PusherController(prismaRepository, waMonitor);
this.kafka = new KafkaController(prismaRepository, waMonitor);
}
public set prisma(prisma: PrismaRepository) {
@@ -93,12 +96,20 @@ export class EventManager {
return this.pusherController;
}
public set kafka(kafka: KafkaController) {
this.kafkaController = kafka;
}
public get kafka() {
return this.kafkaController;
}
public init(httpServer: Server): void {
this.websocket.init(httpServer);
this.rabbitmq.init();
this.nats.init();
this.sqs.init();
this.pusher.init();
this.kafka.init();
}
public async emit(eventData: {
@@ -119,42 +130,47 @@ export class EventManager {
await this.sqs.emit(eventData);
await this.webhook.emit(eventData);
await this.pusher.emit(eventData);
await this.kafka.emit(eventData);
}
public async setInstance(instanceName: string, data: any): Promise<any> {
if (data.websocket)
if (data.websocket) {
await this.websocket.set(instanceName, {
websocket: {
enabled: true,
events: data.websocket?.events,
},
});
}
if (data.rabbitmq)
if (data.rabbitmq) {
await this.rabbitmq.set(instanceName, {
rabbitmq: {
enabled: true,
events: data.rabbitmq?.events,
},
});
}
if (data.nats)
if (data.nats) {
await this.nats.set(instanceName, {
nats: {
enabled: true,
events: data.nats?.events,
},
});
}
if (data.sqs)
if (data.sqs) {
await this.sqs.set(instanceName, {
sqs: {
enabled: true,
events: data.sqs?.events,
},
});
}
if (data.webhook)
if (data.webhook) {
await this.webhook.set(instanceName, {
webhook: {
enabled: true,
@@ -165,8 +181,9 @@ export class EventManager {
byEvents: data.webhook?.byEvents,
},
});
}
if (data.pusher)
if (data.pusher) {
await this.pusher.set(instanceName, {
pusher: {
enabled: true,
@@ -178,5 +195,15 @@ export class EventManager {
useTLS: data.pusher?.useTLS,
},
});
}
if (data.kafka) {
await this.kafka.set(instanceName, {
kafka: {
enabled: true,
events: data.kafka?.events,
},
});
}
}
}

View File

@@ -1,3 +1,4 @@
import { KafkaRouter } from '@api/integrations/event/kafka/kafka.router';
import { NatsRouter } from '@api/integrations/event/nats/nats.router';
import { PusherRouter } from '@api/integrations/event/pusher/pusher.router';
import { RabbitmqRouter } from '@api/integrations/event/rabbitmq/rabbitmq.router';
@@ -18,5 +19,6 @@ export class EventRouter {
this.router.use('/nats', new NatsRouter(...guards).router);
this.router.use('/pusher', new PusherRouter(...guards).router);
this.router.use('/sqs', new SqsRouter(...guards).router);
this.router.use('/kafka', new KafkaRouter(...guards).router);
}
}

View File

@@ -22,6 +22,9 @@ export const eventSchema: JSONSchema7 = {
sqs: {
$ref: '#/$defs/event',
},
kafka: {
$ref: '#/$defs/event',
},
},
$defs: {
event: {

View File

@@ -0,0 +1,414 @@
import { PrismaRepository } from '@api/repository/repository.service';
import { WAMonitoringService } from '@api/services/monitor.service';
import { configService, Kafka, Log } from '@config/env.config';
import { Logger } from '@config/logger.config';
import { Consumer, ConsumerConfig, Kafka as KafkaJS, KafkaConfig, Producer, ProducerConfig } from 'kafkajs';
import { EmitData, EventController, EventControllerInterface } from '../event.controller';
export class KafkaController extends EventController implements EventControllerInterface {
private kafkaClient: KafkaJS | null = null;
private producer: Producer | null = null;
private consumer: Consumer | null = null;
private readonly logger = new Logger('KafkaController');
private reconnectAttempts = 0;
private maxReconnectAttempts = 10;
private reconnectDelay = 5000; // 5 seconds
private isReconnecting = false;
constructor(prismaRepository: PrismaRepository, waMonitor: WAMonitoringService) {
super(prismaRepository, waMonitor, configService.get<Kafka>('KAFKA')?.ENABLED, 'kafka');
}
public async init(): Promise<void> {
if (!this.status) {
return;
}
await this.connect();
}
private async connect(): Promise<void> {
try {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const clientConfig: KafkaConfig = {
clientId: kafkaConfig.CLIENT_ID || 'evolution-api',
brokers: kafkaConfig.BROKERS || ['localhost:9092'],
connectionTimeout: kafkaConfig.CONNECTION_TIMEOUT || 3000,
requestTimeout: kafkaConfig.REQUEST_TIMEOUT || 30000,
retry: {
initialRetryTime: 100,
retries: 8,
},
};
// Add SASL authentication if configured
if (kafkaConfig.SASL?.ENABLED) {
clientConfig.sasl = {
mechanism: (kafkaConfig.SASL.MECHANISM as any) || 'plain',
username: kafkaConfig.SASL.USERNAME,
password: kafkaConfig.SASL.PASSWORD,
};
}
// Add SSL configuration if enabled
if (kafkaConfig.SSL?.ENABLED) {
clientConfig.ssl = {
rejectUnauthorized: kafkaConfig.SSL.REJECT_UNAUTHORIZED !== false,
ca: kafkaConfig.SSL.CA ? [kafkaConfig.SSL.CA] : undefined,
key: kafkaConfig.SSL.KEY,
cert: kafkaConfig.SSL.CERT,
};
}
this.kafkaClient = new KafkaJS(clientConfig);
// Initialize producer
const producerConfig: ProducerConfig = {
maxInFlightRequests: 1,
idempotent: true,
transactionTimeout: 30000,
};
this.producer = this.kafkaClient.producer(producerConfig);
await this.producer.connect();
// Initialize consumer for global events if enabled
if (kafkaConfig.GLOBAL_ENABLED) {
await this.initGlobalConsumer();
}
this.reconnectAttempts = 0;
this.isReconnecting = false;
this.logger.info('Kafka initialized successfully');
// Create topics if they don't exist
if (kafkaConfig.AUTO_CREATE_TOPICS) {
await this.createTopics();
}
} catch (error) {
this.logger.error({
local: 'KafkaController.connect',
message: 'Failed to connect to Kafka',
error: error.message || error,
});
this.scheduleReconnect();
throw error;
}
}
private async initGlobalConsumer(): Promise<void> {
try {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const consumerConfig: ConsumerConfig = {
groupId: kafkaConfig.CONSUMER_GROUP_ID || 'evolution-api-consumers',
sessionTimeout: 30000,
heartbeatInterval: 3000,
};
this.consumer = this.kafkaClient.consumer(consumerConfig);
await this.consumer.connect();
// Subscribe to global topics
const events = kafkaConfig.EVENTS;
if (events) {
const eventKeys = Object.keys(events).filter((event) => events[event]);
for (const event of eventKeys) {
const topicName = this.getTopicName(event, true);
await this.consumer.subscribe({ topic: topicName });
}
// Start consuming messages
await this.consumer.run({
eachMessage: async ({ topic, message }) => {
try {
const data = JSON.parse(message.value?.toString() || '{}');
this.logger.debug(`Received message from topic ${topic}: ${JSON.stringify(data)}`);
// Process the message here if needed
// This is where you can add custom message processing logic
} catch (error) {
this.logger.error(`Error processing message from topic ${topic}: ${error}`);
}
},
});
this.logger.info('Global Kafka consumer initialized');
}
} catch (error) {
this.logger.error(`Failed to initialize global Kafka consumer: ${error}`);
}
}
private async createTopics(): Promise<void> {
try {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const admin = this.kafkaClient.admin();
await admin.connect();
const topics = [];
// Create global topics if enabled
if (kafkaConfig.GLOBAL_ENABLED && kafkaConfig.EVENTS) {
const eventKeys = Object.keys(kafkaConfig.EVENTS).filter((event) => kafkaConfig.EVENTS[event]);
for (const event of eventKeys) {
const topicName = this.getTopicName(event, true);
topics.push({
topic: topicName,
numPartitions: kafkaConfig.NUM_PARTITIONS || 1,
replicationFactor: kafkaConfig.REPLICATION_FACTOR || 1,
});
}
}
if (topics.length > 0) {
await admin.createTopics({
topics,
waitForLeaders: true,
});
this.logger.info(`Created ${topics.length} Kafka topics`);
}
await admin.disconnect();
} catch (error) {
this.logger.error(`Failed to create Kafka topics: ${error}`);
}
}
private getTopicName(event: string, isGlobal: boolean = false, instanceName?: string): string {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const prefix = kafkaConfig.TOPIC_PREFIX || 'evolution';
if (isGlobal) {
return `${prefix}.global.${event.toLowerCase().replace(/_/g, '.')}`;
} else {
return `${prefix}.${instanceName}.${event.toLowerCase().replace(/_/g, '.')}`;
}
}
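For illustration, with the default TOPIC_PREFIX of 'evolution' and a hypothetical instance named 'support', the scheme above resolves as follows; note that the uppercase event keys used in createTopics and the dotted event names used in emit map to the same topic:

// Hypothetical examples of the resulting topic names:
//   getTopicName('MESSAGES_UPSERT', true)              -> 'evolution.global.messages.upsert'
//   getTopicName('messages.upsert', false, 'support')  -> 'evolution.support.messages.upsert'
//   getTopicName('SEND_MESSAGE', true)                 -> 'evolution.global.send.message'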
private handleConnectionLoss(): void {
if (this.isReconnecting) {
return;
}
this.cleanup();
this.scheduleReconnect();
}
private scheduleReconnect(): void {
if (this.reconnectAttempts >= this.maxReconnectAttempts) {
this.logger.error(
`Maximum reconnect attempts (${this.maxReconnectAttempts}) reached. Stopping reconnection attempts.`,
);
return;
}
if (this.isReconnecting) {
return;
}
this.isReconnecting = true;
this.reconnectAttempts++;
const delay = this.reconnectDelay * Math.pow(2, Math.min(this.reconnectAttempts - 1, 5));
this.logger.info(
`Scheduling Kafka reconnection attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`,
);
setTimeout(async () => {
try {
this.logger.info(
`Attempting to reconnect to Kafka (attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts})`,
);
await this.connect();
this.logger.info('Successfully reconnected to Kafka');
} catch (error) {
this.logger.error({
local: 'KafkaController.scheduleReconnect',
message: `Reconnection attempt ${this.reconnectAttempts} failed`,
error: error.message || error,
});
this.isReconnecting = false;
this.scheduleReconnect();
}
}, delay);
}
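For reference, with the defaults above (5000 ms base delay, exponent capped at 2^5), the scheduled delays are 5 s, 10 s, 20 s, 40 s, 80 s, and then 160 s for each remaining attempt, up to the 10-attempt limit.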
private async ensureConnection(): Promise<boolean> {
if (!this.producer) {
this.logger.warn('Kafka producer is not available, attempting to reconnect...');
if (!this.isReconnecting) {
this.scheduleReconnect();
}
return false;
}
return true;
}
public async emit({
instanceName,
origin,
event,
data,
serverUrl,
dateTime,
sender,
apiKey,
integration,
}: EmitData): Promise<void> {
if (integration && !integration.includes('kafka')) {
return;
}
if (!this.status) {
return;
}
if (!(await this.ensureConnection())) {
this.logger.warn(`Failed to emit event ${event} for instance ${instanceName}: No Kafka connection`);
return;
}
const instanceKafka = await this.get(instanceName);
const kafkaLocal = instanceKafka?.events;
const kafkaGlobal = configService.get<Kafka>('KAFKA').GLOBAL_ENABLED;
const kafkaEvents = configService.get<Kafka>('KAFKA').EVENTS;
const we = event.replace(/[.-]/gm, '_').toUpperCase();
const logEnabled = configService.get<Log>('LOG').LEVEL.includes('WEBHOOKS');
const message = {
event,
instance: instanceName,
data,
server_url: serverUrl,
date_time: dateTime,
sender,
apikey: apiKey,
timestamp: Date.now(),
};
const messageValue = JSON.stringify(message);
// Instance-specific events
if (instanceKafka?.enabled && this.producer && Array.isArray(kafkaLocal) && kafkaLocal.includes(we)) {
const topicName = this.getTopicName(event, false, instanceName);
let retry = 0;
while (retry < 3) {
try {
await this.producer.send({
topic: topicName,
messages: [
{
key: instanceName,
value: messageValue,
headers: {
event,
instance: instanceName,
origin,
timestamp: dateTime,
},
},
],
});
if (logEnabled) {
const logData = {
local: `${origin}.sendData-Kafka`,
...message,
};
this.logger.log(logData);
}
break;
} catch (error) {
this.logger.error({
local: 'KafkaController.emit',
message: `Error publishing local Kafka message (attempt ${retry + 1}/3)`,
error: error.message || error,
});
retry++;
if (retry >= 3) {
this.handleConnectionLoss();
}
}
}
}
// Global events
if (kafkaGlobal && kafkaEvents[we] && this.producer) {
const topicName = this.getTopicName(event, true);
let retry = 0;
while (retry < 3) {
try {
await this.producer.send({
topic: topicName,
messages: [
{
key: `${instanceName}-${event}`,
value: messageValue,
headers: {
event,
instance: instanceName,
origin,
timestamp: dateTime,
},
},
],
});
if (logEnabled) {
const logData = {
local: `${origin}.sendData-Kafka-Global`,
...message,
};
this.logger.log(logData);
}
break;
} catch (error) {
this.logger.error({
local: 'KafkaController.emit',
message: `Error publishing global Kafka message (attempt ${retry + 1}/3)`,
error: error.message || error,
});
retry++;
if (retry >= 3) {
this.handleConnectionLoss();
}
}
}
}
}
public async cleanup(): Promise<void> {
try {
if (this.consumer) {
await this.consumer.disconnect();
this.consumer = null;
}
if (this.producer) {
await this.producer.disconnect();
this.producer = null;
}
this.kafkaClient = null;
} catch (error) {
this.logger.warn({
local: 'KafkaController.cleanup',
message: 'Error during cleanup',
error: error.message || error,
});
this.producer = null;
this.consumer = null;
this.kafkaClient = null;
}
}
}

View File

@@ -0,0 +1,36 @@
import { RouterBroker } from '@api/abstract/abstract.router';
import { InstanceDto } from '@api/dto/instance.dto';
import { EventDto } from '@api/integrations/event/event.dto';
import { HttpStatus } from '@api/routes/index.router';
import { eventManager } from '@api/server.module';
import { eventSchema, instanceSchema } from '@validate/validate.schema';
import { RequestHandler, Router } from 'express';
export class KafkaRouter extends RouterBroker {
constructor(...guards: RequestHandler[]) {
super();
this.router
.post(this.routerPath('set'), ...guards, async (req, res) => {
const response = await this.dataValidate<EventDto>({
request: req,
schema: eventSchema,
ClassRef: EventDto,
execute: (instance, data) => eventManager.kafka.set(instance.instanceName, data),
});
res.status(HttpStatus.CREATED).json(response);
})
.get(this.routerPath('find'), ...guards, async (req, res) => {
const response = await this.dataValidate<InstanceDto>({
request: req,
schema: instanceSchema,
ClassRef: InstanceDto,
execute: (instance) => eventManager.kafka.get(instance.instanceName),
});
res.status(HttpStatus.OK).json(response);
});
}
public readonly router: Router = Router();
}
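A hypothetical request body for the new kafka "set" route (the shape follows the kafka entry added to EventDto and eventSchema above; the exact URL is produced by RouterBroker.routerPath and is not shown here):

// Illustrative payload only; event names follow the uppercase convention the
// controllers check against (e.g. 'MESSAGES_UPSERT').
const setKafkaBody = {
  kafka: {
    enabled: true,
    events: ['MESSAGES_UPSERT', 'SEND_MESSAGE'],
  },
};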

View File

@@ -0,0 +1,21 @@
import { JSONSchema7 } from 'json-schema';
import { v4 } from 'uuid';
import { EventController } from '../event.controller';
export const kafkaSchema: JSONSchema7 = {
$id: v4(),
type: 'object',
properties: {
enabled: { type: 'boolean', enum: [true, false] },
events: {
type: 'array',
minItems: 0,
items: {
type: 'string',
enum: EventController.events,
},
},
},
required: ['enabled'],
};

View File

@@ -33,7 +33,7 @@ const bucketExists = async () => {
try {
const list = await minioClient.listBuckets();
return list.find((bucket) => bucket.name === bucketName);
} catch (error) {
} catch {
return false;
}
}

View File

@@ -826,7 +826,7 @@ export class ChannelStartupService {
const msg = message.message;
// If the message only contains messageContextInfo, it is not valid media
if (Object.keys(msg).length === 1 && 'messageContextInfo' in msg) {
if (Object.keys(msg).length === 1 && Object.prototype.hasOwnProperty.call(msg, 'messageContextInfo')) {
return false;
}

View File

@@ -25,7 +25,7 @@ export class ProxyService {
}
return result;
} catch (error) {
} catch {
return null;
}
}

View File

@@ -24,7 +24,7 @@ export class SettingsService {
}
return result;
} catch (error) {
} catch {
return null;
}
}

View File

@@ -153,6 +153,34 @@ export type Sqs = {
};
};
export type Kafka = {
ENABLED: boolean;
CLIENT_ID: string;
BROKERS: string[];
CONNECTION_TIMEOUT: number;
REQUEST_TIMEOUT: number;
GLOBAL_ENABLED: boolean;
CONSUMER_GROUP_ID: string;
TOPIC_PREFIX: string;
NUM_PARTITIONS: number;
REPLICATION_FACTOR: number;
AUTO_CREATE_TOPICS: boolean;
EVENTS: EventsRabbitmq;
SASL?: {
ENABLED: boolean;
MECHANISM: string;
USERNAME: string;
PASSWORD: string;
};
SSL?: {
ENABLED: boolean;
REJECT_UNAUTHORIZED: boolean;
CA?: string;
KEY?: string;
CERT?: string;
};
};
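As a sketch, a minimal single-broker configuration resolved from the environment variables mapped in the ConfigService further below could look like this (values are hypothetical and EVENTS is omitted for brevity):

// Hypothetical example only; the corresponding env vars are KAFKA_ENABLED,
// KAFKA_CLIENT_ID, KAFKA_BROKERS, KAFKA_GLOBAL_ENABLED, KAFKA_TOPIC_PREFIX
// and KAFKA_AUTO_CREATE_TOPICS.
const kafkaExample: Partial<Kafka> = {
  ENABLED: true,
  CLIENT_ID: 'evolution-api',
  BROKERS: ['localhost:9092'],
  GLOBAL_ENABLED: true,
  TOPIC_PREFIX: 'evolution',
  AUTO_CREATE_TOPICS: true,
};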
export type Websocket = {
ENABLED: boolean;
GLOBAL_EVENTS: boolean;
@@ -372,6 +400,7 @@ export interface Env {
RABBITMQ: Rabbitmq;
NATS: Nats;
SQS: Sqs;
KAFKA: Kafka;
WEBSOCKET: Websocket;
WA_BUSINESS: WaBusiness;
LOG: Log;
@@ -587,6 +616,68 @@ export class ConfigService {
TYPEBOT_START: process.env?.SQS_GLOBAL_TYPEBOT_START === 'true',
},
},
KAFKA: {
ENABLED: process.env?.KAFKA_ENABLED === 'true',
CLIENT_ID: process.env?.KAFKA_CLIENT_ID || 'evolution-api',
BROKERS: process.env?.KAFKA_BROKERS?.split(',') || ['localhost:9092'],
CONNECTION_TIMEOUT: Number.parseInt(process.env?.KAFKA_CONNECTION_TIMEOUT || '3000'),
REQUEST_TIMEOUT: Number.parseInt(process.env?.KAFKA_REQUEST_TIMEOUT || '30000'),
GLOBAL_ENABLED: process.env?.KAFKA_GLOBAL_ENABLED === 'true',
CONSUMER_GROUP_ID: process.env?.KAFKA_CONSUMER_GROUP_ID || 'evolution-api-consumers',
TOPIC_PREFIX: process.env?.KAFKA_TOPIC_PREFIX || 'evolution',
NUM_PARTITIONS: Number.parseInt(process.env?.KAFKA_NUM_PARTITIONS || '1'),
REPLICATION_FACTOR: Number.parseInt(process.env?.KAFKA_REPLICATION_FACTOR || '1'),
AUTO_CREATE_TOPICS: process.env?.KAFKA_AUTO_CREATE_TOPICS === 'true',
EVENTS: {
APPLICATION_STARTUP: process.env?.KAFKA_EVENTS_APPLICATION_STARTUP === 'true',
INSTANCE_CREATE: process.env?.KAFKA_EVENTS_INSTANCE_CREATE === 'true',
INSTANCE_DELETE: process.env?.KAFKA_EVENTS_INSTANCE_DELETE === 'true',
QRCODE_UPDATED: process.env?.KAFKA_EVENTS_QRCODE_UPDATED === 'true',
MESSAGES_SET: process.env?.KAFKA_EVENTS_MESSAGES_SET === 'true',
MESSAGES_UPSERT: process.env?.KAFKA_EVENTS_MESSAGES_UPSERT === 'true',
MESSAGES_EDITED: process.env?.KAFKA_EVENTS_MESSAGES_EDITED === 'true',
MESSAGES_UPDATE: process.env?.KAFKA_EVENTS_MESSAGES_UPDATE === 'true',
MESSAGES_DELETE: process.env?.KAFKA_EVENTS_MESSAGES_DELETE === 'true',
SEND_MESSAGE: process.env?.KAFKA_EVENTS_SEND_MESSAGE === 'true',
SEND_MESSAGE_UPDATE: process.env?.KAFKA_EVENTS_SEND_MESSAGE_UPDATE === 'true',
CONTACTS_SET: process.env?.KAFKA_EVENTS_CONTACTS_SET === 'true',
CONTACTS_UPSERT: process.env?.KAFKA_EVENTS_CONTACTS_UPSERT === 'true',
CONTACTS_UPDATE: process.env?.KAFKA_EVENTS_CONTACTS_UPDATE === 'true',
PRESENCE_UPDATE: process.env?.KAFKA_EVENTS_PRESENCE_UPDATE === 'true',
CHATS_SET: process.env?.KAFKA_EVENTS_CHATS_SET === 'true',
CHATS_UPSERT: process.env?.KAFKA_EVENTS_CHATS_UPSERT === 'true',
CHATS_UPDATE: process.env?.KAFKA_EVENTS_CHATS_UPDATE === 'true',
CHATS_DELETE: process.env?.KAFKA_EVENTS_CHATS_DELETE === 'true',
CONNECTION_UPDATE: process.env?.KAFKA_EVENTS_CONNECTION_UPDATE === 'true',
LABELS_EDIT: process.env?.KAFKA_EVENTS_LABELS_EDIT === 'true',
LABELS_ASSOCIATION: process.env?.KAFKA_EVENTS_LABELS_ASSOCIATION === 'true',
GROUPS_UPSERT: process.env?.KAFKA_EVENTS_GROUPS_UPSERT === 'true',
GROUP_UPDATE: process.env?.KAFKA_EVENTS_GROUPS_UPDATE === 'true',
GROUP_PARTICIPANTS_UPDATE: process.env?.KAFKA_EVENTS_GROUP_PARTICIPANTS_UPDATE === 'true',
CALL: process.env?.KAFKA_EVENTS_CALL === 'true',
TYPEBOT_START: process.env?.KAFKA_EVENTS_TYPEBOT_START === 'true',
TYPEBOT_CHANGE_STATUS: process.env?.KAFKA_EVENTS_TYPEBOT_CHANGE_STATUS === 'true',
},
SASL:
process.env?.KAFKA_SASL_ENABLED === 'true'
? {
ENABLED: true,
MECHANISM: process.env?.KAFKA_SASL_MECHANISM || 'plain',
USERNAME: process.env?.KAFKA_SASL_USERNAME || '',
PASSWORD: process.env?.KAFKA_SASL_PASSWORD || '',
}
: undefined,
SSL:
process.env?.KAFKA_SSL_ENABLED === 'true'
? {
ENABLED: true,
REJECT_UNAUTHORIZED: process.env?.KAFKA_SSL_REJECT_UNAUTHORIZED !== 'false',
CA: process.env?.KAFKA_SSL_CA,
KEY: process.env?.KAFKA_SSL_KEY,
CERT: process.env?.KAFKA_SSL_CERT,
}
: undefined,
},
WEBSOCKET: {
ENABLED: process.env?.WEBSOCKET_ENABLED === 'true',
GLOBAL_EVENTS: process.env?.WEBSOCKET_GLOBAL_EVENTS === 'true',

View File

@@ -3,8 +3,6 @@ import fs from 'fs';
import i18next from 'i18next';
import path from 'path';
const __dirname = path.resolve(process.cwd(), 'src', 'utils');
const languages = ['en', 'pt-BR', 'es'];
const translationsPath = path.join(__dirname, 'translations');
const configService: ConfigService = new ConfigService();

View File

@@ -19,7 +19,7 @@ export async function keyExists(sessionId: string): Promise<any> {
try {
const key = await prismaRepository.session.findUnique({ where: { sessionId: sessionId } });
return !!key;
} catch (error) {
} catch {
return false;
}
}
@@ -38,7 +38,7 @@ export async function saveKey(sessionId: string, keyJson: any): Promise<any> {
where: { sessionId: sessionId },
data: { creds: JSON.stringify(keyJson) },
});
} catch (error) {
} catch {
return null;
}
}
@@ -49,7 +49,7 @@ export async function getAuthKey(sessionId: string): Promise<any> {
if (!register) return null;
const auth = await prismaRepository.session.findUnique({ where: { sessionId: sessionId } });
return JSON.parse(auth?.creds);
} catch (error) {
} catch {
return null;
}
}
@@ -59,7 +59,7 @@ async function deleteAuthKey(sessionId: string): Promise<any> {
const register = await keyExists(sessionId);
if (!register) return;
await prismaRepository.session.delete({ where: { sessionId: sessionId } });
} catch (error) {
} catch {
return;
}
}
@@ -68,7 +68,7 @@ async function fileExists(file: string): Promise<any> {
try {
const stat = await fs.stat(file);
if (stat.isFile()) return true;
} catch (error) {
} catch {
return;
}
}
@@ -119,7 +119,7 @@ export default async function useMultiFileAuthStatePrisma(
const parsedData = JSON.parse(rawData, BufferJSON.reviver);
return parsedData;
} catch (error) {
} catch {
return null;
}
}
@@ -137,7 +137,7 @@ export default async function useMultiFileAuthStatePrisma(
} else {
await deleteAuthKey(sessionId);
}
} catch (error) {
} catch {
return;
}
}