Merge branch 'release/2.3.4'
Some checks failed
Check Code Quality / check-lint-and-build (push) Has been cancelled
Build Docker image / Build and Deploy (push) Has been cancelled
Security Scan / CodeQL Analysis (javascript) (push) Has been cancelled
Security Scan / Dependency Review (push) Has been cancelled

Commit 78c7b96f0f by Davidson Gomes, 2025-09-23 11:42:25 -03:00
46 changed files with 2188 additions and 639 deletions

View File

@ -190,6 +190,60 @@ PUSHER_EVENTS_CALL=true
PUSHER_EVENTS_TYPEBOT_START=false
PUSHER_EVENTS_TYPEBOT_CHANGE_STATUS=false
# Kafka - Environment variables
KAFKA_ENABLED=false
KAFKA_CLIENT_ID=evolution-api
KAFKA_BROKERS=localhost:9092
KAFKA_CONNECTION_TIMEOUT=3000
KAFKA_REQUEST_TIMEOUT=30000
# Global events - By enabling this variable, events from all instances are sent to global Kafka topics.
KAFKA_GLOBAL_ENABLED=false
KAFKA_CONSUMER_GROUP_ID=evolution-api-consumers
KAFKA_TOPIC_PREFIX=evolution
KAFKA_NUM_PARTITIONS=1
KAFKA_REPLICATION_FACTOR=1
KAFKA_AUTO_CREATE_TOPICS=false
# Choose the events you want to send to Kafka
KAFKA_EVENTS_APPLICATION_STARTUP=false
KAFKA_EVENTS_INSTANCE_CREATE=false
KAFKA_EVENTS_INSTANCE_DELETE=false
KAFKA_EVENTS_QRCODE_UPDATED=false
KAFKA_EVENTS_MESSAGES_SET=false
KAFKA_EVENTS_MESSAGES_UPSERT=false
KAFKA_EVENTS_MESSAGES_EDITED=false
KAFKA_EVENTS_MESSAGES_UPDATE=false
KAFKA_EVENTS_MESSAGES_DELETE=false
KAFKA_EVENTS_SEND_MESSAGE=false
KAFKA_EVENTS_SEND_MESSAGE_UPDATE=false
KAFKA_EVENTS_CONTACTS_SET=false
KAFKA_EVENTS_CONTACTS_UPSERT=false
KAFKA_EVENTS_CONTACTS_UPDATE=false
KAFKA_EVENTS_PRESENCE_UPDATE=false
KAFKA_EVENTS_CHATS_SET=false
KAFKA_EVENTS_CHATS_UPSERT=false
KAFKA_EVENTS_CHATS_UPDATE=false
KAFKA_EVENTS_CHATS_DELETE=false
KAFKA_EVENTS_GROUPS_UPSERT=false
KAFKA_EVENTS_GROUPS_UPDATE=false
KAFKA_EVENTS_GROUP_PARTICIPANTS_UPDATE=false
KAFKA_EVENTS_CONNECTION_UPDATE=false
KAFKA_EVENTS_LABELS_EDIT=false
KAFKA_EVENTS_LABELS_ASSOCIATION=false
KAFKA_EVENTS_CALL=false
KAFKA_EVENTS_TYPEBOT_START=false
KAFKA_EVENTS_TYPEBOT_CHANGE_STATUS=false
# SASL Authentication (optional)
KAFKA_SASL_ENABLED=false
KAFKA_SASL_MECHANISM=plain
KAFKA_SASL_USERNAME=
KAFKA_SASL_PASSWORD=
# SSL Configuration (optional)
KAFKA_SSL_ENABLED=false
KAFKA_SSL_REJECT_UNAUTHORIZED=true
KAFKA_SSL_CA=
KAFKA_SSL_KEY=
KAFKA_SSL_CERT=
# WhatsApp Business API - Environment variables
# Token used to validate the webhook on the Facebook APP
WA_BUSINESS_TOKEN_WEBHOOK=evolution
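For reference, a minimal consumer for the Kafka settings above could look like the sketch below. It uses kafkajs (the dependency added in this release); the topic name and payload handling are assumptions for illustration only, since the diff does not show the topic naming scheme.

import { Kafka } from 'kafkajs';

const kafka = new Kafka({
  clientId: process.env.KAFKA_CLIENT_ID ?? 'evolution-api',
  brokers: (process.env.KAFKA_BROKERS ?? 'localhost:9092').split(','),
  connectionTimeout: Number(process.env.KAFKA_CONNECTION_TIMEOUT ?? 3000),
  requestTimeout: Number(process.env.KAFKA_REQUEST_TIMEOUT ?? 30000),
});

async function main(): Promise<void> {
  const consumer = kafka.consumer({
    groupId: process.env.KAFKA_CONSUMER_GROUP_ID ?? 'evolution-api-consumers',
  });
  await consumer.connect();
  // Hypothetical topic built from KAFKA_TOPIC_PREFIX; adjust to the real naming scheme.
  await consumer.subscribe({ topics: ['evolution.messages.upsert'], fromBeginning: false });
  await consumer.run({
    eachMessage: async ({ topic, message }) => {
      console.log(topic, message.value?.toString());
    },
  });
}

main().catch(console.error);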

View File

@ -26,7 +26,7 @@ module.exports = {
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/no-unused-vars': 'warn',
'@typescript-eslint/no-unused-vars': 'error',
'import/first': 'error',
'import/no-duplicates': 'error',
'simple-import-sort/imports': 'error',

View File

@ -59,7 +59,7 @@ body:
value: |
- OS: [e.g. Ubuntu 20.04, Windows 10, macOS 12.0]
- Node.js version: [e.g. 18.17.0]
- Evolution API version: [e.g. 2.3.3]
- Evolution API version: [e.g. 2.3.4]
- Database: [e.g. PostgreSQL 14, MySQL 8.0]
- Connection type: [e.g. Baileys, WhatsApp Business API]
validations:

.gitmodules vendored Normal file (3 lines)
View File

@ -0,0 +1,3 @@
[submodule "evolution-manager-v2"]
path = evolution-manager-v2
url = https://github.com/EvolutionAPI/evolution-manager-v2.git

View File

@ -1,3 +1,76 @@
# 2.3.4 (2025-09-23)
### Features
* **Kafka Integration**: Added Apache Kafka event integration for real-time event streaming
- New Kafka controller, router, and schema for event publishing
- Support for instance-specific and global event topics
- Configurable SASL/SSL authentication and connection settings
- Auto-creation of topics with configurable partitions and replication
- Consumer group management for reliable event processing
- Integration with existing event manager for seamless event distribution
* **Evolution Manager v2 Open Source**: Evolution Manager v2 is now available as open source
- Added as git submodule with HTTPS URL for easy access
- Complete open source setup with Apache 2.0 license + Evolution API custom conditions
- GitHub templates for issues, pull requests, and workflows
- Comprehensive documentation and contribution guidelines
- Docker support for development and production environments
- CI/CD workflows for code quality, security audits, and automated builds
- Multi-language support (English, Portuguese, Spanish, French)
- Modern React + TypeScript + Vite frontend with Tailwind CSS
* **EvolutionBot Enhancements**: Improved EvolutionBot functionality and message handling
- Implemented splitMessages functionality for better message segmentation
- Added linkPreview support for enhanced message presentation
- Centralized split logic across chatbot services for consistency
- Enhanced message formatting and delivery capabilities
### Fixed
* **MySQL Schema**: Fixed invalid default value errors for `createdAt` fields in `Evoai` and `EvoaiSetting` models
- Changed `@default(now())` to `@default(dbgenerated("CURRENT_TIMESTAMP"))` for MySQL compatibility
- Added missing relation fields (`N8n`, `N8nSetting`, `Evoai`, `EvoaiSetting`) in Instance model
- Resolved Prisma schema validation errors for MySQL provider
* **Prisma Schema Validation**: Fixed `instanceName` field error in message creation
- Removed invalid `instanceName` field from message objects before database insertion
- Resolved `Unknown argument 'instanceName'` Prisma validation error
- Streamlined message data structure to match Prisma schema requirements
* **Media Message Processing**: Enhanced media handling across chatbot services
- Fixed base64 conversion in EvoAI service for proper image processing
- Converted ArrayBuffer to base64 string using `Buffer.from().toString('base64')`
- Improved media URL handling and base64 encoding for better chatbot integration
- Enhanced image message detection and processing workflow
* **Evolution Manager v2 Linting**: Resolved ESLint configuration conflicts
- Disabled conflicting Prettier rules in ESLint configuration
- Added comprehensive rule overrides for TypeScript and React patterns
- Fixed import ordering and code formatting issues
- Updated security vulnerabilities in dependencies (Vite, esbuild)
### Code Quality & Refactoring
* **Chatbot Services**: Streamlined media message handling across all chatbot integrations
- Standardized base64 and mediaUrl processing patterns
- Improved code readability and maintainability in media handling logic
- Enhanced error handling for media download and conversion processes
- Unified image message detection across different chatbot services
* **Database Operations**: Improved data consistency and validation
- Enhanced Prisma schema compliance across all message operations
- Removed redundant instance name references for better data integrity
- Optimized message creation workflow with proper field validation
### Environment Variables
* Added comprehensive Kafka configuration options:
- `KAFKA_ENABLED`, `KAFKA_CLIENT_ID`, `KAFKA_BROKERS`
- `KAFKA_CONSUMER_GROUP_ID`, `KAFKA_TOPIC_PREFIX`
- `KAFKA_SASL_*` and `KAFKA_SSL_*` for authentication
- `KAFKA_EVENTS_*` for event type configuration
# 2.3.3 (2025-09-18)
### Features

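The media-handling fix described in the 2.3.4 notes above comes down to downloading the media and converting the resulting ArrayBuffer to a base64 string before handing it to the chatbot integrations. A minimal sketch of that pattern, assuming axios is available (as it is elsewhere in this diff); the helper name is illustrative:

import axios from 'axios';

// Fetch media as an ArrayBuffer and convert it to base64, mirroring the pattern
// used by the Dify and EvoAI services later in this commit.
async function mediaUrlToBase64(mediaUrl: string): Promise<string> {
  const response = await axios.get<ArrayBuffer>(mediaUrl, { responseType: 'arraybuffer' });
  return Buffer.from(response.data).toString('base64');
}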
View File

@ -17,5 +17,5 @@ b. Your contributed code may be used for commercial purposes, including but not
Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0. Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0.
© 2024 Evolution API
© 2025 Evolution API

View File

@ -55,6 +55,9 @@ Evolution API supports various integrations to enhance its functionality. Below
- [RabbitMQ](https://www.rabbitmq.com/):
- Receive events from the Evolution API via RabbitMQ.
- [Apache Kafka](https://kafka.apache.org/):
- Receive events from the Evolution API via Apache Kafka for real-time event streaming and processing.
- [Amazon SQS](https://aws.amazon.com/pt/sqs/):
- Receive events from the Evolution API via Amazon SQS.

View File

@ -3,7 +3,7 @@ version: "3.8"
services:
api:
container_name: evolution_api
image: evolution/api:metrics
image: evoapicloud/evolution-api:latest
restart: always
depends_on:
- redis

env.example Normal file (302 lines)
View File

@ -0,0 +1,302 @@
# ===========================================
# EVOLUTION API - ENVIRONMENT CONFIGURATION
# ===========================================
# ===========================================
# SERVER
# ===========================================
SERVER_NAME=evolution
SERVER_TYPE=http
SERVER_PORT=8080
SERVER_URL=http://localhost:8080
SERVER_DISABLE_DOCS=false
SERVER_DISABLE_MANAGER=false
# ===========================================
# CORS
# ===========================================
CORS_ORIGIN=*
CORS_METHODS=POST,GET,PUT,DELETE
CORS_CREDENTIALS=true
# ===========================================
# SSL (optional)
# ===========================================
SSL_CONF_PRIVKEY=
SSL_CONF_FULLCHAIN=
# ===========================================
# DATABASE
# ===========================================
DATABASE_PROVIDER=postgresql
DATABASE_CONNECTION_URI=postgresql://username:password@localhost:5432/evolution_api
DATABASE_CONNECTION_CLIENT_NAME=evolution
# Data persistence settings
DATABASE_SAVE_DATA_INSTANCE=true
DATABASE_SAVE_DATA_NEW_MESSAGE=true
DATABASE_SAVE_MESSAGE_UPDATE=true
DATABASE_SAVE_DATA_CONTACTS=true
DATABASE_SAVE_DATA_CHATS=true
DATABASE_SAVE_DATA_HISTORIC=true
DATABASE_SAVE_DATA_LABELS=true
DATABASE_SAVE_IS_ON_WHATSAPP=true
DATABASE_SAVE_IS_ON_WHATSAPP_DAYS=7
DATABASE_DELETE_MESSAGE=false
# ===========================================
# REDIS
# ===========================================
CACHE_REDIS_ENABLED=true
CACHE_REDIS_URI=redis://localhost:6379
CACHE_REDIS_PREFIX_KEY=evolution-cache
CACHE_REDIS_TTL=604800
CACHE_REDIS_SAVE_INSTANCES=true
# Local cache (fallback)
CACHE_LOCAL_ENABLED=true
CACHE_LOCAL_TTL=86400
# ===========================================
# AUTHENTICATION
# ===========================================
AUTHENTICATION_API_KEY=BQYHJGJHJ
AUTHENTICATION_EXPOSE_IN_FETCH_INSTANCES=false
# ===========================================
# LOGS
# ===========================================
LOG_LEVEL=ERROR,WARN,DEBUG,INFO,LOG,VERBOSE,DARK,WEBHOOKS,WEBSOCKET
LOG_COLOR=true
LOG_BAILEYS=error
# ===========================================
# INSTANCES
# ===========================================
DEL_INSTANCE=false
DEL_TEMP_INSTANCES=true
# ===========================================
# LANGUAGE
# ===========================================
LANGUAGE=pt-BR
# ===========================================
# WEBHOOK
# ===========================================
WEBHOOK_GLOBAL_URL=
WEBHOOK_GLOBAL_ENABLED=false
WEBHOOK_GLOBAL_WEBHOOK_BY_EVENTS=false
# Webhook events
WEBHOOK_EVENTS_APPLICATION_STARTUP=false
WEBHOOK_EVENTS_INSTANCE_CREATE=false
WEBHOOK_EVENTS_INSTANCE_DELETE=false
WEBHOOK_EVENTS_QRCODE_UPDATED=false
WEBHOOK_EVENTS_MESSAGES_SET=false
WEBHOOK_EVENTS_MESSAGES_UPSERT=false
WEBHOOK_EVENTS_MESSAGES_EDITED=false
WEBHOOK_EVENTS_MESSAGES_UPDATE=false
WEBHOOK_EVENTS_MESSAGES_DELETE=false
WEBHOOK_EVENTS_SEND_MESSAGE=false
WEBHOOK_EVENTS_SEND_MESSAGE_UPDATE=false
WEBHOOK_EVENTS_CONTACTS_SET=false
WEBHOOK_EVENTS_CONTACTS_UPDATE=false
WEBHOOK_EVENTS_CONTACTS_UPSERT=false
WEBHOOK_EVENTS_PRESENCE_UPDATE=false
WEBHOOK_EVENTS_CHATS_SET=false
WEBHOOK_EVENTS_CHATS_UPDATE=false
WEBHOOK_EVENTS_CHATS_UPSERT=false
WEBHOOK_EVENTS_CHATS_DELETE=false
WEBHOOK_EVENTS_CONNECTION_UPDATE=false
WEBHOOK_EVENTS_LABELS_EDIT=false
WEBHOOK_EVENTS_LABELS_ASSOCIATION=false
WEBHOOK_EVENTS_GROUPS_UPSERT=false
WEBHOOK_EVENTS_GROUPS_UPDATE=false
WEBHOOK_EVENTS_GROUP_PARTICIPANTS_UPDATE=false
WEBHOOK_EVENTS_CALL=false
WEBHOOK_EVENTS_TYPEBOT_START=false
WEBHOOK_EVENTS_TYPEBOT_CHANGE_STATUS=false
WEBHOOK_EVENTS_ERRORS=false
WEBHOOK_EVENTS_ERRORS_WEBHOOK=
# Webhook settings
WEBHOOK_REQUEST_TIMEOUT_MS=30000
WEBHOOK_RETRY_MAX_ATTEMPTS=10
WEBHOOK_RETRY_INITIAL_DELAY_SECONDS=5
WEBHOOK_RETRY_USE_EXPONENTIAL_BACKOFF=true
WEBHOOK_RETRY_MAX_DELAY_SECONDS=300
WEBHOOK_RETRY_JITTER_FACTOR=0.2
WEBHOOK_RETRY_NON_RETRYABLE_STATUS_CODES=400,401,403,404,422
# ===========================================
# WEBSOCKET
# ===========================================
WEBSOCKET_ENABLED=true
WEBSOCKET_GLOBAL_EVENTS=true
WEBSOCKET_ALLOWED_HOSTS=
# ===========================================
# RABBITMQ
# ===========================================
RABBITMQ_ENABLED=false
RABBITMQ_GLOBAL_ENABLED=false
RABBITMQ_PREFIX_KEY=
RABBITMQ_EXCHANGE_NAME=evolution_exchange
RABBITMQ_URI=
RABBITMQ_FRAME_MAX=8192
# ===========================================
# NATS
# ===========================================
NATS_ENABLED=false
NATS_GLOBAL_ENABLED=false
NATS_PREFIX_KEY=
NATS_EXCHANGE_NAME=evolution_exchange
NATS_URI=
# ===========================================
# SQS
# ===========================================
SQS_ENABLED=false
SQS_GLOBAL_ENABLED=false
SQS_GLOBAL_FORCE_SINGLE_QUEUE=false
SQS_GLOBAL_PREFIX_NAME=global
SQS_ACCESS_KEY_ID=
SQS_SECRET_ACCESS_KEY=
SQS_ACCOUNT_ID=
SQS_REGION=
SQS_MAX_PAYLOAD_SIZE=1048576
# ===========================================
# PUSHER
# ===========================================
PUSHER_ENABLED=false
PUSHER_GLOBAL_ENABLED=false
PUSHER_GLOBAL_APP_ID=
PUSHER_GLOBAL_KEY=
PUSHER_GLOBAL_SECRET=
PUSHER_GLOBAL_CLUSTER=
PUSHER_GLOBAL_USE_TLS=false
# ===========================================
# WHATSAPP BUSINESS
# ===========================================
WA_BUSINESS_TOKEN_WEBHOOK=evolution
WA_BUSINESS_URL=https://graph.facebook.com
WA_BUSINESS_VERSION=v18.0
WA_BUSINESS_LANGUAGE=en
# ===========================================
# SESSION SETTINGS
# ===========================================
CONFIG_SESSION_PHONE_CLIENT=Evolution API
CONFIG_SESSION_PHONE_NAME=Chrome
# ===========================================
# QR CODE
# ===========================================
QRCODE_LIMIT=30
QRCODE_COLOR=#198754
# ===========================================
# INTEGRATIONS
# ===========================================
# Typebot
TYPEBOT_ENABLED=false
TYPEBOT_API_VERSION=old
TYPEBOT_SEND_MEDIA_BASE64=false
# Chatwoot
CHATWOOT_ENABLED=false
CHATWOOT_MESSAGE_DELETE=false
CHATWOOT_MESSAGE_READ=false
CHATWOOT_BOT_CONTACT=true
CHATWOOT_IMPORT_DATABASE_CONNECTION_URI=
CHATWOOT_IMPORT_PLACEHOLDER_MEDIA_MESSAGE=false
# OpenAI
OPENAI_ENABLED=false
OPENAI_API_KEY_GLOBAL=
# Dify
DIFY_ENABLED=false
# N8N
N8N_ENABLED=false
# EvoAI
EVOAI_ENABLED=false
# Flowise
FLOWISE_ENABLED=false
# ===========================================
# S3 / MINIO
# ===========================================
S3_ENABLED=false
S3_ACCESS_KEY=
S3_SECRET_KEY=
S3_ENDPOINT=
S3_BUCKET=
S3_PORT=9000
S3_USE_SSL=false
S3_REGION=
S3_SKIP_POLICY=false
S3_SAVE_VIDEO=false
# ===========================================
# METRICS
# ===========================================
PROMETHEUS_METRICS=false
METRICS_AUTH_REQUIRED=false
METRICS_USER=
METRICS_PASSWORD=
METRICS_ALLOWED_IPS=
# ===========================================
# TELEMETRY
# ===========================================
TELEMETRY_ENABLED=true
TELEMETRY_URL=
# ===========================================
# PROXY
# ===========================================
PROXY_HOST=
PROXY_PORT=
PROXY_PROTOCOL=
PROXY_USERNAME=
PROXY_PASSWORD=
# ===========================================
# AUDIO CONVERTER
# ===========================================
API_AUDIO_CONVERTER=
API_AUDIO_CONVERTER_KEY=
# ===========================================
# FACEBOOK
# ===========================================
FACEBOOK_APP_ID=
FACEBOOK_CONFIG_ID=
FACEBOOK_USER_TOKEN=
# ===========================================
# SENTRY
# ===========================================
SENTRY_DSN=
# ===========================================
# EVENT EMITTER
# ===========================================
EVENT_EMITTER_MAX_LISTENERS=50
# ===========================================
# PROVIDER
# ===========================================
PROVIDER_ENABLED=false
PROVIDER_HOST=
PROVIDER_PORT=5656
PROVIDER_PREFIX=evolution
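As an illustration of how the WEBHOOK_RETRY_* values above interact, a hedged sketch of an exponential backoff with jitter follows; the exact formula used by the API is not shown in this diff, so treat the arithmetic as an assumption:

// Illustrative only: combines the WEBHOOK_RETRY_* variables into a per-attempt delay.
function retryDelaySeconds(attempt: number): number {
  const initial = Number(process.env.WEBHOOK_RETRY_INITIAL_DELAY_SECONDS ?? 5);
  const maxDelay = Number(process.env.WEBHOOK_RETRY_MAX_DELAY_SECONDS ?? 300);
  const jitterFactor = Number(process.env.WEBHOOK_RETRY_JITTER_FACTOR ?? 0.2);
  const exponential = (process.env.WEBHOOK_RETRY_USE_EXPONENTIAL_BACKOFF ?? 'true') === 'true';

  const base = exponential ? initial * 2 ** (attempt - 1) : initial;
  const capped = Math.min(base, maxDelay);
  const jitter = capped * jitterFactor * (Math.random() * 2 - 1); // +/- jitterFactor
  return Math.max(0, capped + jitter);
}

// With the defaults this gives roughly 5s, 10s, 20s, 40s, ... per attempt, capped at
// WEBHOOK_RETRY_MAX_DELAY_SECONDS, stopping after WEBHOOK_RETRY_MAX_ATTEMPTS, and skipping
// retries for the status codes listed in WEBHOOK_RETRY_NON_RETRYABLE_STATUS_CODES.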

evolution-manager-v2 Submodule (1 line)

@ -0,0 +1 @@
Subproject commit fcb38dd407b89697b7a7154cfd873f76729e6ece

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

manager/dist/assets/index-DJ2Q5K8k.js vendored Normal file (461 lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -5,8 +5,8 @@
<link rel="icon" type="image/png" href="https://evolution-api.com/files/evo/favicon.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Evolution Manager</title>
<script type="module" crossorigin src="/assets/index-D-oOjDYe.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-CXH2BdD4.css">
<script type="module" crossorigin src="/assets/index-DJ2Q5K8k.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-DxAxQfZR.css">
</head>
<body>
<div id="root"></div>

package-lock.json generated (14 lines)
View File

@ -1,12 +1,12 @@
{
"name": "evolution-api",
"version": "2.3.3",
"version": "2.3.4",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "evolution-api",
"version": "2.3.3",
"version": "2.3.4",
"license": "Apache-2.0",
"dependencies": {
"@adiwajshing/keyed-db": "^0.2.4",
@ -39,6 +39,7 @@
"json-schema": "^0.4.0",
"jsonschema": "^1.4.1",
"jsonwebtoken": "^9.0.2",
"kafkajs": "^2.2.4",
"link-preview-js": "^3.0.13",
"long": "^5.2.3",
"mediainfo.js": "^0.3.4",
@ -10718,6 +10719,15 @@
"safe-buffer": "^5.0.1"
}
},
"node_modules/kafkajs": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz",
"integrity": "sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==",
"license": "MIT",
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/keyv": {
"version": "5.5.1",
"resolved": "https://registry.npmjs.org/keyv/-/keyv-5.5.1.tgz",

View File

@ -1,6 +1,6 @@
{
"name": "evolution-api",
"version": "2.3.3",
"version": "2.3.4",
"description": "Rest api for communication with WhatsApp",
"main": "./dist/main.js",
"type": "commonjs",
@ -95,6 +95,7 @@
"json-schema": "^0.4.0",
"jsonschema": "^1.4.1",
"jsonwebtoken": "^9.0.2",
"kafkajs": "^2.2.4",
"link-preview-js": "^3.0.13",
"long": "^5.2.3",
"mediainfo.js": "^0.3.4",

View File

@ -0,0 +1,231 @@
/*
Warnings:
- You are about to alter the column `createdAt` on the `Chat` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Chat` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Chatwoot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Chatwoot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Contact` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Contact` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Dify` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Dify` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `DifySetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `DifySetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Evoai` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Evoai` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `EvoaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `EvoaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `EvolutionBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `EvolutionBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `EvolutionBotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `EvolutionBotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Flowise` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Flowise` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `FlowiseSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `FlowiseSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `disconnectionAt` on the `Instance` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Instance` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Instance` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `IntegrationSession` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `IntegrationSession` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to drop the column `lid` on the `IsOnWhatsapp` table. All the data in the column will be lost.
- You are about to alter the column `createdAt` on the `IsOnWhatsapp` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `IsOnWhatsapp` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Label` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Label` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Media` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `N8n` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `N8n` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `N8nSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `N8nSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Nats` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Nats` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `OpenaiBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `OpenaiBot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `OpenaiCreds` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `OpenaiCreds` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `OpenaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `OpenaiSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Proxy` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Proxy` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Pusher` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Pusher` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Rabbitmq` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Rabbitmq` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Session` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Setting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Setting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Sqs` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Sqs` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Template` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Template` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to drop the column `splitMessages` on the `Typebot` table. All the data in the column will be lost.
- You are about to drop the column `timePerChar` on the `Typebot` table. All the data in the column will be lost.
- You are about to alter the column `createdAt` on the `Typebot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Typebot` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to drop the column `splitMessages` on the `TypebotSetting` table. All the data in the column will be lost.
- You are about to drop the column `timePerChar` on the `TypebotSetting` table. All the data in the column will be lost.
- You are about to alter the column `createdAt` on the `TypebotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `TypebotSetting` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Webhook` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Webhook` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `createdAt` on the `Websocket` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
- You are about to alter the column `updatedAt` on the `Websocket` table. The data in that column could be lost. The data in that column will be cast from `Timestamp(0)` to `Timestamp`.
*/
-- DropIndex
DROP INDEX `unique_remote_instance` ON `Chat`;
-- AlterTable
ALTER TABLE `Chat` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `Chatwoot` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Contact` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `Dify` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `DifySetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Evoai` MODIFY `triggerType` ENUM('all', 'keyword', 'none', 'advanced') NULL,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `EvoaiSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `EvolutionBot` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `EvolutionBotSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Flowise` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `FlowiseSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Instance` MODIFY `disconnectionAt` TIMESTAMP NULL,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `IntegrationSession` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `IsOnWhatsapp` DROP COLUMN `lid`,
MODIFY `createdAt` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Label` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Media` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP;
-- AlterTable
ALTER TABLE `N8n` MODIFY `triggerType` ENUM('all', 'keyword', 'none', 'advanced') NULL,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `N8nSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Nats` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `OpenaiBot` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `OpenaiCreds` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `OpenaiSetting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Proxy` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Pusher` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Rabbitmq` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Session` MODIFY `createdAt` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP;
-- AlterTable
ALTER TABLE `Setting` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Sqs` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Template` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Typebot` DROP COLUMN `splitMessages`,
DROP COLUMN `timePerChar`,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NULL;
-- AlterTable
ALTER TABLE `TypebotSetting` DROP COLUMN `splitMessages`,
DROP COLUMN `timePerChar`,
MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Webhook` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- AlterTable
ALTER TABLE `Websocket` MODIFY `createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
MODIFY `updatedAt` TIMESTAMP NOT NULL;
-- CreateTable
CREATE TABLE `Kafka` (
`id` VARCHAR(191) NOT NULL,
`enabled` BOOLEAN NOT NULL DEFAULT false,
`events` JSON NOT NULL,
`createdAt` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
`updatedAt` TIMESTAMP NOT NULL,
`instanceId` VARCHAR(191) NOT NULL,
UNIQUE INDEX `Kafka_instanceId_key`(`instanceId`),
PRIMARY KEY (`id`)
) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
-- AddForeignKey
ALTER TABLE `Kafka` ADD CONSTRAINT `Kafka_instanceId_fkey` FOREIGN KEY (`instanceId`) REFERENCES `Instance`(`id`) ON DELETE CASCADE ON UPDATE CASCADE;

View File

@ -88,6 +88,7 @@ model Instance {
Rabbitmq Rabbitmq?
Nats Nats?
Sqs Sqs?
Kafka Kafka?
Websocket Websocket?
Typebot Typebot[]
Session Session?
@ -105,8 +106,11 @@ model Instance {
EvolutionBotSetting EvolutionBotSetting?
Flowise Flowise[]
FlowiseSetting FlowiseSetting?
Pusher Pusher?
N8n N8n[]
N8nSetting N8nSetting?
Evoai Evoai[]
EvoaiSetting EvoaiSetting?
Pusher Pusher?
}
model Session {
@ -309,6 +313,16 @@ model Sqs {
instanceId String @unique
}
model Kafka {
id String @id @default(cuid())
enabled Boolean @default(false)
events Json @db.Json
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String @unique
}
model Websocket {
id String @id @default(cuid())
enabled Boolean @default(false)
@ -647,7 +661,7 @@ model IsOnWhatsapp {
model N8n {
id String @id @default(cuid())
enabled Boolean @default(true) @db.TinyInt(1)
enabled Boolean @default(true) @db.TinyInt()
description String? @db.VarChar(255)
webhookUrl String? @db.VarChar(255)
basicAuthUser String? @db.VarChar(255)
@ -666,7 +680,7 @@ model N8n {
triggerType TriggerType?
triggerOperator TriggerOperator?
triggerValue String?
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String
@ -686,7 +700,7 @@ model N8nSetting {
ignoreJids Json?
splitMessages Boolean? @default(false)
timePerChar Int? @default(50) @db.Int
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Fallback N8n? @relation(fields: [n8nIdFallback], references: [id])
n8nIdFallback String? @db.VarChar(100)
@ -696,7 +710,7 @@ model N8nSetting {
model Evoai {
id String @id @default(cuid())
enabled Boolean @default(true) @db.TinyInt(1)
enabled Boolean @default(true) @db.TinyInt()
description String? @db.VarChar(255)
agentUrl String? @db.VarChar(255)
apiKey String? @db.VarChar(255)
@ -714,7 +728,7 @@ model Evoai {
triggerType TriggerType?
triggerOperator TriggerOperator?
triggerValue String?
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String
@ -734,7 +748,7 @@ model EvoaiSetting {
ignoreJids Json?
splitMessages Boolean? @default(false)
timePerChar Int? @default(50) @db.Int
createdAt DateTime? @default(now()) @db.Timestamp
createdAt DateTime? @default(dbgenerated("CURRENT_TIMESTAMP")) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Fallback Evoai? @relation(fields: [evoaiIdFallback], references: [id])
evoaiIdFallback String? @db.VarChar(100)

View File

@ -0,0 +1,17 @@
-- CreateTable
CREATE TABLE "public"."Kafka" (
"id" TEXT NOT NULL,
"enabled" BOOLEAN NOT NULL DEFAULT false,
"events" JSONB NOT NULL,
"createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP NOT NULL,
"instanceId" TEXT NOT NULL,
CONSTRAINT "Kafka_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "Kafka_instanceId_key" ON "public"."Kafka"("instanceId");
-- AddForeignKey
ALTER TABLE "public"."Kafka" ADD CONSTRAINT "Kafka_instanceId_fkey" FOREIGN KEY ("instanceId") REFERENCES "public"."Instance"("id") ON DELETE CASCADE ON UPDATE CASCADE;
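The Kafka table created above stores per-instance event settings. A hedged sketch of writing it through the generated Prisma client (field names come from the model in this commit; the helper itself is illustrative, not code from the diff):

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Enable Kafka events for a single instance.
async function enableKafka(instanceId: string, events: string[]) {
  return prisma.kafka.upsert({
    where: { instanceId },
    create: { instanceId, enabled: true, events },
    update: { enabled: true, events },
  });
}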

View File

@ -88,6 +88,7 @@ model Instance {
Rabbitmq Rabbitmq?
Nats Nats?
Sqs Sqs?
Kafka Kafka?
Websocket Websocket?
Typebot Typebot[]
Session Session?
@ -312,6 +313,16 @@ model Sqs {
instanceId String @unique
}
model Kafka {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean
events Json @db.JsonB
createdAt DateTime? @default(now()) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String @unique
}
model Websocket {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean

View File

@ -89,6 +89,7 @@ model Instance {
Rabbitmq Rabbitmq?
Nats Nats?
Sqs Sqs?
Kafka Kafka?
Websocket Websocket?
Typebot Typebot[]
Session Session?
@ -313,6 +314,16 @@ model Sqs {
instanceId String @unique
}
model Kafka {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean
events Json @db.JsonB
createdAt DateTime? @default(now()) @db.Timestamp
updatedAt DateTime @updatedAt @db.Timestamp
Instance Instance @relation(fields: [instanceId], references: [id], onDelete: Cascade)
instanceId String @unique
}
model Websocket {
id String @id @default(cuid())
enabled Boolean @default(false) @db.Boolean

View File

@ -323,8 +323,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'imageMessage',
@ -337,8 +337,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'videoMessage',
@ -351,8 +351,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'audioMessage',
@ -372,8 +372,8 @@ export class EvolutionStartupService extends ChannelStartupService {
messageRaw = {
key: { fromMe: true, id: messageId, remoteJid: number },
message: {
base64: isBase64(message.media) ? message.media : undefined,
mediaUrl: isURL(message.media) ? message.media : undefined,
base64: isBase64(message.media) ? message.media : null,
mediaUrl: isURL(message.media) ? message.media : null,
quoted,
},
messageType: 'documentMessage',
@ -449,7 +449,7 @@ export class EvolutionStartupService extends ChannelStartupService {
}
}
const base64 = messageRaw.message.base64;
const { base64 } = messageRaw.message;
delete messageRaw.message.base64;
if (base64 || file || audioFile) {

View File

@ -445,7 +445,7 @@ export class BaileysStartupService extends ChannelStartupService {
try {
const profilePic = await this.profilePicture(this.instance.wuid);
this.instance.profilePictureUrl = profilePic.profilePictureUrl;
} catch (error) {
} catch {
this.instance.profilePictureUrl = null;
}
const formattedWuid = this.instance.wuid.split('@')[0].padEnd(30, ' ');
@ -524,7 +524,7 @@ export class BaileysStartupService extends ChannelStartupService {
}
return webMessageInfo[0].message;
} catch (error) {
} catch {
return { conversation: '' };
}
}
@ -597,7 +597,7 @@ export class BaileysStartupService extends ChannelStartupService {
const rand = Math.floor(Math.random() * Math.floor(proxyUrls.length));
const proxyUrl = 'http://' + proxyUrls[rand];
options = { agent: makeProxyAgent(proxyUrl), fetchAgent: makeProxyAgent(proxyUrl) };
} catch (error) {
} catch {
this.localProxy.enabled = false;
}
} else {
@ -1189,7 +1189,7 @@ export class BaileysStartupService extends ChannelStartupService {
where: { id: existingChat.id },
data: { name: received.pushName },
});
} catch (error) {
} catch {
console.log(`Chat insert record ignored: ${received.key.remoteJid} - ${this.instanceId}`);
}
}
@ -1564,7 +1564,7 @@ export class BaileysStartupService extends ChannelStartupService {
if (this.configService.get<Database>('DATABASE').SAVE_DATA.CHATS) {
try {
await this.prismaRepository.chat.update({ where: { id: existingChat.id }, data: chatToInsert });
} catch (error) {
} catch {
console.log(`Chat insert record ignored: ${chatToInsert.remoteJid} - ${chatToInsert.instanceId}`);
}
}
@ -1832,7 +1832,7 @@ export class BaileysStartupService extends ChannelStartupService {
const profilePictureUrl = await this.client.profilePictureUrl(jid, 'image');
return { wuid: jid, profilePictureUrl };
} catch (error) {
} catch {
return { wuid: jid, profilePictureUrl: null };
}
}
@ -1842,7 +1842,7 @@ export class BaileysStartupService extends ChannelStartupService {
try {
return { wuid: jid, status: (await this.client.fetchStatus(jid))[0]?.status };
} catch (error) {
} catch {
return { wuid: jid, status: null };
}
}
@ -1891,7 +1891,7 @@ export class BaileysStartupService extends ChannelStartupService {
website: business?.website?.shift(),
};
}
} catch (error) {
} catch {
return { wuid: jid, name: null, picture: null, status: null, os: null, isBusiness: false };
}
}
@ -2131,7 +2131,7 @@ export class BaileysStartupService extends ChannelStartupService {
if (!cache.REDIS.ENABLED && !cache.LOCAL.ENABLED) group = await this.findGroup({ groupJid: sender }, 'inner');
else group = await this.getGroupMetadataCache(sender);
// group = await this.findGroup({ groupJid: sender }, 'inner');
} catch (error) {
} catch {
throw new NotFoundException('Group not found');
}
@ -3640,7 +3640,7 @@ export class BaileysStartupService extends ChannelStartupService {
{},
{ logger: P({ level: 'error' }) as any, reuploadRequest: this.client.updateMediaMessage },
);
} catch (err) {
} catch {
this.logger.error('Download Media failed, trying to retry in 5 seconds...');
await new Promise((resolve) => setTimeout(resolve, 5000));
const mediaType = Object.keys(msg.message).find((key) => key.endsWith('Message'));
@ -4230,7 +4230,7 @@ export class BaileysStartupService extends ChannelStartupService {
public async inviteInfo(id: GroupInvite) {
try {
return await this.client.groupGetInviteInfo(id.inviteCode);
} catch (error) {
} catch {
throw new NotFoundException('No invite info', id.inviteCode);
}
}
@ -4253,7 +4253,7 @@ export class BaileysStartupService extends ChannelStartupService {
}
return { send: true, inviteUrl };
} catch (error) {
} catch {
throw new NotFoundException('No send invite');
}
}
@ -4717,7 +4717,7 @@ export class BaileysStartupService extends ChannelStartupService {
collectionsLength: collections?.length,
collections: collections,
};
} catch (error) {
} catch {
return { wuid: jid, name: null, isBusiness: false };
}
}

View File

@ -49,7 +49,7 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
try {
JSON.parse(str);
return true;
} catch (e) {
} catch {
return false;
}
}
@ -180,6 +180,7 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
remoteJid: string,
message: string,
settings: SettingsType,
linkPreview: boolean = true,
): Promise<void> {
if (!message) return;
@ -202,7 +203,7 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
if (mediaType) {
// Send accumulated text before sending media
if (textBuffer.trim()) {
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages);
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages, linkPreview);
textBuffer = '';
}
@ -252,7 +253,56 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
// Send any remaining text
if (textBuffer.trim()) {
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages);
await this.sendFormattedText(instance, remoteJid, textBuffer.trim(), settings, splitMessages, linkPreview);
}
}
/**
* Split message by double line breaks and return array of message parts
*/
private splitMessageByDoubleLineBreaks(message: string): string[] {
return message.split('\n\n').filter((part) => part.trim().length > 0);
}
/**
* Send a single message with proper typing indicators and delays
*/
private async sendSingleMessage(
instance: any,
remoteJid: string,
message: string,
settings: any,
linkPreview: boolean = true,
): Promise<void> {
const timePerChar = settings?.timePerChar ?? 0;
const minDelay = 1000;
const maxDelay = 20000;
const delay = Math.min(Math.max(message.length * timePerChar, minDelay), maxDelay);
this.logger.debug(`[BaseChatbot] Sending single message with linkPreview: ${linkPreview}`);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.presenceSubscribe(remoteJid);
await instance.client.sendPresenceUpdate('composing', remoteJid);
}
await new Promise<void>((resolve) => {
setTimeout(async () => {
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: message,
linkPreview,
},
false,
);
resolve();
}, delay);
});
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
}
@ -265,67 +315,24 @@ export abstract class BaseChatbotService<BotType = any, SettingsType = any> {
text: string,
settings: any,
splitMessages: boolean,
linkPreview: boolean = true,
): Promise<void> {
const timePerChar = settings?.timePerChar ?? 0;
const minDelay = 1000;
const maxDelay = 20000;
if (splitMessages) {
const multipleMessages = text.split('\n\n');
for (let index = 0; index < multipleMessages.length; index++) {
const message = multipleMessages[index];
if (!message.trim()) continue;
const messageParts = this.splitMessageByDoubleLineBreaks(text);
const delay = Math.min(Math.max(message.length * timePerChar, minDelay), maxDelay);
this.logger.debug(`[BaseChatbot] Splitting message into ${messageParts.length} parts`);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.presenceSubscribe(remoteJid);
await instance.client.sendPresenceUpdate('composing', remoteJid);
}
for (let index = 0; index < messageParts.length; index++) {
const message = messageParts[index];
await new Promise<void>((resolve) => {
setTimeout(async () => {
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: message,
},
false,
);
resolve();
}, delay);
});
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
this.logger.debug(`[BaseChatbot] Sending message part ${index + 1}/${messageParts.length}`);
await this.sendSingleMessage(instance, remoteJid, message, settings, linkPreview);
}
this.logger.debug(`[BaseChatbot] All message parts sent successfully`);
} else {
const delay = Math.min(Math.max(text.length * timePerChar, minDelay), maxDelay);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.presenceSubscribe(remoteJid);
await instance.client.sendPresenceUpdate('composing', remoteJid);
}
await new Promise<void>((resolve) => {
setTimeout(async () => {
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: text,
},
false,
);
resolve();
}, delay);
});
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
this.logger.debug(`[BaseChatbot] Sending single message`);
await this.sendSingleMessage(instance, remoteJid, text, settings, linkPreview);
}
}
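Both branches above now go through sendSingleMessage, which derives a typing delay from the message length; a standalone sketch of the splitting and delay rules, with the constants taken from the code above:

// Standalone illustration of the split and delay logic centralized above.
function splitByDoubleLineBreaks(message: string): string[] {
  return message.split('\n\n').filter((part) => part.trim().length > 0);
}

function typingDelayMs(message: string, timePerChar = 0): number {
  const minDelay = 1000;  // 1 second floor
  const maxDelay = 20000; // 20 second ceiling
  return Math.min(Math.max(message.length * timePerChar, minDelay), maxDelay);
}

// With splitMessages enabled, 'part one\n\npart two' becomes two sends, each wrapped in
// 'composing' and 'paused' presence updates when the instance runs on Baileys.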

View File

@ -91,19 +91,19 @@ export class ChatbotController {
pushName,
isIntegration,
};
await evolutionBotController.emit(emitData);
evolutionBotController.emit(emitData);
await typebotController.emit(emitData);
typebotController.emit(emitData);
await openaiController.emit(emitData);
openaiController.emit(emitData);
await difyController.emit(emitData);
difyController.emit(emitData);
await n8nController.emit(emitData);
n8nController.emit(emitData);
await evoaiController.emit(emitData);
evoaiController.emit(emitData);
await flowiseController.emit(emitData);
flowiseController.emit(emitData);
}
public processDebounce(

View File

@ -130,7 +130,7 @@ export class ChatwootService {
public async find(instance: InstanceDto): Promise<ChatwootDto> {
try {
return await this.waMonitor.waInstances[instance.instanceName].findChatwoot();
} catch (error) {
} catch {
this.logger.error('chatwoot not found');
return { enabled: null, url: '' };
}
@ -370,7 +370,7 @@ export class ChatwootService {
});
return contact;
} catch (error) {
} catch {
return null;
}
}
@ -407,7 +407,7 @@ export class ChatwootService {
}
return true;
} catch (error) {
} catch {
return false;
}
}
@ -2552,7 +2552,7 @@ export class ChatwootService {
await chatwootImport.importHistoryMessages(instance, this, inbox, this.provider);
const waInstance = this.waMonitor.waInstances[instance.instanceName];
waInstance.clearCacheChatwoot();
} catch (error) {
} catch {
return;
}
}

View File

@ -4,6 +4,7 @@ import { Integration } from '@api/types/wa.types';
import { ConfigService, HttpServer } from '@config/env.config';
import { Dify, DifySetting, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { isURL } from 'class-validator';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
@ -78,15 +79,35 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: contentSplit[1].split('?')[0],
},
];
payload.query = contentSplit[2] || content;
const media = content.split('|');
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: mediaBase64,
},
];
}
} else {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: media[1].split('?')[0],
},
];
}
payload.query = media[2] || content;
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@ -107,7 +128,7 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
const conversationId = response?.data?.conversation_id;
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
await this.prismaRepository.integrationSession.update({
@ -140,15 +161,35 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: contentSplit[1].split('?')[0],
},
];
payload.inputs.query = contentSplit[2] || content;
const media = content.split('|');
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: mediaBase64,
},
];
}
} else {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: media[1].split('?')[0],
},
];
payload.inputs.query = media[2] || content;
}
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@ -169,7 +210,7 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
const conversationId = response?.data?.conversation_id;
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
await this.prismaRepository.integrationSession.update({
@ -202,15 +243,26 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: contentSplit[1].split('?')[0],
},
];
payload.query = contentSplit[2] || content;
const media = content.split('|');
if (msg.message.mediaUrl || msg.message.base64) {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: msg.message.mediaUrl || msg.message.base64,
},
];
} else {
payload.files = [
{
type: 'image',
transfer_method: 'remote_url',
url: media[1].split('?')[0],
},
];
payload.query = media[2] || content;
}
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@ -246,7 +298,7 @@ export class DifyService extends BaseChatbotService<Dify, DifySetting> {
await instance.client.sendPresenceUpdate('paused', remoteJid);
if (answer) {
await this.sendMessageWhatsApp(instance, remoteJid, answer, settings);
await this.sendMessageWhatsApp(instance, remoteJid, answer, settings, true);
}
await this.prismaRepository.integrationSession.update({

View File

@ -5,6 +5,7 @@ import { ConfigService, HttpServer } from '@config/env.config';
import { Evoai, EvoaiSetting, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { downloadMediaMessage } from 'baileys';
import { isURL } from 'class-validator';
import { v4 as uuidv4 } from 'uuid';
import { BaseChatbotService } from '../../base-chatbot.service';
@ -82,23 +83,43 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
// Handle image message if present
if (this.isImageMessage(content) && msg) {
const contentSplit = content.split('|');
parts[0].text = contentSplit[2] || content;
const media = content.split('|');
parts[0].text = media[2] || content;
try {
// Download the image
const mediaBuffer = await downloadMediaMessage(msg, 'buffer', {});
const fileContent = Buffer.from(mediaBuffer).toString('base64');
const fileName = contentSplit[2] || `${msg.key?.id || 'image'}.jpg`;
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
parts.push({
type: 'file',
file: {
name: fileName,
mimeType: 'image/jpeg',
bytes: fileContent,
},
} as any);
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
parts.push({
type: 'file',
file: {
name: msg.key.id + '.jpeg',
mimeType: 'image/jpeg',
bytes: mediaBase64,
},
} as any);
}
} else {
// Download the image
const mediaBuffer = await downloadMediaMessage(msg, 'buffer', {});
const fileContent = Buffer.from(mediaBuffer).toString('base64');
const fileName = media[2] || `${msg.key?.id || 'image'}.jpg`;
parts.push({
type: 'file',
file: {
name: fileName,
mimeType: 'image/jpeg',
bytes: fileContent,
},
} as any);
}
} catch (fileErr) {
this.logger.error(`[EvoAI] Failed to process image: ${fileErr}`);
}
@ -174,7 +195,7 @@ export class EvoaiService extends BaseChatbotService<Evoai, EvoaiSetting> {
this.logger.debug(`[EvoAI] Extracted message to send: ${message}`);
if (message) {
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
} catch (error) {
this.logger.error(

View File

@ -6,6 +6,7 @@ import { ConfigService, HttpServer } from '@config/env.config';
import { EvolutionBot, EvolutionBotSetting, IntegrationSession } from '@prisma/client';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { isURL } from 'class-validator';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
@ -71,16 +72,26 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
}
}
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
if (this.isImageMessage(content) && msg) {
const media = content.split('|');
payload.files = [
{
type: 'image',
url: contentSplit[1].split('?')[0],
},
];
payload.query = contentSplit[2] || content;
if (msg.message.mediaUrl || msg.message.base64) {
payload.files = [
{
type: 'image',
url: msg.message.base64 || msg.message.mediaUrl,
},
];
} else {
payload.files = [
{
type: 'image',
url: media[1].split('?')[0],
},
];
}
payload.query = media[2] || content;
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@ -115,15 +126,10 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
},
};
this.logger.debug(`[EvolutionBot] Sending request to endpoint: ${endpoint}`);
this.logger.debug(`[EvolutionBot] Request payload: ${JSON.stringify(sanitizedPayload, null, 2)}`);
const response = await axios.post(endpoint, payload, {
headers,
});
this.logger.debug(`[EvolutionBot] Response received - Status: ${response.status}`);
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
await instance.client.sendPresenceUpdate('paused', remoteJid);
}
@ -134,10 +140,6 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
// Validate linkPreview is boolean and default to true for backward compatibility
const linkPreview = typeof rawLinkPreview === 'boolean' ? rawLinkPreview : true;
this.logger.debug(
`[EvolutionBot] Processing response - Message length: ${message?.length || 0}, LinkPreview: ${linkPreview}`,
);
if (message && typeof message === 'string' && message.startsWith("'") && message.endsWith("'")) {
const innerContent = message.slice(1, -1);
if (!innerContent.includes("'")) {
@ -146,17 +148,8 @@ export class EvolutionBotService extends BaseChatbotService<EvolutionBot, Evolut
}
if (message) {
// Send message directly with validated linkPreview option
await instance.textMessage(
{
number: remoteJid.split('@')[0],
delay: settings?.delayMessage || 1000,
text: message,
linkPreview, // Always boolean, defaults to true
},
false,
);
this.logger.debug(`[EvolutionBot] Message sent successfully with linkPreview: ${linkPreview}`);
// Use the base class method that handles splitMessages functionality
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, linkPreview);
} else {
this.logger.warn(`[EvolutionBot] No message content received from bot response`);
}

View File

@ -5,6 +5,7 @@ import { Integration } from '@api/types/wa.types';
import { ConfigService, HttpServer } from '@config/env.config';
import { Flowise as FlowiseModel, IntegrationSession } from '@prisma/client';
import axios from 'axios';
import { isURL } from 'class-validator';
import { BaseChatbotService } from '../../base-chatbot.service';
import { OpenaiService } from '../../openai/services/openai.service';
@ -82,17 +83,28 @@ export class FlowiseService extends BaseChatbotService<FlowiseModel> {
}
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
const media = content.split('|');
payload.uploads = [
{
data: contentSplit[1].split('?')[0],
type: 'url',
name: 'Flowise.png',
mime: 'image/png',
},
];
payload.question = contentSplit[2] || content;
if (msg.message.mediaUrl || msg.message.base64) {
payload.uploads = [
{
data: msg.message.base64 || msg.message.mediaUrl,
type: 'url',
name: 'Flowise.png',
mime: 'image/png',
},
];
} else {
payload.uploads = [
{
data: media[1].split('?')[0],
type: 'url',
name: 'Flowise.png',
mime: 'image/png',
},
];
payload.question = media[2] || content;
}
}
if (instance.integration === Integration.WHATSAPP_BAILEYS) {
@ -130,7 +142,7 @@ export class FlowiseService extends BaseChatbotService<FlowiseModel> {
if (message) {
// Use the base class method to send the message to WhatsApp
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
}
}

View File

@ -78,7 +78,7 @@ export class N8nService extends BaseChatbotService<N8n, N8nSetting> {
const message = response?.data?.output || response?.data?.answer;
// Use base class method instead of custom implementation
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
await this.prismaRepository.integrationSession.update({
where: {

View File

@ -6,6 +6,7 @@ import { IntegrationSession, OpenaiBot, OpenaiSetting } from '@prisma/client';
import { sendTelemetry } from '@utils/sendTelemetry';
import axios from 'axios';
import { downloadMediaMessage } from 'baileys';
import { isURL } from 'class-validator';
import FormData from 'form-data';
import OpenAI from 'openai';
import P from 'pino';
@ -85,6 +86,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
remoteJid,
"Sorry, I couldn't transcribe your audio message. Could you please type your message instead?",
settings,
true,
);
return;
}
@ -173,7 +175,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
}
// Process with the appropriate API based on bot type
await this.sendMessageToBot(instance, session, settings, openaiBot, remoteJid, pushName || '', content);
await this.sendMessageToBot(instance, session, settings, openaiBot, remoteJid, pushName || '', content, msg);
} catch (error) {
this.logger.error(`Error in process: ${error.message || JSON.stringify(error)}`);
return;
@ -191,6 +193,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
remoteJid: string,
pushName: string,
content: string,
msg?: any,
): Promise<void> {
this.logger.log(`Sending message to bot for remoteJid: ${remoteJid}, bot type: ${openaiBot.botType}`);
@ -222,10 +225,11 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
pushName,
false, // Not fromMe
content,
msg,
);
} else {
this.logger.log('Processing with ChatCompletion API');
message = await this.processChatCompletionMessage(instance, openaiBot, remoteJid, content);
message = await this.processChatCompletionMessage(instance, openaiBot, remoteJid, content, msg);
}
this.logger.log(`Got response from OpenAI: ${message?.substring(0, 50)}${message?.length > 50 ? '...' : ''}`);
@ -233,7 +237,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
// Send the response
if (message) {
this.logger.log('Sending message to WhatsApp');
await this.sendMessageWhatsApp(instance, remoteJid, message, settings);
await this.sendMessageWhatsApp(instance, remoteJid, message, settings, true);
} else {
this.logger.error('No message to send to WhatsApp');
}
@ -268,6 +272,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
pushName: string,
fromMe: boolean,
content: string,
msg?: any,
): Promise<string> {
const messageData: any = {
role: fromMe ? 'assistant' : 'user',
@ -276,18 +281,35 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
// Handle image messages
if (this.isImageMessage(content)) {
const contentSplit = content.split('|');
const url = contentSplit[1].split('?')[0];
const media = content.split('|');
messageData.content = [
{ type: 'text', text: contentSplit[2] || content },
{
type: 'image_url',
image_url: {
url: url,
if (msg.message.mediaUrl || msg.message.base64) {
let mediaBase64 = msg.message.base64 || null;
if (msg.message.mediaUrl && isURL(msg.message.mediaUrl)) {
const result = await axios.get(msg.message.mediaUrl, { responseType: 'arraybuffer' });
mediaBase64 = Buffer.from(result.data).toString('base64');
}
if (mediaBase64) {
messageData.content = [
{ type: 'text', text: media[2] || content },
{ type: 'image_url', image_url: { url: mediaBase64 } },
];
}
} else {
const url = media[1].split('?')[0];
messageData.content = [
{ type: 'text', text: media[2] || content },
{
type: 'image_url',
image_url: {
url: url,
},
},
},
];
];
}
}
// Get thread ID from session or create new thread
@ -376,6 +398,7 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
openaiBot: OpenaiBot,
remoteJid: string,
content: string,
msg?: any,
): Promise<string> {
this.logger.log('Starting processChatCompletionMessage');
@ -468,18 +491,26 @@ export class OpenaiService extends BaseChatbotService<OpenaiBot, OpenaiSetting>
// Handle image messages
if (this.isImageMessage(content)) {
this.logger.log('Found image message');
const contentSplit = content.split('|');
const url = contentSplit[1].split('?')[0];
const media = content.split('|');
messageData.content = [
{ type: 'text', text: contentSplit[2] || content },
{
type: 'image_url',
image_url: {
url: url,
if (msg.message.mediaUrl || msg.message.base64) {
messageData.content = [
{ type: 'text', text: media[2] || content },
{ type: 'image_url', image_url: { url: msg.message.base64 || msg.message.mediaUrl } },
];
} else {
const url = media[1].split('?')[0];
messageData.content = [
{ type: 'text', text: media[2] || content },
{
type: 'image_url',
image_url: {
url: url,
},
},
},
];
];
}
}
// Combine all messages: system messages, pre-defined messages, conversation history, and current message

View File

@ -318,7 +318,7 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
} else if (formattedText.includes('[buttons]')) {
await this.processButtonMessage(instance, formattedText, session.remoteJid);
} else {
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings);
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings, true);
}
sendTelemetry('/message/sendText');
@ -393,7 +393,7 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
} else if (formattedText.includes('[buttons]')) {
await this.processButtonMessage(instance, formattedText, session.remoteJid);
} else {
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings);
await this.sendMessageWhatsApp(instance, session.remoteJid, formattedText, settings, true);
}
sendTelemetry('/message/sendText');
@ -642,15 +642,21 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
if (!content) {
if (unknownMessage) {
await this.sendMessageWhatsApp(waInstance, remoteJid, unknownMessage, {
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
await this.sendMessageWhatsApp(
waInstance,
remoteJid,
unknownMessage,
});
{
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
unknownMessage,
},
true,
);
sendTelemetry('/message/sendText');
}
return;
@ -801,15 +807,21 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
if (!data?.messages || data.messages.length === 0) {
if (!content) {
if (unknownMessage) {
await this.sendMessageWhatsApp(waInstance, remoteJid, unknownMessage, {
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
await this.sendMessageWhatsApp(
waInstance,
remoteJid,
unknownMessage,
});
{
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
unknownMessage,
},
true,
);
sendTelemetry('/message/sendText');
}
return;
@ -903,15 +915,21 @@ export class TypebotService extends BaseChatbotService<TypebotModel, any> {
if (!content) {
if (unknownMessage) {
await this.sendMessageWhatsApp(waInstance, remoteJid, unknownMessage, {
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
await this.sendMessageWhatsApp(
waInstance,
remoteJid,
unknownMessage,
});
{
delayMessage,
expire,
keywordFinish,
listeningFromMe,
stopBotFromMe,
keepOpen,
unknownMessage,
},
true,
);
sendTelemetry('/message/sendText');
}
return;

View File

@ -40,6 +40,11 @@ export class EventDto {
useTLS?: boolean;
events?: string[];
};
kafka?: {
enabled?: boolean;
events?: string[];
};
}
export function EventInstanceMixin<TBase extends Constructor>(Base: TBase) {
@ -82,5 +87,10 @@ export function EventInstanceMixin<TBase extends Constructor>(Base: TBase) {
useTLS?: boolean;
events?: string[];
};
kafka?: {
enabled?: boolean;
events?: string[];
};
};
}

View File

@ -1,3 +1,4 @@
import { KafkaController } from '@api/integrations/event/kafka/kafka.controller';
import { NatsController } from '@api/integrations/event/nats/nats.controller';
import { PusherController } from '@api/integrations/event/pusher/pusher.controller';
import { RabbitmqController } from '@api/integrations/event/rabbitmq/rabbitmq.controller';
@ -17,6 +18,7 @@ export class EventManager {
private natsController: NatsController;
private sqsController: SqsController;
private pusherController: PusherController;
private kafkaController: KafkaController;
constructor(prismaRepository: PrismaRepository, waMonitor: WAMonitoringService) {
this.prisma = prismaRepository;
@ -28,6 +30,7 @@ export class EventManager {
this.nats = new NatsController(prismaRepository, waMonitor);
this.sqs = new SqsController(prismaRepository, waMonitor);
this.pusher = new PusherController(prismaRepository, waMonitor);
this.kafka = new KafkaController(prismaRepository, waMonitor);
}
public set prisma(prisma: PrismaRepository) {
@ -93,12 +96,20 @@ export class EventManager {
return this.pusherController;
}
public set kafka(kafka: KafkaController) {
this.kafkaController = kafka;
}
public get kafka() {
return this.kafkaController;
}
public init(httpServer: Server): void {
this.websocket.init(httpServer);
this.rabbitmq.init();
this.nats.init();
this.sqs.init();
this.pusher.init();
this.kafka.init();
}
public async emit(eventData: {
@ -119,42 +130,47 @@ export class EventManager {
await this.sqs.emit(eventData);
await this.webhook.emit(eventData);
await this.pusher.emit(eventData);
await this.kafka.emit(eventData);
}
public async setInstance(instanceName: string, data: any): Promise<any> {
if (data.websocket)
if (data.websocket) {
await this.websocket.set(instanceName, {
websocket: {
enabled: true,
events: data.websocket?.events,
},
});
}
if (data.rabbitmq)
if (data.rabbitmq) {
await this.rabbitmq.set(instanceName, {
rabbitmq: {
enabled: true,
events: data.rabbitmq?.events,
},
});
}
if (data.nats)
if (data.nats) {
await this.nats.set(instanceName, {
nats: {
enabled: true,
events: data.nats?.events,
},
});
}
if (data.sqs)
if (data.sqs) {
await this.sqs.set(instanceName, {
sqs: {
enabled: true,
events: data.sqs?.events,
},
});
}
if (data.webhook)
if (data.webhook) {
await this.webhook.set(instanceName, {
webhook: {
enabled: true,
@ -165,8 +181,9 @@ export class EventManager {
byEvents: data.webhook?.byEvents,
},
});
}
if (data.pusher)
if (data.pusher) {
await this.pusher.set(instanceName, {
pusher: {
enabled: true,
@ -178,5 +195,15 @@ export class EventManager {
useTLS: data.pusher?.useTLS,
},
});
}
if (data.kafka) {
await this.kafka.set(instanceName, {
kafka: {
enabled: true,
events: data.kafka?.events,
},
});
}
}
}

View File

@ -1,3 +1,4 @@
import { KafkaRouter } from '@api/integrations/event/kafka/kafka.router';
import { NatsRouter } from '@api/integrations/event/nats/nats.router';
import { PusherRouter } from '@api/integrations/event/pusher/pusher.router';
import { RabbitmqRouter } from '@api/integrations/event/rabbitmq/rabbitmq.router';
@ -18,5 +19,6 @@ export class EventRouter {
this.router.use('/nats', new NatsRouter(...guards).router);
this.router.use('/pusher', new PusherRouter(...guards).router);
this.router.use('/sqs', new SqsRouter(...guards).router);
this.router.use('/kafka', new KafkaRouter(...guards).router);
}
}

View File

@ -22,6 +22,9 @@ export const eventSchema: JSONSchema7 = {
sqs: {
$ref: '#/$defs/event',
},
kafka: {
$ref: '#/$defs/event',
},
},
$defs: {
event: {

View File

@ -0,0 +1,414 @@
import { PrismaRepository } from '@api/repository/repository.service';
import { WAMonitoringService } from '@api/services/monitor.service';
import { configService, Kafka, Log } from '@config/env.config';
import { Logger } from '@config/logger.config';
import { Consumer, ConsumerConfig, Kafka as KafkaJS, KafkaConfig, Producer, ProducerConfig } from 'kafkajs';
import { EmitData, EventController, EventControllerInterface } from '../event.controller';
export class KafkaController extends EventController implements EventControllerInterface {
private kafkaClient: KafkaJS | null = null;
private producer: Producer | null = null;
private consumer: Consumer | null = null;
private readonly logger = new Logger('KafkaController');
private reconnectAttempts = 0;
private maxReconnectAttempts = 10;
private reconnectDelay = 5000; // 5 seconds
private isReconnecting = false;
constructor(prismaRepository: PrismaRepository, waMonitor: WAMonitoringService) {
super(prismaRepository, waMonitor, configService.get<Kafka>('KAFKA')?.ENABLED, 'kafka');
}
public async init(): Promise<void> {
if (!this.status) {
return;
}
await this.connect();
}
private async connect(): Promise<void> {
try {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const clientConfig: KafkaConfig = {
clientId: kafkaConfig.CLIENT_ID || 'evolution-api',
brokers: kafkaConfig.BROKERS || ['localhost:9092'],
connectionTimeout: kafkaConfig.CONNECTION_TIMEOUT || 3000,
requestTimeout: kafkaConfig.REQUEST_TIMEOUT || 30000,
retry: {
initialRetryTime: 100,
retries: 8,
},
};
// Add SASL authentication if configured
if (kafkaConfig.SASL?.ENABLED) {
clientConfig.sasl = {
mechanism: (kafkaConfig.SASL.MECHANISM as any) || 'plain',
username: kafkaConfig.SASL.USERNAME,
password: kafkaConfig.SASL.PASSWORD,
};
}
// Add SSL configuration if enabled
if (kafkaConfig.SSL?.ENABLED) {
clientConfig.ssl = {
rejectUnauthorized: kafkaConfig.SSL.REJECT_UNAUTHORIZED !== false,
ca: kafkaConfig.SSL.CA ? [kafkaConfig.SSL.CA] : undefined,
key: kafkaConfig.SSL.KEY,
cert: kafkaConfig.SSL.CERT,
};
}
this.kafkaClient = new KafkaJS(clientConfig);
// Initialize producer
const producerConfig: ProducerConfig = {
maxInFlightRequests: 1,
idempotent: true,
transactionTimeout: 30000,
};
this.producer = this.kafkaClient.producer(producerConfig);
await this.producer.connect();
// Initialize consumer for global events if enabled
if (kafkaConfig.GLOBAL_ENABLED) {
await this.initGlobalConsumer();
}
this.reconnectAttempts = 0;
this.isReconnecting = false;
this.logger.info('Kafka initialized successfully');
// Create topics if they don't exist
if (kafkaConfig.AUTO_CREATE_TOPICS) {
await this.createTopics();
}
} catch (error) {
this.logger.error({
local: 'KafkaController.connect',
message: 'Failed to connect to Kafka',
error: error.message || error,
});
this.scheduleReconnect();
throw error;
}
}
private async initGlobalConsumer(): Promise<void> {
try {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const consumerConfig: ConsumerConfig = {
groupId: kafkaConfig.CONSUMER_GROUP_ID || 'evolution-api-consumers',
sessionTimeout: 30000,
heartbeatInterval: 3000,
};
this.consumer = this.kafkaClient.consumer(consumerConfig);
await this.consumer.connect();
// Subscribe to global topics
const events = kafkaConfig.EVENTS;
if (events) {
const eventKeys = Object.keys(events).filter((event) => events[event]);
for (const event of eventKeys) {
const topicName = this.getTopicName(event, true);
await this.consumer.subscribe({ topic: topicName });
}
// Start consuming messages
await this.consumer.run({
eachMessage: async ({ topic, message }) => {
try {
const data = JSON.parse(message.value?.toString() || '{}');
this.logger.debug(`Received message from topic ${topic}: ${JSON.stringify(data)}`);
// Process the message here if needed
// This is where you can add custom message processing logic
} catch (error) {
this.logger.error(`Error processing message from topic ${topic}: ${error}`);
}
},
});
this.logger.info('Global Kafka consumer initialized');
}
} catch (error) {
this.logger.error(`Failed to initialize global Kafka consumer: ${error}`);
}
}
private async createTopics(): Promise<void> {
try {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const admin = this.kafkaClient.admin();
await admin.connect();
const topics = [];
// Create global topics if enabled
if (kafkaConfig.GLOBAL_ENABLED && kafkaConfig.EVENTS) {
const eventKeys = Object.keys(kafkaConfig.EVENTS).filter((event) => kafkaConfig.EVENTS[event]);
for (const event of eventKeys) {
const topicName = this.getTopicName(event, true);
topics.push({
topic: topicName,
numPartitions: kafkaConfig.NUM_PARTITIONS || 1,
replicationFactor: kafkaConfig.REPLICATION_FACTOR || 1,
});
}
}
if (topics.length > 0) {
await admin.createTopics({
topics,
waitForLeaders: true,
});
this.logger.info(`Created ${topics.length} Kafka topics`);
}
await admin.disconnect();
} catch (error) {
this.logger.error(`Failed to create Kafka topics: ${error}`);
}
}
private getTopicName(event: string, isGlobal: boolean = false, instanceName?: string): string {
const kafkaConfig = configService.get<Kafka>('KAFKA');
const prefix = kafkaConfig.TOPIC_PREFIX || 'evolution';
if (isGlobal) {
return `${prefix}.global.${event.toLowerCase().replace(/_/g, '.')}`;
} else {
return `${prefix}.${instanceName}.${event.toLowerCase().replace(/_/g, '.')}`;
}
}
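// Illustrative output of getTopicName, assuming the default 'evolution' prefix:
//   global:   getTopicName('MESSAGES_UPSERT', true)                 -> 'evolution.global.messages.upsert'
//   instance: getTopicName('messages.upsert', false, 'my-instance') -> 'evolution.my-instance.messages.upsert'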
private handleConnectionLoss(): void {
if (this.isReconnecting) {
return;
}
this.cleanup();
this.scheduleReconnect();
}
private scheduleReconnect(): void {
if (this.reconnectAttempts >= this.maxReconnectAttempts) {
this.logger.error(
`Maximum reconnect attempts (${this.maxReconnectAttempts}) reached. Stopping reconnection attempts.`,
);
return;
}
if (this.isReconnecting) {
return;
}
this.isReconnecting = true;
this.reconnectAttempts++;
const delay = this.reconnectDelay * Math.pow(2, Math.min(this.reconnectAttempts - 1, 5));
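// With the defaults above the delay grows as 5s, 10s, 20s, 40s, 80s, then is capped
// at 160s from the sixth attempt onward (up to maxReconnectAttempts = 10).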
this.logger.info(
`Scheduling Kafka reconnection attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`,
);
setTimeout(async () => {
try {
this.logger.info(
`Attempting to reconnect to Kafka (attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts})`,
);
await this.connect();
this.logger.info('Successfully reconnected to Kafka');
} catch (error) {
this.logger.error({
local: 'KafkaController.scheduleReconnect',
message: `Reconnection attempt ${this.reconnectAttempts} failed`,
error: error.message || error,
});
this.isReconnecting = false;
this.scheduleReconnect();
}
}, delay);
}
private async ensureConnection(): Promise<boolean> {
if (!this.producer) {
this.logger.warn('Kafka producer is not available, attempting to reconnect...');
if (!this.isReconnecting) {
this.scheduleReconnect();
}
return false;
}
return true;
}
public async emit({
instanceName,
origin,
event,
data,
serverUrl,
dateTime,
sender,
apiKey,
integration,
}: EmitData): Promise<void> {
if (integration && !integration.includes('kafka')) {
return;
}
if (!this.status) {
return;
}
if (!(await this.ensureConnection())) {
this.logger.warn(`Failed to emit event ${event} for instance ${instanceName}: No Kafka connection`);
return;
}
const instanceKafka = await this.get(instanceName);
const kafkaLocal = instanceKafka?.events;
const kafkaGlobal = configService.get<Kafka>('KAFKA').GLOBAL_ENABLED;
const kafkaEvents = configService.get<Kafka>('KAFKA').EVENTS;
const we = event.replace(/[.-]/gm, '_').toUpperCase();
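// e.g. 'messages.upsert' becomes 'MESSAGES_UPSERT', matching the keys used in the EVENTS config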
const logEnabled = configService.get<Log>('LOG').LEVEL.includes('WEBHOOKS');
const message = {
event,
instance: instanceName,
data,
server_url: serverUrl,
date_time: dateTime,
sender,
apikey: apiKey,
timestamp: Date.now(),
};
const messageValue = JSON.stringify(message);
// Instance-specific events
if (instanceKafka?.enabled && this.producer && Array.isArray(kafkaLocal) && kafkaLocal.includes(we)) {
const topicName = this.getTopicName(event, false, instanceName);
let retry = 0;
while (retry < 3) {
try {
await this.producer.send({
topic: topicName,
messages: [
{
key: instanceName,
value: messageValue,
headers: {
event,
instance: instanceName,
origin,
timestamp: dateTime,
},
},
],
});
if (logEnabled) {
const logData = {
local: `${origin}.sendData-Kafka`,
...message,
};
this.logger.log(logData);
}
break;
} catch (error) {
this.logger.error({
local: 'KafkaController.emit',
message: `Error publishing local Kafka message (attempt ${retry + 1}/3)`,
error: error.message || error,
});
retry++;
if (retry >= 3) {
this.handleConnectionLoss();
}
}
}
}
// Global events
if (kafkaGlobal && kafkaEvents[we] && this.producer) {
const topicName = this.getTopicName(event, true);
let retry = 0;
while (retry < 3) {
try {
await this.producer.send({
topic: topicName,
messages: [
{
key: `${instanceName}-${event}`,
value: messageValue,
headers: {
event,
instance: instanceName,
origin,
timestamp: dateTime,
},
},
],
});
if (logEnabled) {
const logData = {
local: `${origin}.sendData-Kafka-Global`,
...message,
};
this.logger.log(logData);
}
break;
} catch (error) {
this.logger.error({
local: 'KafkaController.emit',
message: `Error publishing global Kafka message (attempt ${retry + 1}/3)`,
error: error.message || error,
});
retry++;
if (retry >= 3) {
this.handleConnectionLoss();
}
}
}
}
}
public async cleanup(): Promise<void> {
try {
if (this.consumer) {
await this.consumer.disconnect();
this.consumer = null;
}
if (this.producer) {
await this.producer.disconnect();
this.producer = null;
}
this.kafkaClient = null;
} catch (error) {
this.logger.warn({
local: 'KafkaController.cleanup',
message: 'Error during cleanup',
error: error.message || error,
});
this.producer = null;
this.consumer = null;
this.kafkaClient = null;
}
}
}
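
A minimal, illustrative kafkajs consumer for the instance-scoped topics published by the controller above; the broker address, topic, group id and instance name are assumptions for the sketch, not values taken from this commit.

import { Kafka } from 'kafkajs';

const kafka = new Kafka({ clientId: 'my-consumer', brokers: ['localhost:9092'] });
const consumer = kafka.consumer({ groupId: 'my-consumer-group' });

async function run() {
  await consumer.connect();
  // Topic name follows getTopicName(): '<prefix>.<instanceName>.<event with dots>'
  await consumer.subscribe({ topic: 'evolution.my-instance.messages.upsert', fromBeginning: false });
  await consumer.run({
    eachMessage: async ({ topic, message }) => {
      // The message value is the JSON envelope built in emit(): event, instance, data, ...
      const payload = JSON.parse(message.value?.toString() || '{}');
      console.log(topic, payload.event, payload.instance);
    },
  });
}

run().catch(console.error);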

View File

@ -0,0 +1,36 @@
import { RouterBroker } from '@api/abstract/abstract.router';
import { InstanceDto } from '@api/dto/instance.dto';
import { EventDto } from '@api/integrations/event/event.dto';
import { HttpStatus } from '@api/routes/index.router';
import { eventManager } from '@api/server.module';
import { eventSchema, instanceSchema } from '@validate/validate.schema';
import { RequestHandler, Router } from 'express';
export class KafkaRouter extends RouterBroker {
constructor(...guards: RequestHandler[]) {
super();
this.router
.post(this.routerPath('set'), ...guards, async (req, res) => {
const response = await this.dataValidate<EventDto>({
request: req,
schema: eventSchema,
ClassRef: EventDto,
execute: (instance, data) => eventManager.kafka.set(instance.instanceName, data),
});
res.status(HttpStatus.CREATED).json(response);
})
.get(this.routerPath('find'), ...guards, async (req, res) => {
const response = await this.dataValidate<InstanceDto>({
request: req,
schema: instanceSchema,
ClassRef: InstanceDto,
execute: (instance) => eventManager.kafka.get(instance.instanceName),
});
res.status(HttpStatus.OK).json(response);
});
}
public readonly router: Router = Router();
}
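
Assuming the KafkaRouter follows the same path convention as the other event routers (instance name as a path segment plus the apikey header), enabling Kafka events for an instance could look like the sketch below; the base URL, instance name and API key are placeholders.

// Hypothetical call to the new /kafka/set route; the payload shape follows eventSchema.
await fetch('http://localhost:8080/kafka/set/my-instance', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', apikey: 'YOUR_API_KEY' },
  body: JSON.stringify({
    kafka: { enabled: true, events: ['MESSAGES_UPSERT', 'SEND_MESSAGE'] },
  }),
});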

View File

@ -0,0 +1,21 @@
import { JSONSchema7 } from 'json-schema';
import { v4 } from 'uuid';
import { EventController } from '../event.controller';
export const kafkaSchema: JSONSchema7 = {
$id: v4(),
type: 'object',
properties: {
enabled: { type: 'boolean', enum: [true, false] },
events: {
type: 'array',
minItems: 0,
items: {
type: 'string',
enum: EventController.events,
},
},
},
required: ['enabled'],
};

View File

@ -33,7 +33,7 @@ const bucketExists = async () => {
try {
const list = await minioClient.listBuckets();
return list.find((bucket) => bucket.name === bucketName);
} catch (error) {
} catch {
return false;
}
}

View File

@ -25,7 +25,7 @@ export class ProxyService {
}
return result;
} catch (error) {
} catch {
return null;
}
}

View File

@ -24,7 +24,7 @@ export class SettingsService {
}
return result;
} catch (error) {
} catch {
return null;
}
}

View File

@ -153,6 +153,34 @@ export type Sqs = {
};
};
export type Kafka = {
ENABLED: boolean;
CLIENT_ID: string;
BROKERS: string[];
CONNECTION_TIMEOUT: number;
REQUEST_TIMEOUT: number;
GLOBAL_ENABLED: boolean;
CONSUMER_GROUP_ID: string;
TOPIC_PREFIX: string;
NUM_PARTITIONS: number;
REPLICATION_FACTOR: number;
AUTO_CREATE_TOPICS: boolean;
EVENTS: EventsRabbitmq;
SASL?: {
ENABLED: boolean;
MECHANISM: string;
USERNAME: string;
PASSWORD: string;
};
SSL?: {
ENABLED: boolean;
REJECT_UNAUTHORIZED: boolean;
CA?: string;
KEY?: string;
CERT?: string;
};
};
export type Websocket = {
ENABLED: boolean;
GLOBAL_EVENTS: boolean;
@ -372,6 +400,7 @@ export interface Env {
RABBITMQ: Rabbitmq;
NATS: Nats;
SQS: Sqs;
KAFKA: Kafka;
WEBSOCKET: Websocket;
WA_BUSINESS: WaBusiness;
LOG: Log;
@ -587,6 +616,68 @@ export class ConfigService {
TYPEBOT_START: process.env?.SQS_GLOBAL_TYPEBOT_START === 'true',
},
},
KAFKA: {
ENABLED: process.env?.KAFKA_ENABLED === 'true',
CLIENT_ID: process.env?.KAFKA_CLIENT_ID || 'evolution-api',
BROKERS: process.env?.KAFKA_BROKERS?.split(',') || ['localhost:9092'],
CONNECTION_TIMEOUT: Number.parseInt(process.env?.KAFKA_CONNECTION_TIMEOUT || '3000'),
REQUEST_TIMEOUT: Number.parseInt(process.env?.KAFKA_REQUEST_TIMEOUT || '30000'),
GLOBAL_ENABLED: process.env?.KAFKA_GLOBAL_ENABLED === 'true',
CONSUMER_GROUP_ID: process.env?.KAFKA_CONSUMER_GROUP_ID || 'evolution-api-consumers',
TOPIC_PREFIX: process.env?.KAFKA_TOPIC_PREFIX || 'evolution',
NUM_PARTITIONS: Number.parseInt(process.env?.KAFKA_NUM_PARTITIONS || '1'),
REPLICATION_FACTOR: Number.parseInt(process.env?.KAFKA_REPLICATION_FACTOR || '1'),
AUTO_CREATE_TOPICS: process.env?.KAFKA_AUTO_CREATE_TOPICS === 'true',
EVENTS: {
APPLICATION_STARTUP: process.env?.KAFKA_EVENTS_APPLICATION_STARTUP === 'true',
INSTANCE_CREATE: process.env?.KAFKA_EVENTS_INSTANCE_CREATE === 'true',
INSTANCE_DELETE: process.env?.KAFKA_EVENTS_INSTANCE_DELETE === 'true',
QRCODE_UPDATED: process.env?.KAFKA_EVENTS_QRCODE_UPDATED === 'true',
MESSAGES_SET: process.env?.KAFKA_EVENTS_MESSAGES_SET === 'true',
MESSAGES_UPSERT: process.env?.KAFKA_EVENTS_MESSAGES_UPSERT === 'true',
MESSAGES_EDITED: process.env?.KAFKA_EVENTS_MESSAGES_EDITED === 'true',
MESSAGES_UPDATE: process.env?.KAFKA_EVENTS_MESSAGES_UPDATE === 'true',
MESSAGES_DELETE: process.env?.KAFKA_EVENTS_MESSAGES_DELETE === 'true',
SEND_MESSAGE: process.env?.KAFKA_EVENTS_SEND_MESSAGE === 'true',
SEND_MESSAGE_UPDATE: process.env?.KAFKA_EVENTS_SEND_MESSAGE_UPDATE === 'true',
CONTACTS_SET: process.env?.KAFKA_EVENTS_CONTACTS_SET === 'true',
CONTACTS_UPSERT: process.env?.KAFKA_EVENTS_CONTACTS_UPSERT === 'true',
CONTACTS_UPDATE: process.env?.KAFKA_EVENTS_CONTACTS_UPDATE === 'true',
PRESENCE_UPDATE: process.env?.KAFKA_EVENTS_PRESENCE_UPDATE === 'true',
CHATS_SET: process.env?.KAFKA_EVENTS_CHATS_SET === 'true',
CHATS_UPSERT: process.env?.KAFKA_EVENTS_CHATS_UPSERT === 'true',
CHATS_UPDATE: process.env?.KAFKA_EVENTS_CHATS_UPDATE === 'true',
CHATS_DELETE: process.env?.KAFKA_EVENTS_CHATS_DELETE === 'true',
CONNECTION_UPDATE: process.env?.KAFKA_EVENTS_CONNECTION_UPDATE === 'true',
LABELS_EDIT: process.env?.KAFKA_EVENTS_LABELS_EDIT === 'true',
LABELS_ASSOCIATION: process.env?.KAFKA_EVENTS_LABELS_ASSOCIATION === 'true',
GROUPS_UPSERT: process.env?.KAFKA_EVENTS_GROUPS_UPSERT === 'true',
GROUP_UPDATE: process.env?.KAFKA_EVENTS_GROUPS_UPDATE === 'true',
GROUP_PARTICIPANTS_UPDATE: process.env?.KAFKA_EVENTS_GROUP_PARTICIPANTS_UPDATE === 'true',
CALL: process.env?.KAFKA_EVENTS_CALL === 'true',
TYPEBOT_START: process.env?.KAFKA_EVENTS_TYPEBOT_START === 'true',
TYPEBOT_CHANGE_STATUS: process.env?.KAFKA_EVENTS_TYPEBOT_CHANGE_STATUS === 'true',
},
SASL:
process.env?.KAFKA_SASL_ENABLED === 'true'
? {
ENABLED: true,
MECHANISM: process.env?.KAFKA_SASL_MECHANISM || 'plain',
USERNAME: process.env?.KAFKA_SASL_USERNAME || '',
PASSWORD: process.env?.KAFKA_SASL_PASSWORD || '',
}
: undefined,
SSL:
process.env?.KAFKA_SSL_ENABLED === 'true'
? {
ENABLED: true,
REJECT_UNAUTHORIZED: process.env?.KAFKA_SSL_REJECT_UNAUTHORIZED !== 'false',
CA: process.env?.KAFKA_SSL_CA,
KEY: process.env?.KAFKA_SSL_KEY,
CERT: process.env?.KAFKA_SSL_CERT,
}
: undefined,
},
WEBSOCKET: {
ENABLED: process.env?.WEBSOCKET_ENABLED === 'true',
GLOBAL_EVENTS: process.env?.WEBSOCKET_GLOBAL_EVENTS === 'true',

View File

@ -19,7 +19,7 @@ export async function keyExists(sessionId: string): Promise<any> {
try {
const key = await prismaRepository.session.findUnique({ where: { sessionId: sessionId } });
return !!key;
} catch (error) {
} catch {
return false;
}
}
@ -38,7 +38,7 @@ export async function saveKey(sessionId: string, keyJson: any): Promise<any> {
where: { sessionId: sessionId },
data: { creds: JSON.stringify(keyJson) },
});
} catch (error) {
} catch {
return null;
}
}
@ -49,7 +49,7 @@ export async function getAuthKey(sessionId: string): Promise<any> {
if (!register) return null;
const auth = await prismaRepository.session.findUnique({ where: { sessionId: sessionId } });
return JSON.parse(auth?.creds);
} catch (error) {
} catch {
return null;
}
}
@ -59,7 +59,7 @@ async function deleteAuthKey(sessionId: string): Promise<any> {
const register = await keyExists(sessionId);
if (!register) return;
await prismaRepository.session.delete({ where: { sessionId: sessionId } });
} catch (error) {
} catch {
return;
}
}
@ -68,7 +68,7 @@ async function fileExists(file: string): Promise<any> {
try {
const stat = await fs.stat(file);
if (stat.isFile()) return true;
} catch (error) {
} catch {
return;
}
}
@ -119,7 +119,7 @@ export default async function useMultiFileAuthStatePrisma(
const parsedData = JSON.parse(rawData, BufferJSON.reviver);
return parsedData;
} catch (error) {
} catch {
return null;
}
}
@ -137,7 +137,7 @@ export default async function useMultiFileAuthStatePrisma(
} else {
await deleteAuthKey(sessionId);
}
} catch (error) {
} catch {
return;
}
}