feat(makefile): update run command to exclude frontend and log files during reload

This commit is contained in:
Davidson Gomes 2025-05-24 10:27:30 -03:00
parent 956d16a854
commit c4a4e5fd68
30 changed files with 2184 additions and 563 deletions

149
.github/workflows/build-and-deploy.yml vendored Normal file
View File

@ -0,0 +1,149 @@
---
# Build and push the backend and frontend Docker images.
# Path filtering (dorny/paths-filter) gates PR builds; pushes to main
# and release tags always build both images.
name: Build and Deploy Docker Images

on:
  push:
    branches:
      - main
    tags:
      - "*.*.*"
  pull_request:
    branches:
      - main

jobs:
  detect-changes:
    name: Detect Changes
    runs-on: ubuntu-latest
    outputs:
      backend-changed: ${{ steps.changes.outputs.backend }}
      frontend-changed: ${{ steps.changes.outputs.frontend }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history so paths-filter can diff against the base branch.
          fetch-depth: 0
      - name: Detect changes
        id: changes
        uses: dorny/paths-filter@v2
        with:
          filters: |
            backend:
              - 'src/**'
              - 'migrations/**'
              - 'scripts/**'
              - 'Dockerfile'
              - 'pyproject.toml'
              - 'alembic.ini'
              - 'conftest.py'
              - 'setup.py'
              - 'Makefile'
              - '.dockerignore'
            frontend:
              - 'frontend/**'

  build-backend:
    name: Build Backend Image
    runs-on: ubuntu-latest
    needs: detect-changes
    # Pushes (main / tags) always build; the change filter only gates PRs.
    if: needs.detect-changes.outputs.backend-changed == 'true' || github.event_name == 'push'
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: evoapicloud/evo-ai
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest,enable={{is_default_branch}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          # PR builds only validate the image; nothing is pushed.
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}

  build-frontend:
    name: Build Frontend Image
    runs-on: ubuntu-latest
    needs: detect-changes
    # Pushes (main / tags) always build; the change filter only gates PRs.
    if: needs.detect-changes.outputs.frontend-changed == 'true' || github.event_name == 'push'
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: evoapicloud/evo-ai-frontend
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest,enable={{is_default_branch}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          context: ./frontend
          file: ./frontend/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            NEXT_PUBLIC_API_URL=${{ vars.NEXT_PUBLIC_API_URL || 'https://api-evoai.evoapicloud.com' }}
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}

139
.github/workflows/build-homolog.yml vendored Normal file
View File

@ -0,0 +1,139 @@
---
# Build and push homolog (staging) images on pushes to develop/homolog.
# dorny/paths-filter decides which image(s) actually need rebuilding.
name: Build Homolog Images

on:
  push:
    branches:
      - develop
      - homolog

jobs:
  detect-changes:
    name: Detect Changes
    runs-on: ubuntu-latest
    outputs:
      backend-changed: ${{ steps.changes.outputs.backend }}
      frontend-changed: ${{ steps.changes.outputs.frontend }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history so paths-filter can diff against the previous commit.
          fetch-depth: 0
      - name: Detect changes
        id: changes
        uses: dorny/paths-filter@v2
        with:
          filters: |
            backend:
              - 'src/**'
              - 'migrations/**'
              - 'scripts/**'
              - 'Dockerfile'
              - 'pyproject.toml'
              - 'alembic.ini'
              - 'conftest.py'
              - 'setup.py'
              - 'Makefile'
              - '.dockerignore'
            frontend:
              - 'frontend/**'

  build-backend-homolog:
    name: Build Backend Homolog
    runs-on: ubuntu-latest
    needs: detect-changes
    # NOTE(review): the original condition also had `|| github.event_name == 'push'`,
    # which is always true here (this workflow only triggers on push) and made the
    # path filter dead code. Gate on the filter alone so unchanged parts are skipped.
    if: needs.detect-changes.outputs.backend-changed == 'true'
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: evoapicloud/evo-ai
          tags: |
            type=raw,value=homolog
            type=raw,value=homolog-{{sha}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}

  build-frontend-homolog:
    name: Build Frontend Homolog
    runs-on: ubuntu-latest
    needs: detect-changes
    # See note on build-backend-homolog: gate on the path filter alone.
    if: needs.detect-changes.outputs.frontend-changed == 'true'
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: evoapicloud/evo-ai-frontend
          tags: |
            type=raw,value=homolog
            type=raw,value=homolog-{{sha}}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          context: ./frontend
          file: ./frontend/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            NEXT_PUBLIC_API_URL=${{ vars.NEXT_PUBLIC_API_URL_HOMOLOG || 'https://api-homolog-evoai.evoapicloud.com' }}
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -1,48 +0,0 @@
name: Build Docker image
on:
push:
tags:
- "*.*.*"
jobs:
build_deploy:
name: Build and Deploy
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: evoapicloud/evo-ai
tags: type=semver,pattern=v{{version}}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -1,48 +0,0 @@
name: Build Docker image
on:
push:
branches:
- develop
jobs:
build_deploy:
name: Build and Deploy
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: evoapicloud/evo-ai
tags: homolog
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -1,48 +0,0 @@
name: Build Docker image
on:
push:
branches:
- main
jobs:
build_deploy:
name: Build and Deploy
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: evoapicloud/evo-ai
tags: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -18,7 +18,7 @@ alembic-downgrade:
# Command to run the server
run:
uvicorn src.main:app --host 0.0.0.0 --port 8000 --reload --env-file .env
uvicorn src.main:app --host 0.0.0.0 --port 8000 --reload --env-file .env --reload-exclude frontend/ --reload-exclude "*.log" --reload-exclude "*.tmp"
# Command to run the server in production mode
run-prod:

253
README.md
View File

@ -66,6 +66,7 @@ Executes a specific task using a target agent with structured task instructions.
## 🛠️ Technologies
### Backend
- **FastAPI**: Web framework for building the API
- **SQLAlchemy**: ORM for database interaction
- **PostgreSQL**: Main database
@ -78,7 +79,17 @@ Executes a specific task using a target agent with structured task instructions.
- **Jinja2**: Template engine for email rendering
- **Bcrypt**: Password hashing and security
- **LangGraph**: Framework for building stateful, multi-agent workflows
- **ReactFlow**: Library for building node-based visual workflows
### Frontend
- **Next.js 15**: React framework with App Router
- **React 18**: User interface library
- **TypeScript**: Type-safe JavaScript
- **Tailwind CSS**: Utility-first CSS framework
- **shadcn/ui**: Modern component library
- **React Hook Form**: Form management
- **Zod**: Schema validation
- **ReactFlow**: Node-based visual workflows
- **React Query**: Server state management
## 📊 Langfuse Integration (Tracing & Observability)
@ -105,59 +116,220 @@ For more information about the A2A protocol, visit [Google's A2A Protocol Docume
## 📋 Prerequisites
### Backend
- **Python**: 3.10 or higher
- **PostgreSQL**: 13.0 or higher
- **Redis**: 6.0 or higher
- **Git**: For version control
- **Make**: For running Makefile commands
### Frontend
- **Node.js**: 18.0 or higher
- **pnpm**: Package manager (recommended) or npm/yarn
## 🔧 Installation
1. Clone the repository:
### 1. Clone the Repository
```bash
git clone https://github.com/EvolutionAPI/evo-ai.git
cd evo-ai
```
2. Create a virtual environment and install dependencies:
### 2. Backend Setup
#### Virtual Environment and Dependencies
```bash
# Create and activate virtual environment
make venv
source venv/bin/activate # Linux/Mac
make install-dev # For development dependencies
# or on Windows: venv\Scripts\activate
# Install development dependencies
make install-dev
```
3. Set up environment variables:
#### Environment Configuration
```bash
# Copy and configure backend environment
cp .env.example .env
# Edit the .env file with your settings
# Edit the .env file with your database, Redis, and other settings
```
4. Initialize the database and seed data:
#### Database Setup
```bash
# Initialize database and apply migrations
make alembic-upgrade
# Seed initial data (admin user, sample clients, etc.)
make seed-all
```
## 🖥️ Frontend Installation
### 3. Frontend Setup
1. Clone the frontend repository:
#### Install Dependencies
```bash
git clone https://github.com/EvolutionAPI/evo-ai-frontend.git
cd evo-ai-frontend
# Navigate to frontend directory
cd frontend
# Install dependencies using pnpm (recommended)
pnpm install
# Or using npm
# npm install
# Or using yarn
# yarn install
```
2. Follow the installation instructions in the frontend repository's README.
#### Frontend Environment Configuration
## 🚀 Getting Started
```bash
# Copy and configure frontend environment
cp .env.example .env
# Edit .env with your API URL (default: http://localhost:8000)
```
After installation, start by configuring your MCP server, creating a client, and setting up your agents.
The frontend `.env` file (copied above from `.env.example`) should contain:
### Configuration (.env file)
```env
NEXT_PUBLIC_API_URL=http://localhost:8000
```
## 🚀 Running the Application
### Development Mode
#### Start Backend (Terminal 1)
```bash
# From project root
make run
# Backend will be available at http://localhost:8000
```
#### Start Frontend (Terminal 2)
```bash
# From frontend directory
cd frontend
pnpm dev
# Or using npm/yarn
# npm run dev
# yarn dev
# Frontend will be available at http://localhost:3000
```
### Production Mode
#### Backend
```bash
make run-prod # Production with multiple workers
```
#### Frontend
```bash
cd frontend
pnpm build && pnpm start
# Or using npm/yarn
# npm run build && npm start
# yarn build && yarn start
```
## 🐳 Docker Installation
### Full Stack with Docker Compose
```bash
# Build and start all services (backend + database + redis)
make docker-build
make docker-up
# Initialize database with seed data
make docker-seed
```
### Frontend with Docker
```bash
# From frontend directory
cd frontend
# Build frontend image
docker build -t evo-ai-frontend .
# Run frontend container
docker run -p 3000:3000 -e NEXT_PUBLIC_API_URL=http://localhost:8000 evo-ai-frontend
```
Or using the provided docker-compose:
```bash
# From frontend directory
cd frontend
docker-compose up -d
```
## 🎯 Getting Started
After installation, follow these steps:
1. **Access the Frontend**: Open `http://localhost:3000`
2. **Create Admin Account**: Use the seeded admin credentials or register a new account
3. **Configure MCP Server**: Set up your first MCP server connection
4. **Create Client**: Add a client to organize your agents
5. **Build Your First Agent**: Create and configure your AI agent
6. **Test Agent**: Use the chat interface to interact with your agent
### Default Admin Credentials
After running the seeders, you can login with:
- **Email**: Check the seeder output for the generated admin email
- **Password**: Check the seeder output for the generated password
## 🖥️ API Documentation
The interactive API documentation is available at:
- Swagger UI: `http://localhost:8000/docs`
- ReDoc: `http://localhost:8000/redoc`
## 👨‍💻 Development Commands
### Backend Commands
```bash
# Database migrations
make alembic-upgrade # Update database to latest version
make alembic-revision message="description" # Create new migration
# Seeders
make seed-all # Run all seeders
# Code verification
make lint # Verify code with flake8
make format # Format code with black
```
### Frontend Commands
```bash
# From frontend directory
cd frontend
# Development
pnpm dev # Start development server
pnpm build # Build for production
pnpm start # Start production server
pnpm lint # Run ESLint
```
## 🚀 Configuration
### Backend Configuration (.env file)
Key settings include:
@ -182,6 +354,13 @@ EMAIL_PROVIDER="sendgrid" # Options: "sendgrid" or "smtp"
ENCRYPTION_KEY="your-encryption-key"
```
### Frontend Configuration (.env.local file)
```bash
# API Configuration
NEXT_PUBLIC_API_URL="http://localhost:8000" # Backend API URL
```
> **Note**: While Google ADK is fully supported, the CrewAI engine option is still under active development. For production environments, it's recommended to use the default "adk" engine.
## 🔐 Authentication
@ -193,49 +372,7 @@ The API uses JWT (JSON Web Token) authentication with:
- Password recovery flow
- Account lockout after multiple failed login attempts
## 🚀 Running the Project
```bash
make run # For development with automatic reload
make run-prod # For production with multiple workers
```
The API will be available at `http://localhost:8000`
## 👨‍💻 Development Commands
```bash
# Database migrations
make alembic-upgrade # Update database to latest version
make alembic-revision message="description" # Create new migration
# Seeders
make seed-all # Run all seeders
# Code verification
make lint # Verify code with flake8
make format # Format code with black
```
## 🐳 Running with Docker
1. Configure the `.env` file
2. Start the services:
```bash
make docker-build
make docker-up
make docker-seed
```
## 📚 API Documentation
The interactive API documentation is available at:
- Swagger UI: `http://localhost:8000/docs`
- ReDoc: `http://localhost:8000/redoc`
## ⭐ Star Us on GitHub
## 🚀 Star Us on GitHub
If you find EvoAI useful, please consider giving us a star! Your support helps us grow our community and continue improving the product.

View File

@ -1,48 +0,0 @@
name: Build Docker image
on:
push:
tags:
- "*.*.*"
jobs:
build_deploy:
name: Build and Deploy
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: evoapicloud/evo-ai-frontend
tags: type=semver,pattern=v{{version}}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -1,48 +0,0 @@
name: Build Docker image
on:
push:
branches:
- develop
jobs:
build_deploy:
name: Build and Deploy
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: evoapicloud/evo-ai-frontend
tags: homolog
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -1,48 +0,0 @@
name: Build Docker image
on:
push:
branches:
- main
jobs:
build_deploy:
name: Build and Deploy
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: evoapicloud/evo-ai-frontend
tags: latest
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v5
with:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

View File

@ -215,7 +215,7 @@ export function AgentCard({
return new Date(agent.created_at).toLocaleDateString();
};
// Função para exportar o agente como JSON
// Function to export the agent as JSON
const handleExportAgent = () => {
try {
exportAsJson(
@ -231,18 +231,18 @@ export function AgentCard({
}
};
// Função para testar o agente A2A no laboratório
// Function to test the A2A agent in the lab
const handleTestA2A = () => {
// Usar a URL do agent card como URL base para testes A2A
// Use the agent card URL as base for A2A tests
const agentUrl = agent.agent_card_url?.replace(
"/.well-known/agent.json",
""
);
// Usar a API key diretamente do config do agente
// Use the API key directly from the agent config
const apiKey = agent.config?.api_key;
// Construir a URL com parâmetros para o laboratório de testes
// Build the URL with parameters for the lab tests
const params = new URLSearchParams();
if (agentUrl) {
@ -253,7 +253,7 @@ export function AgentCard({
params.set("api_key", apiKey);
}
// Redirecionar para o laboratório de testes na aba "lab"
// Redirect to the lab tests in the "lab" tab
const testUrl = `/documentation?${params.toString()}#lab`;
router.push(testUrl);

View File

@ -440,15 +440,15 @@ export default function AgentsPage() {
setEditingAgent(null);
};
// Função para exportar todos os agentes como JSON
// Function to export all agents as JSON
const handleExportAllAgents = () => {
try {
// Criar nome do arquivo com data atual
// Create file name with current date
const date = new Date();
const formattedDate = `${date.getFullYear()}-${(date.getMonth() + 1).toString().padStart(2, '0')}-${date.getDate().toString().padStart(2, '0')}`;
const filename = `agents-export-${formattedDate}`;
// Usar a função utilitária para exportar
// Use the utility function to export
// Pass agents both as the data and as allAgents parameter to properly resolve references
const result = exportAsJson({ agents: filteredAgents }, filename, true, agents);

View File

@ -189,12 +189,12 @@ const Canva = forwardRef(({ agent }: { agent: Agent | null }, ref) => {
setActiveExecutionNodeId,
}));
// Effect para limpar o nó ativo após um timeout
// Effect to clear the active node after a timeout
useEffect(() => {
if (activeExecutionNodeId) {
const timer = setTimeout(() => {
setActiveExecutionNodeId(null);
}, 5000); // Aumentar para 5 segundos para dar mais tempo de visualização
}, 5000); // Increase to 5 seconds to give more time to visualize
return () => clearTimeout(timer);
}
@ -218,13 +218,13 @@ const Canva = forwardRef(({ agent }: { agent: Agent | null }, ref) => {
}
}, [agent, setNodes, setEdges]);
// Atualizar os nós quando o nó ativo muda para adicionar classe visual
// Update nodes when the active node changes to add visual class
useEffect(() => {
if (nodes.length > 0) {
setNodes((nds: any) =>
nds.map((node: any) => {
if (node.id === activeExecutionNodeId) {
// Adiciona uma classe para destacar o nó ativo
// Add a class to highlight the active node
return {
...node,
className: "active-execution-node",
@ -234,7 +234,7 @@ const Canva = forwardRef(({ agent }: { agent: Agent | null }, ref) => {
},
};
} else {
// Remove a classe de destaque
// Remove the highlight class
const { isExecuting, ...restData } = node.data || {};
return {
...node,

View File

@ -108,15 +108,15 @@ export function AgentForm({ selectedNode, handleUpdateNode, setEdges, setIsOpen,
const [isTestModalOpen, setIsTestModalOpen] = useState(false);
const [isEditMode, setIsEditMode] = useState(false);
// Acessar a referência do canvas a partir do localStorage
// Access the canvas reference from localStorage
const canvasRef = useRef<any>(null);
useEffect(() => {
// Quando o componente é montado, verifica se há uma referência de canvas no contexto global
// When the component is mounted, check if there is a canvas reference in the global context
if (typeof window !== "undefined") {
const workflowsPage = document.querySelector('[data-workflow-page="true"]');
if (workflowsPage) {
// Se estamos na página de workflows, tentamos acessar a ref do canvas
// If we are on the workflows page, try to access the canvas ref
const canvasElement = workflowsPage.querySelector('[data-canvas-ref="true"]');
if (canvasElement && (canvasElement as any).__reactRef) {
canvasRef.current = (canvasElement as any).__reactRef.current;

View File

@ -89,8 +89,8 @@ export function AgentTestChatModal({ open, onOpenChange, agent, canvasRef }: Age
const onEvent = useCallback((event: any) => {
setMessages((prev) => [...prev, event]);
// Verificar se a mensagem vem de um nó de workflow e destacar o nó
// somente se o canvasRef estiver disponível (chamado do Test Workflow na página principal)
// Check if the message comes from a workflow node and highlight the node
// only if the canvasRef is available (called from Test Workflow on the main page)
if (event.author && event.author.startsWith('workflow-node:') && canvasRef?.current) {
const nodeId = event.author.split(':')[1];
canvasRef.current.setActiveExecutionNodeId(nodeId);
@ -139,7 +139,7 @@ export function AgentTestChatModal({ open, onOpenChange, agent, canvasRef }: Age
setExternalId(generateExternalId());
setIsInitializing(true);
// Breve delay para mostrar o status de inicialização
// Short delay to show the initialization status
const timer = setTimeout(() => {
setIsInitializing(false);
}, 1200);

View File

@ -223,7 +223,7 @@ function MessageForm({
</SelectTrigger>
<SelectContent className="bg-neutral-800 border-neutral-700 text-neutral-200">
<SelectItem value="text">Text</SelectItem>
{/* Outras opções podem ser habilitadas no futuro */}
{/* Other options can be enabled in the future */}
{/* <SelectItem value="image">Image</SelectItem>
<SelectItem value="file">File</SelectItem>
<SelectItem value="video">Video</SelectItem> */}

View File

@ -185,7 +185,7 @@ function WorkflowsContent() {
open={isTestModalOpen}
onOpenChange={setIsTestModalOpen}
agent={agent}
canvasRef={canvaRef} // Passamos a referência do canvas para permitir a visualização dos nós em execução
canvasRef={canvaRef} // Pass the canvas reference to allow visualization of running nodes
/>
)}

View File

@ -140,7 +140,7 @@ export function AgentInfoDialog({
}
};
// Função para exportar o agente como JSON
// Function to export the agent as JSON
const handleExportAgent = async () => {
if (!agent) return;

View File

@ -60,9 +60,9 @@ export function ChatInput({
const fileInputRef = useRef<HTMLInputElement>(null);
const textareaRef = useRef<HTMLTextAreaElement>(null);
// Autofocus no textarea quando o componente for montado
// Autofocus the textarea when the component is mounted
React.useEffect(() => {
// Pequeno timeout para garantir que o foco seja aplicado após a renderização completa
// Small timeout to ensure focus is applied after the complete rendering
if (autoFocus) {
const timer = setTimeout(() => {
if (textareaRef.current && !isLoading) {
@ -87,7 +87,7 @@ export function ChatInput({
setTimeout(() => {
setResetFileUpload(false);
// Mantém o foco no textarea após enviar a mensagem
// Keep the focus on the textarea after sending the message
if (autoFocus && textareaRef.current) {
textareaRef.current.focus();
}

View File

@ -49,6 +49,11 @@ interface LabSectionProps {
setTaskId: (id: string) => void;
callId: string;
setCallId: (id: string) => void;
a2aMethod: string;
setA2aMethod: (method: string) => void;
authMethod: string;
setAuthMethod: (method: string) => void;
generateNewIds: () => void;
sendRequest: () => Promise<void>;
sendStreamRequestWithEventSource: () => Promise<void>;
isLoading: boolean;
@ -76,6 +81,11 @@ export function LabSection({
setTaskId,
callId,
setCallId,
a2aMethod,
setA2aMethod,
authMethod,
setAuthMethod,
generateNewIds,
sendRequest,
sendStreamRequestWithEventSource,
isLoading,
@ -125,6 +135,11 @@ export function LabSection({
setTaskId={setTaskId}
callId={callId}
setCallId={setCallId}
a2aMethod={a2aMethod}
setA2aMethod={setA2aMethod}
authMethod={authMethod}
setAuthMethod={setAuthMethod}
generateNewIds={generateNewIds}
sendRequest={sendRequest}
isLoading={isLoading}
/>
@ -144,6 +159,7 @@ export function LabSection({
setTaskId={setTaskId}
callId={callId}
setCallId={setCallId}
authMethod={authMethod}
sendStreamRequest={sendStreamRequestWithEventSource}
isStreaming={isStreaming}
streamResponse={streamResponse}

View File

@ -851,12 +851,12 @@ function DocumentationContent() {
body: JSON.stringify(streamRpcRequest),
});
// Verificar o content-type da resposta
// Verify the content-type of the response
const contentType = initialResponse.headers.get("Content-Type");
addDebugLog(`Response content type: ${contentType || "not specified"}`);
if (contentType && contentType.includes("text/event-stream")) {
// É uma resposta SSE (Server-Sent Events)
// It's an SSE (Server-Sent Events) response
addDebugLog("Detected SSE response, processing stream directly");
processEventStream(initialResponse);
return;
@ -877,10 +877,10 @@ function DocumentationContent() {
try {
const responseText = await initialResponse.text();
// Verificar se a resposta começa com "data:", o que indica um SSE
// Verify if the response starts with "data:", which indicates an SSE
if (responseText.trim().startsWith("data:")) {
addDebugLog("Response has SSE format but wrong content-type");
// Criar uma resposta sintética para processar como stream
// Create a synthetic response to process as stream
const syntheticResponse = new Response(responseText, {
headers: {
"Content-Type": "text/event-stream",
@ -890,7 +890,7 @@ function DocumentationContent() {
return;
}
// Tentar processar como JSON
// Try to process as JSON
const initialData = JSON.parse(responseText);
addDebugLog("Initial stream response: " + JSON.stringify(initialData));
@ -913,7 +913,7 @@ function DocumentationContent() {
} catch (parseError) {
addDebugLog(`Error parsing response: ${parseError}`);
// Se não conseguimos processar como JSON ou SSE, mostrar o erro
// If we can't process as JSON or SSE, show the error
setStreamResponse(
`Error: Unable to process response: ${parseError instanceof Error ? parseError.message : String(parseError)}`
);

View File

@ -71,7 +71,6 @@
"cmdk": "1.0.4",
"date-fns": "2.28.0",
"embla-carousel-react": "8.5.1",
"evo-ai-frontend": "file:",
"input-otp": "1.4.1",
"lucide-react": "^0.454.0",
"next": "15.2.4",

File diff suppressed because it is too large Load Diff

View File

@ -27,10 +27,20 @@
"""
from fastapi import APIRouter, Depends, HTTPException, status, Header, Query
from fastapi import (
APIRouter,
Depends,
HTTPException,
status,
Header,
Query,
File,
UploadFile,
Form,
)
from sqlalchemy.orm import Session
from src.config.database import get_db
from typing import List, Dict, Any, Optional
from typing import List, Dict, Any, Optional, Union
import uuid
from src.core.jwt_middleware import (
get_jwt_token,
@ -48,6 +58,7 @@ from src.schemas.schemas import (
)
from src.services import agent_service, mcp_server_service, apikey_service
import logging
import json
logger = logging.getLogger(__name__)
@ -621,3 +632,74 @@ async def get_shared_agent(
agent.agent_card_url = agent.agent_card_url_property
return agent
@router.post("/import", response_model=List[Agent], status_code=status.HTTP_201_CREATED)
async def import_agents(
    file: UploadFile = File(...),
    folder_id: Optional[str] = Form(None),
    x_client_id: uuid.UUID = Header(..., alias="x-client-id"),
    db: Session = Depends(get_db),
    payload: dict = Depends(get_jwt_token),
):
    """Import one or more agents from an uploaded JSON file.

    Args:
        file: Uploaded ``.json`` file with the agent definitions.
        folder_id: Optional target folder id; when given, the folder must
            exist and belong to the requesting client.
        x_client_id: Client that will own the imported agents.
        db: Database session dependency.
        payload: Decoded JWT payload used for authorization.

    Returns:
        The list of imported agents.

    Raises:
        HTTPException: 400 for a malformed folder id, a non-JSON upload, or
            invalid JSON content; 404/403 for folder lookup/ownership
            failures; 500 for any other import error.
    """
    # Verify if the user has access to this client's data.
    await verify_user_client(payload, db, x_client_id)

    # Resolve and validate the optional destination folder.
    folder_uuid = None
    if folder_id:
        # Narrow try: only uuid.UUID() can raise ValueError here; the folder
        # checks below raise HTTPException, which must not be swallowed.
        try:
            folder_uuid = uuid.UUID(folder_id)
        except ValueError:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid folder ID format",
            )
        folder = agent_service.get_agent_folder(db, folder_uuid)
        if not folder:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Folder not found"
            )
        if folder.client_id != x_client_id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Folder does not belong to the specified client",
            )

    try:
        # Reject anything that is not a .json upload. The filename can be
        # missing entirely, so guard against None before endswith().
        if not file.filename or not file.filename.endswith(".json"):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Only JSON files are supported",
            )

        # Read the whole upload into memory and parse it.
        file_content = await file.read()
        try:
            agents_data = json.loads(file_content)
        except json.JSONDecodeError:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid JSON format"
            )

        # Delegate the actual import to the service layer.
        imported_agents = await agent_service.import_agents_from_json(
            db, agents_data, x_client_id, folder_uuid
        )
        return imported_agents
    except HTTPException:
        # Re-raise HTTP errors unchanged so their status codes are preserved.
        raise
    except Exception as e:
        logger.error(f"Error in agent import: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error importing agents: {str(e)}",
        )

View File

@ -68,16 +68,16 @@ logger = logging.getLogger(__name__)
class A2ATypeValidator:
"""Valida e converte tipos entre implementação custom e SDK oficial"""
"""Validate and convert types between custom and official SDK implementations"""
@staticmethod
def is_sdk_available() -> bool:
"""Verifica se o SDK está disponível"""
"""Check if SDK is available"""
return SDK_AVAILABLE
@staticmethod
def validate_agent_card(card_data: Dict[str, Any]) -> Optional[Any]:
"""Valida agent card usando types do SDK se disponível"""
"""Validate agent card using SDK types if available"""
if not SDK_AVAILABLE:
logger.debug("SDK not available, using custom validation")
return CustomAgentCard(**card_data)
@ -90,7 +90,7 @@ class A2ATypeValidator:
@staticmethod
def validate_message(message_data: Dict[str, Any]) -> Optional[Any]:
"""Valida mensagem usando types do SDK se disponível"""
"""Validate message using SDK types if available"""
if not SDK_AVAILABLE:
return CustomMessage(**message_data)
@ -102,7 +102,7 @@ class A2ATypeValidator:
@staticmethod
def validate_task(task_data: Dict[str, Any]) -> Optional[Any]:
"""Valida task usando types do SDK se disponível"""
"""Validate task using SDK types if available"""
if not SDK_AVAILABLE:
return CustomTask(**task_data)
@ -114,29 +114,29 @@ class A2ATypeValidator:
class A2ATypeConverter:
"""Converte entre tipos custom e SDK"""
"""Convert between custom and SDK types"""
@staticmethod
def custom_task_to_sdk(custom_task: CustomTask) -> Optional[Any]:
"""Converte CustomTask para SDKTask"""
"""Convert CustomTask to SDKTask"""
if not SDK_AVAILABLE:
return custom_task
try:
# Converte status
# Convert status
sdk_status = None
if custom_task.status:
sdk_status = A2ATypeConverter.custom_task_status_to_sdk(
custom_task.status
)
# Se status é None, criar um status básico
# If status is None, create a basic status
if not sdk_status:
sdk_status = SDKTaskStatus(
state=SDKTaskState.unknown, message=None, timestamp=None
)
# Converte artifacts
# Convert artifacts
sdk_artifacts = []
if custom_task.artifacts:
for artifact in custom_task.artifacts:
@ -144,7 +144,7 @@ class A2ATypeConverter:
if sdk_artifact:
sdk_artifacts.append(sdk_artifact)
# Converte history
# Convert history
sdk_history = []
if custom_task.history:
for message in custom_task.history:
@ -155,7 +155,7 @@ class A2ATypeConverter:
return SDKTask(
id=custom_task.id,
contextId=custom_task.sessionId,
kind="task", # Novo campo no SDK
kind="task", # New field in SDK
status=sdk_status,
artifacts=sdk_artifacts if sdk_artifacts else None,
history=sdk_history if sdk_history else None,
@ -167,15 +167,15 @@ class A2ATypeConverter:
@staticmethod
def sdk_task_to_custom(sdk_task) -> Optional[CustomTask]:
"""Converte SDKTask para CustomTask"""
"""Convert SDKTask to CustomTask"""
if not SDK_AVAILABLE:
return sdk_task
try:
# Converte status
# Convert status
custom_status = A2ATypeConverter.sdk_task_status_to_custom(sdk_task.status)
# Converte artifacts
# Convert artifacts
custom_artifacts = []
if sdk_task.artifacts:
for artifact in sdk_task.artifacts:
@ -183,7 +183,7 @@ class A2ATypeConverter:
if custom_artifact:
custom_artifacts.append(custom_artifact)
# Converte history
# Convert history
custom_history = []
if sdk_task.history:
for message in sdk_task.history:
@ -205,12 +205,12 @@ class A2ATypeConverter:
@staticmethod
def custom_task_status_to_sdk(custom_status: CustomTaskStatus) -> Optional[Any]:
"""Converte CustomTaskStatus para SDKTaskStatus"""
"""Convert CustomTaskStatus to SDKTaskStatus"""
if not SDK_AVAILABLE:
return custom_status
try:
# Mapeia estados
# Map states
state_mapping = {
CustomTaskState.SUBMITTED: SDKTaskState.submitted,
CustomTaskState.WORKING: SDKTaskState.working,
@ -223,14 +223,14 @@ class A2ATypeConverter:
sdk_state = state_mapping.get(custom_status.state, SDKTaskState.unknown)
# Converte message se existir
# Convert message if exists
sdk_message = None
if custom_status.message:
sdk_message = A2ATypeConverter.custom_message_to_sdk(
custom_status.message
)
# Converter timestamp para string se for datetime
# Convert timestamp to string if it's a datetime
timestamp_str = custom_status.timestamp
if hasattr(custom_status.timestamp, "isoformat"):
timestamp_str = custom_status.timestamp.isoformat()
@ -244,12 +244,12 @@ class A2ATypeConverter:
@staticmethod
def sdk_task_status_to_custom(sdk_status) -> Optional[CustomTaskStatus]:
"""Converte SDKTaskStatus para CustomTaskStatus"""
"""Convert SDKTaskStatus to CustomTaskStatus"""
if not SDK_AVAILABLE:
return sdk_status
try:
# Mapeia estados de volta
# Map states back
state_mapping = {
SDKTaskState.submitted: CustomTaskState.SUBMITTED,
SDKTaskState.working: CustomTaskState.WORKING,
@ -262,7 +262,7 @@ class A2ATypeConverter:
custom_state = state_mapping.get(sdk_status.state, CustomTaskState.UNKNOWN)
# Converte message se existir
# Convert message if exists
custom_message = None
if sdk_status.message:
custom_message = A2ATypeConverter.sdk_message_to_custom(
@ -280,12 +280,12 @@ class A2ATypeConverter:
@staticmethod
def custom_message_to_sdk(custom_message: CustomMessage) -> Optional[Any]:
"""Converte CustomMessage para SDKMessage"""
"""Convert CustomMessage to SDKMessage"""
if not SDK_AVAILABLE:
return custom_message
try:
# Converte parts
# Convert parts
sdk_parts = []
for part in custom_message.parts:
if hasattr(part, "type"):
@ -318,7 +318,7 @@ class A2ATypeConverter:
@staticmethod
def sdk_message_to_custom(sdk_message) -> Optional[CustomMessage]:
"""Converte SDKMessage para CustomMessage"""
"""Convert SDKMessage to CustomMessage"""
if not SDK_AVAILABLE:
logger.info("SDK not available, returning original message")
return sdk_message
@ -333,23 +333,23 @@ class A2ATypeConverter:
f"SDK message parts length: {len(getattr(sdk_message, 'parts', []))}"
)
# Converte parts de volta
# Convert parts back
custom_parts = []
for idx, part in enumerate(sdk_message.parts):
logger.info(f"Processing part {idx}: {type(part)}")
logger.info(f"Part repr: {repr(part)}")
try:
# O SDK TextPart não permite acesso direto via getattr
# Vamos extrair dados do repr string
# The SDK TextPart does not allow direct access via getattr
# We will extract data from the repr string
part_repr = repr(part)
logger.info(f"Parsing part repr: {part_repr}")
# Verificar se é TextPart
# Check if it's a TextPart
if "TextPart" in str(type(part)) or "kind='text'" in part_repr:
logger.info("Detected TextPart")
# Extrair texto do repr
# Extract text from repr
import re
text_match = re.search(r"text='([^']*)'", part_repr)
@ -357,7 +357,7 @@ class A2ATypeConverter:
logger.info(f"Extracted text: {text_content}")
# Criar dicionário em vez de SimpleNamespace para compatibilidade com Pydantic
# Create dictionary instead of SimpleNamespace for Pydantic compatibility
text_part = {
"type": "text",
"text": text_content,
@ -369,11 +369,11 @@ class A2ATypeConverter:
elif "FilePart" in str(type(part)) or "kind='file'" in part_repr:
logger.info("Detected FilePart")
# Para file parts, precisaríamos extrair mais dados
# Por enquanto, criar estrutura básica
# For file parts, we would need to extract more data
# For now, create a basic structure
file_part = {
"type": "file",
"file": None, # Seria necessário extrair do SDK
"file": None, # It would be necessary to extract from SDK
"metadata": None,
}
custom_parts.append(file_part)
@ -381,7 +381,7 @@ class A2ATypeConverter:
else:
logger.warning(f"Unknown part type in repr: {part_repr}")
# Fallback: tentar extrair qualquer texto disponível
# Fallback: try to extract any available text
if "text=" in part_repr:
import re
@ -405,7 +405,7 @@ class A2ATypeConverter:
logger.info(f"Total custom parts created: {len(custom_parts)}")
# Converte role de enum para string se necessário
# Convert role from enum to string if necessary
role_str = sdk_message.role
if hasattr(sdk_message.role, "value"):
role_str = sdk_message.role.value
@ -432,16 +432,16 @@ class A2ATypeConverter:
@staticmethod
def custom_artifact_to_sdk(custom_artifact: CustomArtifact) -> Optional[Any]:
"""Converte CustomArtifact para SDKArtifact"""
"""Convert CustomArtifact to SDKArtifact"""
if not SDK_AVAILABLE:
return custom_artifact
try:
# Converter parts para formato SDK
# Convert parts to SDK format
sdk_parts = []
if custom_artifact.parts:
for part in custom_artifact.parts:
# Se part é um dicionário, converter para objeto SDK appropriado
# If part is a dictionary, convert to appropriate SDK object
if isinstance(part, dict):
if part.get("type") == "text":
sdk_parts.append(
@ -459,12 +459,12 @@ class A2ATypeConverter:
metadata=part.get("metadata"),
)
)
# Se já é um objeto SDK, usar diretamente
# If it's already a SDK object, use it directly
elif hasattr(part, "kind"):
sdk_parts.append(part)
# Se é um TextPart custom, converter
# If it's a custom TextPart, convert it
else:
# Fallback: assumir text part
# Fallback: assume text part
text_content = getattr(part, "text", str(part))
sdk_parts.append(
SDKTextPart(
@ -474,7 +474,7 @@ class A2ATypeConverter:
)
)
# Gerar artifactId se não existir
# Generate artifactId if it doesn't exist
artifact_id = getattr(custom_artifact, "artifactId", None)
if not artifact_id:
from uuid import uuid4
@ -494,7 +494,7 @@ class A2ATypeConverter:
@staticmethod
def sdk_artifact_to_custom(sdk_artifact) -> Optional[CustomArtifact]:
"""Converte SDKArtifact para CustomArtifact"""
"""Convert SDKArtifact to CustomArtifact"""
if not SDK_AVAILABLE:
return sdk_artifact
@ -514,12 +514,12 @@ class A2ATypeConverter:
@staticmethod
def custom_agent_card_to_sdk(custom_card: CustomAgentCard) -> Optional[Any]:
"""Converte CustomAgentCard para SDKAgentCard"""
"""Convert CustomAgentCard to SDKAgentCard"""
if not SDK_AVAILABLE:
return custom_card
try:
# Converte capabilities
# Convert capabilities
sdk_capabilities = None
if custom_card.capabilities:
sdk_capabilities = SDKAgentCapabilities(
@ -528,7 +528,7 @@ class A2ATypeConverter:
stateTransitionHistory=custom_card.capabilities.stateTransitionHistory,
)
# Converte provider
# Convert provider
sdk_provider = None
if custom_card.provider:
sdk_provider = SDKAgentProvider(
@ -536,7 +536,7 @@ class A2ATypeConverter:
url=custom_card.provider.url,
)
# Converte skills
# Convert skills
sdk_skills = []
if custom_card.skills:
for skill in custom_card.skills:
@ -569,9 +569,9 @@ class A2ATypeConverter:
return None
# Funções utilitárias para facilitar o uso
# Utility functions to facilitate usage
def validate_with_sdk(data: Dict[str, Any], data_type: str) -> Any:
"""Função utilitária para validar dados com SDK quando disponível"""
"""Utility function to validate data with SDK when available"""
validator = A2ATypeValidator()
if data_type == "agent_card":
@ -585,7 +585,7 @@ def validate_with_sdk(data: Dict[str, Any], data_type: str) -> Any:
def convert_to_sdk_format(custom_obj: Any) -> Any:
"""Função utilitária para converter objeto custom para formato SDK"""
"""Utility function to convert custom object to SDK format"""
converter = A2ATypeConverter()
if isinstance(custom_obj, CustomTask):
@ -600,7 +600,7 @@ def convert_to_sdk_format(custom_obj: Any) -> Any:
def convert_from_sdk_format(sdk_obj: Any) -> Any:
"""Função utilitária para converter objeto SDK para formato custom"""
"""Utility function to convert SDK object to custom format"""
converter = A2ATypeConverter()
if SDK_AVAILABLE:

View File

@ -82,10 +82,10 @@ logger = logging.getLogger(__name__)
class EvoAIAgentExecutor:
"""
Implementação direta da Message API para o SDK oficial.
Direct implementation of the Message API for the official SDK.
Ao invés de tentar converter para Task API, implementa diretamente
os métodos esperados pelo SDK: message/send e message/stream
Instead of trying to convert to Task API, it implements directly
the methods expected by the SDK: message/send and message/stream
"""
def __init__(self, db: Session, agent_id: UUID):
@ -96,24 +96,24 @@ class EvoAIAgentExecutor:
self, context: "RequestContext", event_queue: "EventQueue"
) -> None:
"""
Implementa diretamente a execução de mensagens usando agent_runner.
Direct implementation of message execution using agent_runner.
Não usa task manager - vai direto para a lógica de execução.
Does not use task manager - goes directly to execution logic.
"""
try:
logger.info("=" * 80)
logger.info(f"🚀 EXECUTOR EXECUTE() CHAMADO! Agent: {self.agent_id}")
logger.info(f"🚀 EXECUTOR EXECUTE() CALLED! Agent: {self.agent_id}")
logger.info(f"Context: {context}")
logger.info(f"Message: {getattr(context, 'message', 'NO_MESSAGE')}")
logger.info("=" * 80)
# Verifica se há mensagem
# Check if there is a message
if not hasattr(context, "message") or not context.message:
logger.error("❌ No message in context")
await self._emit_error_event(event_queue, "No message provided")
return
# Extrai texto da mensagem
# Extract text from message
message_text = self._extract_text_from_message(context.message)
if not message_text:
logger.error("❌ No text found in message")
@ -122,18 +122,18 @@ class EvoAIAgentExecutor:
logger.info(f"📝 Extracted message: {message_text}")
# Gera session_id único
# Generate unique session_id
session_id = context.context_id or str(uuid4())
logger.info(f"📝 Using session_id: {session_id}")
# Importa services necessários
# Import services needed
from src.services.service_providers import (
session_service,
artifacts_service,
memory_service,
)
# Chama agent_runner diretamente (sem task manager)
# Call agent_runner directly (without task manager)
logger.info("🔄 Calling agent_runner directly...")
from src.services.adk.agent_runner import run_agent
@ -146,15 +146,15 @@ class EvoAIAgentExecutor:
artifacts_service=artifacts_service,
memory_service=memory_service,
db=self.db,
files=None, # TODO: processar files se necessário
files=None, # TODO: process files if needed
)
logger.info(f"✅ Agent result: {result}")
# Converte resultado para evento SDK
# Convert result to SDK event
final_response = result.get("final_response", "No response")
# Cria mensagem de resposta compatível com SDK
# Create response message compatible with SDK
response_message = new_agent_text_message(final_response)
event_queue.enqueue_event(response_message)
@ -168,7 +168,7 @@ class EvoAIAgentExecutor:
await self._emit_error_event(event_queue, f"Execution error: {str(e)}")
def _extract_text_from_message(self, message) -> str:
"""Extrai texto da mensagem SDK."""
"""Extract text from SDK message."""
try:
logger.info(f"🔍 DEBUG MESSAGE STRUCTURE:")
logger.info(f"Message type: {type(message)}")
@ -190,12 +190,12 @@ class EvoAIAgentExecutor:
logger.info(f"Part {i} text: {part.text}")
return part.text
# Tenta outras formas de acessar o texto
# Try other ways to access the text
if hasattr(message, "text"):
logger.info(f"Message has direct text: {message.text}")
return message.text
# Se for string diretamente
# If it's a string directly
if isinstance(message, str):
logger.info(f"Message is string: {message}")
return message
@ -210,7 +210,7 @@ class EvoAIAgentExecutor:
return ""
async def _emit_error_event(self, event_queue: "EventQueue", error_message: str):
"""Emite evento de erro."""
"""Emit error event."""
try:
error_msg = new_agent_text_message(f"Error: {error_message}")
event_queue.enqueue_event(error_msg)
@ -220,14 +220,14 @@ class EvoAIAgentExecutor:
async def cancel(
self, context: "RequestContext", event_queue: "EventQueue"
) -> None:
"""Implementa cancelamento (básico por enquanto)."""
"""Implement cancellation (basic for now)."""
logger.info(f"Cancel called for agent {self.agent_id}")
# Por enquanto, só log - implementar cancelamento real se necessário
# For now, only log - implement real cancellation if needed
class EvoAISDKService:
"""
Serviço principal que cria e gerencia servidores A2A usando o SDK oficial.
Main service that creates and manages A2A servers using the official SDK.
"""
def __init__(self, db: Session):
@ -236,7 +236,7 @@ class EvoAISDKService:
def create_a2a_server(self, agent_id: UUID) -> Optional[Any]:
"""
Cria um servidor A2A usando o SDK oficial mas com lógica interna.
Create an A2A server using the official SDK but with internal logic.
"""
if not SDK_AVAILABLE:
logger.error("❌ a2a-sdk not available, cannot create SDK server")
@ -247,7 +247,7 @@ class EvoAISDKService:
logger.info(f"🏗️ CREATING A2A SDK SERVER FOR AGENT {agent_id}")
logger.info("=" * 80)
# Busca agent
# Search for agent in database
logger.info("🔍 Searching for agent in database...")
agent = get_agent(self.db, agent_id)
if not agent:
@ -256,36 +256,36 @@ class EvoAISDKService:
logger.info(f"✅ Found agent: {agent.name}")
# Cria agent card usando lógica existente
# Create agent card using existing logic
logger.info("🏗️ Creating agent card...")
agent_card = self._create_agent_card(agent)
logger.info(f"✅ Agent card created: {agent_card.name}")
# Cria executor usando adapter
# Create executor using adapter
logger.info("🏗️ Creating agent executor adapter...")
agent_executor = EvoAIAgentExecutor(self.db, agent_id)
logger.info("✅ Agent executor created")
# Cria task store
# Create task store
logger.info("🏗️ Creating task store...")
task_store = InMemoryTaskStore()
logger.info("✅ Task store created")
# Cria request handler
# Create request handler
logger.info("🏗️ Creating request handler...")
request_handler = DefaultRequestHandler(
agent_executor=agent_executor, task_store=task_store
)
logger.info("✅ Request handler created")
# Cria aplicação Starlette
# Create Starlette application
logger.info("🏗️ Creating Starlette application...")
server = A2AStarletteApplication(
agent_card=agent_card, http_handler=request_handler
)
logger.info("✅ Starlette application created")
# Armazena servidor
# Store server
server_key = str(agent_id)
self.servers[server_key] = server
@ -305,7 +305,7 @@ class EvoAISDKService:
def get_server(self, agent_id: UUID) -> Optional[Any]:
"""
Retorna servidor existente ou cria um novo.
Returns existing server or creates a new one.
"""
server_key = str(agent_id)
@ -316,38 +316,38 @@ class EvoAISDKService:
def _create_agent_card(self, agent) -> AgentCard:
"""
Cria AgentCard usando lógica existente mas no formato SDK.
Create AgentCard using existing logic but in SDK format.
"""
# Reutiliza lógica do A2AService existente
# Reuse existing A2AService logic
a2a_service = A2AService(self.db, A2ATaskManager(self.db))
custom_card = a2a_service.get_agent_card(agent.id)
# Converte para formato SDK
# Convert to SDK format
sdk_card = convert_to_sdk_format(custom_card)
if sdk_card:
return sdk_card
# Fallback: cria card básico
# Fallback: create basic card
return AgentCard(
name=agent.name,
description=agent.description or "",
url=f"{settings.API_URL}/api/v1/a2a-sdk/{agent.id}",
version=settings.API_VERSION,
capabilities=AgentCapabilities(
streaming=True, pushNotifications=False, stateTransitionHistory=True
streaming=True, pushNotifications=True, stateTransitionHistory=True
),
provider=AgentProvider(
organization=settings.ORGANIZATION_NAME, url=settings.ORGANIZATION_URL
),
defaultInputModes=["text"],
defaultInputModes=["text", "file"],
defaultOutputModes=["text"],
skills=[],
)
def remove_server(self, agent_id: UUID) -> bool:
"""
Remove servidor do cache.
Remove server from cache.
"""
server_key = str(agent_id)
if server_key in self.servers:
@ -357,7 +357,7 @@ class EvoAISDKService:
def list_servers(self) -> Dict[str, Dict[str, Any]]:
"""
Lista todos os servidores ativos.
List all active servers.
"""
result = {}
for agent_id, server in self.servers.items():
@ -369,19 +369,19 @@ class EvoAISDKService:
return result
# Função utilitária para criar servidor SDK facilmente
# Utility function to create SDK server easily
def create_a2a_sdk_server(db: Session, agent_id: UUID) -> Optional[Any]:
"""
Função utilitária para criar servidor A2A usando SDK.
Utility function to create A2A server using SDK.
"""
service = EvoAISDKService(db)
return service.create_a2a_server(agent_id)
# Função para verificar compatibilidade
# Function to check compatibility
def check_sdk_compatibility() -> Dict[str, Any]:
"""
Verifica compatibilidade e funcionalidades disponíveis do SDK.
Check compatibility and available features of the SDK.
"""
return {
"sdk_available": SDK_AVAILABLE,

View File

@ -119,7 +119,7 @@ class WorkflowAgent(BaseAgent):
if not content:
content = [
Event(
author="workflow_agent",
author=f"workflow-node:{node_id}",
content=Content(parts=[Part(text="Content not found")]),
)
]
@ -136,6 +136,12 @@ class WorkflowAgent(BaseAgent):
# Store specific results for this node
node_outputs = state.get("node_outputs", {})
node_outputs[node_id] = {"started_at": datetime.now().isoformat()}
new_event = Event(
author=f"workflow-node:{node_id}",
content=Content(parts=[Part(text="Workflow started")]),
)
content = content + [new_event]
yield {
"content": content,
@ -171,7 +177,7 @@ class WorkflowAgent(BaseAgent):
yield {
"content": [
Event(
author="workflow_agent",
author=f"workflow-node:{node_id}",
content=Content(parts=[Part(text="Agent not found")]),
)
],
@ -192,7 +198,12 @@ class WorkflowAgent(BaseAgent):
new_content = []
async for event in root_agent.run_async(ctx):
conversation_history.append(event)
new_content.append(event)
modified_event = Event(
author=f"workflow-node:{node_id}", content=event.content
)
new_content.append(modified_event)
print(f"New content: {new_content}")
@ -284,7 +295,7 @@ class WorkflowAgent(BaseAgent):
condition_content = [
Event(
author="workflow_agent",
author=f"workflow-node:{node_id}",
content=Content(parts=[Part(text="Cycle limit reached")]),
)
]
@ -315,7 +326,7 @@ class WorkflowAgent(BaseAgent):
condition_content = [
Event(
author=label,
author=f"workflow-node:{node_id}",
content=Content(
parts=[
Part(
@ -351,7 +362,7 @@ class WorkflowAgent(BaseAgent):
label = node_data.get("label", "message_node")
new_event = Event(
author=label,
author=f"workflow-node:{node_id}",
content=Content(parts=[Part(text=message_content)]),
)
content = content + [new_event]
@ -913,7 +924,7 @@ class WorkflowAgent(BaseAgent):
error_msg = f"Error executing the workflow agent: {str(error)}"
print(error_msg)
return Event(
author=self.name,
author=f"workflow-error:{self.name}",
content=Content(
role="agent",
parts=[Part(text=error_msg)],

View File

@ -995,3 +995,343 @@ def get_agents_by_folder(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error listing agents of folder",
)
async def import_agents_from_json(
    db: Session,
    agents_data: Dict[str, Any],
    client_id: uuid.UUID,
    folder_id: Optional[uuid.UUID] = None,
) -> List[Agent]:
    """
    Import one or more agents from JSON data.

    The import runs in two passes: first all non-workflow agents are created
    (building a mapping from the JSON's original IDs to the new database IDs),
    then workflow agents are created with their embedded agent-node and
    sub_agent references rewritten through that mapping. Per-agent failures are
    collected and logged rather than aborting the whole import.

    NOTE(review): this function mutates ``agents_data`` in place (deletes "id"
    keys, rewrites "config" entries) — callers should not reuse the dict.

    Args:
        db (Session): Database session
        agents_data (Dict[str, Any]): JSON data containing agent definitions;
            either a single agent object or ``{"agents": [...]}``
        client_id (uuid.UUID): Client ID to associate with the imported agents
        folder_id (Optional[uuid.UUID]): Optional folder ID to assign agents to

    Returns:
        List[Agent]: List of imported agents

    Raises:
        HTTPException: 400 if "agents" is not a list; 403 if an agent declares
            a client_id other than the authenticated one (caught per agent and
            recorded as an error); 422 if every agent in the file failed.
    """
    # Check if the JSON contains a single agent or multiple agents
    if "agents" in agents_data:
        # Multiple agents import
        agents_list = agents_data["agents"]
        if not isinstance(agents_list, list):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="The 'agents' field must contain a list of agent definitions",
            )
    else:
        # Single agent import
        agents_list = [agents_data]
    imported_agents = []
    errors = []
    id_mapping = {}  # Maps original IDs to newly created agent IDs
    # First pass: Import all non-workflow agents to establish ID mappings
    for agent_data in agents_list:
        # Skip workflow agents in the first pass, we'll handle them in the second pass
        if agent_data.get("type") == "workflow":
            continue
        try:
            # Store original ID if present for reference mapping
            original_id = None
            if "id" in agent_data:
                original_id = agent_data["id"]
                del agent_data["id"]  # Always create a new agent with new ID
            # Set the client ID for this agent if not provided
            if "client_id" not in agent_data:
                agent_data["client_id"] = str(client_id)
            else:
                # Ensure the provided client_id matches the authenticated client
                agent_client_id = uuid.UUID(agent_data["client_id"])
                if agent_client_id != client_id:
                    raise HTTPException(
                        status_code=status.HTTP_403_FORBIDDEN,
                        detail=f"Cannot import agent for client ID {agent_client_id}",
                    )
            # Set folder_id if provided and not already set in the agent data
            if folder_id and "folder_id" not in agent_data:
                agent_data["folder_id"] = str(folder_id)
            # Process config: Keep original configuration intact except for agent references
            if "config" in agent_data and agent_data["config"]:
                config = agent_data["config"]
                # Process sub_agents if present
                if "sub_agents" in config and config["sub_agents"]:
                    processed_sub_agents = []
                    for sub_agent_id in config["sub_agents"]:
                        try:
                            # Check if agent exists in database
                            # NOTE(review): sub_agent_id comes straight from the
                            # JSON (likely a string) — assumes get_agent accepts
                            # string IDs; TODO confirm against its signature.
                            existing_agent = get_agent(db, sub_agent_id)
                            if existing_agent:
                                processed_sub_agents.append(str(existing_agent.id))
                            else:
                                # Unknown references are dropped, not errors
                                logger.warning(
                                    f"Referenced sub_agent {sub_agent_id} not found - will be skipped"
                                )
                        except Exception as e:
                            logger.warning(
                                f"Error processing sub_agent {sub_agent_id}: {str(e)}"
                            )
                    config["sub_agents"] = processed_sub_agents
                # Process agent_tools if present
                if "agent_tools" in config and config["agent_tools"]:
                    processed_agent_tools = []
                    for agent_tool_id in config["agent_tools"]:
                        try:
                            # Check if agent exists in database
                            existing_agent = get_agent(db, agent_tool_id)
                            if existing_agent:
                                processed_agent_tools.append(str(existing_agent.id))
                            else:
                                # Unknown references are dropped, not errors
                                logger.warning(
                                    f"Referenced agent_tool {agent_tool_id} not found - will be skipped"
                                )
                        except Exception as e:
                            logger.warning(
                                f"Error processing agent_tool {agent_tool_id}: {str(e)}"
                            )
                    config["agent_tools"] = processed_agent_tools
            # Convert to AgentCreate schema
            agent_create = AgentCreate(**agent_data)
            # Create the agent using existing create_agent function
            db_agent = await create_agent(db, agent_create)
            # Store mapping from original ID to new ID
            if original_id:
                id_mapping[original_id] = str(db_agent.id)
            # If folder_id is provided but not in agent_data (couldn't be set at creation time)
            # assign the agent to the folder after creation
            if folder_id and not agent_data.get("folder_id"):
                db_agent = assign_agent_to_folder(db, db_agent.id, folder_id)
            # Set agent card URL if needed
            if not db_agent.agent_card_url:
                db_agent.agent_card_url = db_agent.agent_card_url_property
            imported_agents.append(db_agent)
        except Exception as e:
            # Log the error and continue with other agents
            agent_name = agent_data.get("name", "Unknown")
            error_msg = f"Error importing agent '{agent_name}': {str(e)}"
            logger.error(error_msg)
            errors.append(error_msg)
    # Second pass: Process workflow agents
    for agent_data in agents_list:
        # Only process workflow agents in the second pass
        if agent_data.get("type") != "workflow":
            continue
        try:
            # Store original ID if present for reference mapping
            original_id = None
            if "id" in agent_data:
                original_id = agent_data["id"]
                del agent_data["id"]  # Always create a new agent with new ID
            # Set the client ID for this agent if not provided
            if "client_id" not in agent_data:
                agent_data["client_id"] = str(client_id)
            else:
                # Ensure the provided client_id matches the authenticated client
                agent_client_id = uuid.UUID(agent_data["client_id"])
                if agent_client_id != client_id:
                    raise HTTPException(
                        status_code=status.HTTP_403_FORBIDDEN,
                        detail=f"Cannot import agent for client ID {agent_client_id}",
                    )
            # Set folder_id if provided and not already set in the agent data
            if folder_id and "folder_id" not in agent_data:
                agent_data["folder_id"] = str(folder_id)
            # Process workflow nodes
            if "config" in agent_data and agent_data["config"]:
                config = agent_data["config"]
                # Process workflow nodes
                if "workflow" in config and config["workflow"]:
                    workflow = config["workflow"]
                    if "nodes" in workflow and isinstance(workflow["nodes"], list):
                        for node in workflow["nodes"]:
                            if (
                                isinstance(node, dict)
                                and node.get("type") == "agent-node"
                            ):
                                if "data" in node and "agent" in node["data"]:
                                    agent_node = node["data"]["agent"]
                                    # Store the original node ID
                                    node_agent_id = None
                                    if "id" in agent_node:
                                        node_agent_id = agent_node["id"]
                                        # Check if this ID is in our mapping (we created it in this import)
                                        if node_agent_id in id_mapping:
                                            # Use our newly created agent
                                            # Get the agent from database with the mapped ID
                                            mapped_id = uuid.UUID(
                                                id_mapping[node_agent_id]
                                            )
                                            db_agent = get_agent(db, mapped_id)
                                            if db_agent:
                                                # Replace with database agent definition
                                                # Extract agent data as dictionary
                                                agent_dict = {
                                                    "id": str(db_agent.id),
                                                    "name": db_agent.name,
                                                    "description": db_agent.description,
                                                    "role": db_agent.role,
                                                    "goal": db_agent.goal,
                                                    "type": db_agent.type,
                                                    "model": db_agent.model,
                                                    "instruction": db_agent.instruction,
                                                    "config": db_agent.config,
                                                }
                                                node["data"]["agent"] = agent_dict
                                        else:
                                            # Check if this agent exists in database
                                            try:
                                                existing_agent = get_agent(
                                                    db, node_agent_id
                                                )
                                                if existing_agent:
                                                    # Replace with database agent definition
                                                    # Extract agent data as dictionary
                                                    agent_dict = {
                                                        "id": str(existing_agent.id),
                                                        "name": existing_agent.name,
                                                        "description": existing_agent.description,
                                                        "role": existing_agent.role,
                                                        "goal": existing_agent.goal,
                                                        "type": existing_agent.type,
                                                        "model": existing_agent.model,
                                                        "instruction": existing_agent.instruction,
                                                        "config": existing_agent.config,
                                                    }
                                                    node["data"]["agent"] = agent_dict
                                                else:
                                                    # Agent doesn't exist, so we'll create a new one
                                                    # First, remove ID to get a new one
                                                    if "id" in agent_node:
                                                        del agent_node["id"]
                                                    # Set client_id to match parent
                                                    agent_node["client_id"] = str(
                                                        client_id
                                                    )
                                                    # Create agent
                                                    # NOTE(review): agents created
                                                    # here are embedded in the node
                                                    # only; they are NOT added to
                                                    # imported_agents or id_mapping.
                                                    inner_agent_create = AgentCreate(
                                                        **agent_node
                                                    )
                                                    inner_db_agent = await create_agent(
                                                        db, inner_agent_create
                                                    )
                                                    # Replace with the new agent
                                                    # Extract agent data as dictionary
                                                    agent_dict = {
                                                        "id": str(inner_db_agent.id),
                                                        "name": inner_db_agent.name,
                                                        "description": inner_db_agent.description,
                                                        "role": inner_db_agent.role,
                                                        "goal": inner_db_agent.goal,
                                                        "type": inner_db_agent.type,
                                                        "model": inner_db_agent.model,
                                                        "instruction": inner_db_agent.instruction,
                                                        "config": inner_db_agent.config,
                                                    }
                                                    node["data"]["agent"] = agent_dict
                                            except Exception as e:
                                                logger.warning(
                                                    f"Error processing agent node {node_agent_id}: {str(e)}"
                                                )
                                                # Continue using the agent definition as is,
                                                # but without ID to get a new one
                                                if "id" in agent_node:
                                                    del agent_node["id"]
                                                agent_node["client_id"] = str(client_id)
                # Process sub_agents if present
                if "sub_agents" in config and config["sub_agents"]:
                    processed_sub_agents = []
                    for sub_agent_id in config["sub_agents"]:
                        # Check if agent exists in database
                        try:
                            # Check if this is an agent we just created
                            if sub_agent_id in id_mapping:
                                processed_sub_agents.append(id_mapping[sub_agent_id])
                            else:
                                # Check if this agent exists in database
                                existing_agent = get_agent(db, sub_agent_id)
                                if existing_agent:
                                    processed_sub_agents.append(str(existing_agent.id))
                                else:
                                    logger.warning(
                                        f"Referenced sub_agent {sub_agent_id} not found - will be skipped"
                                    )
                        except Exception as e:
                            logger.warning(
                                f"Error processing sub_agent {sub_agent_id}: {str(e)}"
                            )
                    config["sub_agents"] = processed_sub_agents
            # Convert to AgentCreate schema
            agent_create = AgentCreate(**agent_data)
            # Create the agent using existing create_agent function
            db_agent = await create_agent(db, agent_create)
            # Store mapping from original ID to new ID
            if original_id:
                id_mapping[original_id] = str(db_agent.id)
            # If folder_id is provided but not in agent_data (couldn't be set at creation time)
            # assign the agent to the folder after creation
            if folder_id and not agent_data.get("folder_id"):
                db_agent = assign_agent_to_folder(db, db_agent.id, folder_id)
            # Set agent card URL if needed
            if not db_agent.agent_card_url:
                db_agent.agent_card_url = db_agent.agent_card_url_property
            imported_agents.append(db_agent)
        except Exception as e:
            # Log the error and continue with other agents
            agent_name = agent_data.get("name", "Unknown")
            error_msg = f"Error importing agent '{agent_name}': {str(e)}"
            logger.error(error_msg)
            errors.append(error_msg)
    # If no agents were imported successfully, raise an error
    if not imported_agents and errors:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={"message": "Failed to import any agents", "errors": errors},
        )
    return imported_agents

View File

@ -181,7 +181,13 @@ def get_session_events(
if not hasattr(session, "events") or session.events is None:
return []
return session.events
sorted_events = sorted(
session.events,
key=lambda event: event.timestamp if hasattr(event, "timestamp") else 0,
)
return sorted_events
except HTTPException:
# Passes HTTP exceptions from get_session_by_id
raise

View File

@ -77,7 +77,7 @@ logger = logging.getLogger(__name__)
class A2AImplementation(Enum):
"""Tipo de implementação A2A."""
"""A2A implementation type."""
CUSTOM = "custom"
SDK = "sdk"
@ -86,7 +86,7 @@ class A2AImplementation(Enum):
@dataclass
class A2AClientConfig:
"""Configuração do cliente A2A."""
"""A2A client configuration."""
base_url: str
api_key: str
@ -97,7 +97,7 @@ class A2AClientConfig:
@dataclass
class A2AResponse:
"""Resposta unificada do A2A."""
"""A2A unified response."""
success: bool
data: Optional[Any] = None
@ -108,10 +108,10 @@ class A2AResponse:
class EnhancedA2AClient:
"""
Cliente A2A melhorado que suporta tanto implementação custom quanto SDK oficial.
Enhanced A2A client that supports both custom implementation and official SDK.
Detecta automaticamente a melhor implementação disponível e fornece
interface unificada para comunicação com agents A2A.
Automatically detects and uses the best available implementation
and provides a unified interface for communication with A2A agents.
"""
def __init__(self, config: A2AClientConfig):
@ -131,8 +131,8 @@ class EnhancedA2AClient:
await self.close()
async def initialize(self):
"""Inicializa o cliente e detecta implementações disponíveis."""
# Inicializa HTTP client
"""Initialize the client and detect available implementations."""
# Initialize HTTP client
headers = {"x-api-key": self.config.api_key, "Content-Type": "application/json"}
if self.config.custom_headers:
headers.update(self.config.custom_headers)
@ -141,15 +141,15 @@ class EnhancedA2AClient:
timeout=self.config.timeout, headers=headers
)
# Detecta implementações disponíveis
# Detect available implementations
await self._detect_available_implementations()
# Inicializa SDK client se disponível
# Initialize SDK client if available
if A2AImplementation.SDK in self.available_implementations and SDK_AVAILABLE:
await self._initialize_sdk_client()
async def close(self):
"""Fecha recursos do cliente."""
"""Close client resources."""
if self.httpx_client:
await self.httpx_client.aclose()
@ -158,10 +158,10 @@ class EnhancedA2AClient:
pass
async def _detect_available_implementations(self):
"""Detecta quais implementações estão disponíveis no servidor."""
"""Detect which implementations are available on the server."""
implementations = []
# Testa implementação custom
# Test custom implementation
try:
custom_health_url = f"{self.config.base_url}/api/v1/a2a/health"
response = await self.httpx_client.get(custom_health_url)
@ -171,7 +171,7 @@ class EnhancedA2AClient:
except Exception as e:
logger.debug(f"Custom implementation not available: {e}")
# Testa implementação SDK
# Test SDK implementation
try:
sdk_health_url = f"{self.config.base_url}/api/v1/a2a-sdk/health"
response = await self.httpx_client.get(sdk_health_url)
@ -187,14 +187,14 @@ class EnhancedA2AClient:
)
async def _initialize_sdk_client(self):
"""Inicializa cliente SDK se disponível."""
"""Initialize SDK client if available."""
if not SDK_AVAILABLE:
logger.warning("SDK not available for client initialization")
return
try:
# Para o SDK client, precisamos descobrir agents disponíveis
# Por enquanto, mantemos None e inicializamos conforme necessário
# For the SDK client, we need to discover available agents
# For now, we keep None and initialize as needed
self.sdk_client = None
logger.info("SDK client initialization prepared")
except Exception as e:
@ -203,7 +203,7 @@ class EnhancedA2AClient:
def _choose_implementation(
self, preferred: Optional[A2AImplementation] = None
) -> A2AImplementation:
"""Escolhe a melhor implementação baseado na preferência e disponibilidade."""
"""Choose the best implementation based on preference and availability."""
if preferred and preferred in self.available_implementations:
return preferred
@ -216,7 +216,7 @@ class EnhancedA2AClient:
f"falling back to auto-selection"
)
# Auto-seleção: prefere SDK se disponível, senão custom
# Auto-selection: prefer SDK if available, otherwise custom
if A2AImplementation.SDK in self.available_implementations:
return A2AImplementation.SDK
elif A2AImplementation.CUSTOM in self.available_implementations:
@ -230,11 +230,11 @@ class EnhancedA2AClient:
implementation: Optional[A2AImplementation] = None,
) -> A2AResponse:
"""
Obtém agent card usando a implementação especificada ou a melhor disponível.
Get agent card using the specified implementation or the best available.
"""
agent_id_str = str(agent_id)
# Verifica cache
# Check
cache_key = f"{agent_id_str}_{implementation}"
if cache_key in self._agent_cards_cache:
logger.debug(f"Returning cached agent card for {agent_id_str}")
@ -265,7 +265,7 @@ class EnhancedA2AClient:
)
async def _get_agent_card_custom(self, agent_id: str) -> A2AResponse:
"""Obtém agent card usando implementação custom."""
"""Get agent card using custom implementation."""
url = f"{self.config.base_url}/api/v1/a2a/{agent_id}/.well-known/agent.json"
response = await self.httpx_client.get(url)
@ -275,7 +275,7 @@ class EnhancedA2AClient:
return A2AResponse(success=True, data=data, raw_response=response)
async def _get_agent_card_sdk(self, agent_id: str) -> A2AResponse:
"""Obtém agent card usando implementação SDK."""
"""Get agent card using SDK implementation."""
url = f"{self.config.base_url}/api/v1/a2a-sdk/{agent_id}/.well-known/agent.json"
response = await self.httpx_client.get(url)
@ -293,7 +293,7 @@ class EnhancedA2AClient:
metadata: Optional[Dict[str, Any]] = None,
) -> A2AResponse:
"""
Envia mensagem para agent usando a implementação especificada.
Send message to agent using the specified implementation.
"""
agent_id_str = str(agent_id)
session_id = session_id or str(uuid4())
@ -328,19 +328,19 @@ class EnhancedA2AClient:
session_id: str,
metadata: Optional[Dict[str, Any]],
) -> A2AResponse:
"""Envia mensagem usando implementação custom."""
"""Send message using custom implementation."""
url = f"{self.config.base_url}/api/v1/a2a/{agent_id}"
# Cria mensagem no formato custom
# Create message in custom format
custom_message = CustomMessage(
role="user", parts=[{"type": "text", "text": message}], metadata=metadata
)
# Cria request usando método correto da especificação A2A
# Create request using correct method from A2A specification
request_data = {
"jsonrpc": "2.0",
"id": str(uuid4()),
"method": "tasks/send", # Método correto da especificação A2A
"method": "tasks/send", # Correct method from A2A specification
"params": {
"id": str(uuid4()),
"sessionId": session_id,
@ -365,17 +365,17 @@ class EnhancedA2AClient:
session_id: str,
metadata: Optional[Dict[str, Any]],
) -> A2AResponse:
"""Envia mensagem usando implementação SDK - usa Message API conforme especificação."""
"""Send message using SDK implementation - uses Message API according to specification."""
if not SDK_AVAILABLE:
raise ValueError("SDK not available")
# Para implementação SDK, usamos o endpoint SDK
# For SDK implementation, we use the SDK endpoint
url = f"{self.config.base_url}/api/v1/a2a-sdk/{agent_id}"
# Message API conforme especificação oficial - apenas message nos params
# Message API according to official specification - only message in params
message_id = str(uuid4())
# Formato exato da especificação oficial
# Exact format according to official specification
request_data = {
"jsonrpc": "2.0",
"id": str(uuid4()),
@ -385,11 +385,11 @@ class EnhancedA2AClient:
"role": "user",
"parts": [
{
"type": "text", # Especificação usa "type" não "kind"
"type": "text", # Specification uses "type" not "kind"
"text": message,
}
],
"messageId": message_id, # Obrigatório conforme especificação
"messageId": message_id, # According to specification
}
},
}
@ -409,7 +409,7 @@ class EnhancedA2AClient:
metadata: Optional[Dict[str, Any]] = None,
) -> AsyncIterator[A2AResponse]:
"""
Envia mensagem com streaming usando a implementação especificada.
Send message with streaming using the specified implementation.
"""
agent_id_str = str(agent_id)
session_id = session_id or str(uuid4())
@ -445,15 +445,15 @@ class EnhancedA2AClient:
session_id: str,
metadata: Optional[Dict[str, Any]],
) -> AsyncIterator[A2AResponse]:
"""Envia mensagem com streaming usando implementação custom - usa Task API."""
"""Send message with streaming using custom implementation - uses Task API."""
url = f"{self.config.base_url}/api/v1/a2a/{agent_id}/subscribe"
# Cria mensagem no formato custom
# Create message in custom format
custom_message = CustomMessage(
role="user", parts=[{"type": "text", "text": message}], metadata=metadata
)
# Nossa implementação custom usa Task API (tasks/subscribe)
# Our custom implementation uses Task API (tasks/subscribe)
request_data = {
"jsonrpc": "2.0",
"id": str(uuid4()),
@ -489,16 +489,16 @@ class EnhancedA2AClient:
session_id: str,
metadata: Optional[Dict[str, Any]],
) -> AsyncIterator[A2AResponse]:
"""Envia mensagem com streaming usando implementação SDK - usa Message API conforme especificação."""
"""Send message with streaming using SDK implementation - uses Message API according to specification."""
if not SDK_AVAILABLE:
raise ValueError("SDK not available")
url = f"{self.config.base_url}/api/v1/a2a-sdk/{agent_id}"
# Message API conforme especificação oficial - apenas message nos params
# Message API according to official specification - only message in params
message_id = str(uuid4())
# Formato exato da especificação oficial para streaming
# Exact format according to official specification for streaming
request_data = {
"jsonrpc": "2.0",
"id": str(uuid4()),
@ -508,11 +508,11 @@ class EnhancedA2AClient:
"role": "user",
"parts": [
{
"type": "text", # Especificação usa "type" não "kind"
"type": "text", # Specification uses "type" not "kind"
"text": message,
}
],
"messageId": message_id, # Obrigatório conforme especificação
"messageId": message_id, # According to specification
}
},
}
@ -534,7 +534,7 @@ class EnhancedA2AClient:
self, agent_id: Union[str, UUID]
) -> Dict[str, Any]:
"""
Compara as duas implementações para um agent específico.
Compare the two implementations for a specific agent.
"""
agent_id_str = str(agent_id)
comparison = {
@ -547,7 +547,7 @@ class EnhancedA2AClient:
"differences": [],
}
# Obtém cards de ambas as implementações
# Get cards from both implementations
if A2AImplementation.CUSTOM in self.available_implementations:
try:
custom_response = await self._get_agent_card_custom(agent_id_str)
@ -564,12 +564,12 @@ class EnhancedA2AClient:
except Exception as e:
comparison["sdk_error"] = str(e)
# Compara se ambas estão disponíveis
# Compare if both are available
if comparison["custom_card"] and comparison["sdk_card"]:
custom = comparison["custom_card"]
sdk = comparison["sdk_card"]
# Lista de campos para comparar
# List of fields to compare
fields_to_compare = ["name", "description", "version", "url"]
for field in fields_to_compare:
@ -586,7 +586,7 @@ class EnhancedA2AClient:
async def health_check(self) -> Dict[str, Any]:
"""
Verifica saúde de todas as implementações disponíveis.
Check health of all available implementations.
"""
health = {
"client_initialized": True,
@ -596,7 +596,7 @@ class EnhancedA2AClient:
"implementations_health": {},
}
# Testa custom implementation
# Test custom implementation
try:
custom_health_url = f"{self.config.base_url}/api/v1/a2a/health"
response = await self.httpx_client.get(custom_health_url)
@ -611,7 +611,7 @@ class EnhancedA2AClient:
"error": str(e),
}
# Testa SDK implementation
# Test SDK implementation
try:
sdk_health_url = f"{self.config.base_url}/api/v1/a2a-sdk/health"
response = await self.httpx_client.get(sdk_health_url)
@ -629,22 +629,22 @@ class EnhancedA2AClient:
return health
async def _detect_implementation(self) -> A2AImplementation:
"""Detecta automaticamente a implementação disponível."""
"""Detect automatically the available implementation."""
logger.info("Auto-detecting A2A implementation...")
# Se forçamos uma implementação específica, use-a
# If we force a specific implementation, use it
if self.config.implementation != A2AImplementation.AUTO:
logger.info(
f"Using forced implementation: {self.config.implementation.value}"
)
return self.config.implementation
# Se temos agent_id, verifica especificamente baseado na URL de health check
# If we have agent_id, check specifically based on health check URL
agent_id = getattr(self, "_current_agent_id", None)
implementations_to_try = []
# Se o agent_id foi detectado como sendo de uma URL SDK específica, prefira SDK
# If the agent_id was detected as being from a specific SDK URL, prefer SDK
if (
agent_id
and hasattr(self, "_prefer_sdk_from_url")
@ -678,12 +678,12 @@ class EnhancedA2AClient:
except Exception as e:
logger.info(f"{impl.value} implementation failed: {str(e)}")
# Fallback para custom se nada funcionar
# Fallback to custom if nothing works
logger.warning("No implementation detected, falling back to CUSTOM")
return A2AImplementation.CUSTOM
# Função utilitária para criar cliente facilmente
# Utility function to create client easily
async def create_enhanced_a2a_client(
base_url: str,
api_key: str,
@ -691,7 +691,7 @@ async def create_enhanced_a2a_client(
**kwargs,
) -> EnhancedA2AClient:
"""
Função utilitária para criar e inicializar cliente A2A melhorado.
Utility function to create and initialize enhanced A2A client.
"""
config = A2AClientConfig(
base_url=base_url, api_key=api_key, implementation=implementation, **kwargs
@ -702,9 +702,9 @@ async def create_enhanced_a2a_client(
return client
# Exemplo de uso
# Example of usage
async def example_usage():
"""Exemplo de como usar o cliente melhorado."""
"""Example of how to use the enhanced client."""
config = A2AClientConfig(
base_url="http://localhost:8000",
api_key="your-api-key",