feat(dependencies): add psycopg2 for PostgreSQL support and update README description
parent 01d50af7ad
commit b8a95e047f

@@ -1,6 +1,6 @@
 # Evo AI - AI Agents Platform
 
-Evo AI is an open-source platform for creating and managing AI agents, enabling integration with different AI models and services.
+Evo AI is a free platform for creating and managing AI agents, enabling integration with different AI models and services.
 
 ## 🚀 Overview

@@ -1,33 +0,0 @@
-"""add_a2a_fields_in_agents_table
-
-Revision ID: 07ac76cc090a
-Revises: 6cd898ec9f7c
-Create Date: 2025-04-30 17:32:29.582234
-
-"""
-
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = "07ac76cc090a"
-down_revision: Union[str, None] = "6cd898ec9f7c"
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Upgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###

@@ -1,32 +0,0 @@
-"""add_a2a_fields_in_agents_table
-
-Revision ID: 545d3083200b
-Revises: 07ac76cc090a
-Create Date: 2025-04-30 17:35:31.573159
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = '545d3083200b'
-down_revision: Union[str, None] = '07ac76cc090a'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Upgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('agents', sa.Column('agent_card_url', sa.String(), nullable=True))
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('agents', 'agent_card_url')
-    # ### end Alembic commands ###

@@ -1,34 +0,0 @@
-"""add_email_field_on_clients_table
-
-Revision ID: 6cd898ec9f7c
-Revises: ab6f3a31f3e8
-Create Date: 2025-04-28 15:52:26.406846
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = '6cd898ec9f7c'
-down_revision: Union[str, None] = 'ab6f3a31f3e8'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Upgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('clients', sa.Column('email', sa.String(), nullable=False))
-    op.create_index(op.f('ix_clients_email'), 'clients', ['email'], unique=True)
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index(op.f('ix_clients_email'), table_name='clients')
-    op.drop_column('clients', 'email')
-    # ### end Alembic commands ###

@@ -1,32 +0,0 @@
-"""init migration
-
-Revision ID: ab6f3a31f3e8
-Revises:
-Create Date: 2025-04-28 15:37:40.885065
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = 'ab6f3a31f3e8'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Upgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###

migrations/versions/c107446e38aa_init_migrations.py (new file, 132 lines)
@@ -0,0 +1,132 @@
+"""init migrations
+
+Revision ID: c107446e38aa
+Revises:
+Create Date: 2025-05-02 08:01:10.713496
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'c107446e38aa'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('clients',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('email', sa.String(), nullable=False),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_clients_email'), 'clients', ['email'], unique=True)
+    op.create_table('mcp_servers',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('config_type', sa.String(), nullable=False),
+        sa.Column('config_json', sa.JSON(), nullable=False),
+        sa.Column('environments', sa.JSON(), nullable=False),
+        sa.Column('tools', sa.JSON(), nullable=False),
+        sa.Column('type', sa.String(), nullable=False),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
+        sa.CheckConstraint("config_type IN ('studio', 'sse')", name='check_mcp_server_config_type'),
+        sa.CheckConstraint("type IN ('official', 'community')", name='check_mcp_server_type'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('tools',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('config_json', sa.JSON(), nullable=False),
+        sa.Column('environments', sa.JSON(), nullable=False),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('agents',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('client_id', sa.UUID(), nullable=True),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('type', sa.String(), nullable=False),
+        sa.Column('model', sa.String(), nullable=True),
+        sa.Column('api_key', sa.String(), nullable=True),
+        sa.Column('instruction', sa.Text(), nullable=True),
+        sa.Column('agent_card_url', sa.String(), nullable=True),
+        sa.Column('config', sa.JSON(), nullable=True),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
+        sa.CheckConstraint("type IN ('llm', 'sequential', 'parallel', 'loop', 'a2a')", name='check_agent_type'),
+        sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('contacts',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('client_id', sa.UUID(), nullable=True),
+        sa.Column('ext_id', sa.String(), nullable=True),
+        sa.Column('name', sa.String(), nullable=True),
+        sa.Column('meta', sa.JSON(), nullable=True),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
+        sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('users',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('email', sa.String(), nullable=False),
+        sa.Column('password_hash', sa.String(), nullable=False),
+        sa.Column('client_id', sa.UUID(), nullable=True),
+        sa.Column('is_active', sa.Boolean(), nullable=True),
+        sa.Column('is_admin', sa.Boolean(), nullable=True),
+        sa.Column('email_verified', sa.Boolean(), nullable=True),
+        sa.Column('verification_token', sa.String(), nullable=True),
+        sa.Column('verification_token_expiry', sa.DateTime(timezone=True), nullable=True),
+        sa.Column('password_reset_token', sa.String(), nullable=True),
+        sa.Column('password_reset_expiry', sa.DateTime(timezone=True), nullable=True),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
+        sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
+    op.create_table('audit_logs',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('user_id', sa.UUID(), nullable=True),
+        sa.Column('action', sa.String(), nullable=False),
+        sa.Column('resource_type', sa.String(), nullable=False),
+        sa.Column('resource_id', sa.String(), nullable=True),
+        sa.Column('details', sa.JSON(), nullable=True),
+        sa.Column('ip_address', sa.String(), nullable=True),
+        sa.Column('user_agent', sa.String(), nullable=True),
+        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
+        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('audit_logs')
+    op.drop_index(op.f('ix_users_email'), table_name='users')
+    op.drop_table('users')
+    op.drop_table('contacts')
+    op.drop_table('agents')
+    op.drop_table('tools')
+    op.drop_table('mcp_servers')
+    op.drop_index(op.f('ix_clients_email'), table_name='clients')
+    op.drop_table('clients')
+    # ### end Alembic commands ###
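
The four incremental revisions above are squashed into this single init migration (its down_revision is None), so a fresh database is built in one step. A minimal sketch of applying the new head programmatically — equivalent to running `alembic upgrade head` — assuming the repository keeps a standard alembic.ini at the project root:

# Sketch only: the alembic.ini path is an assumption about the project layout.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head")  # applies c107446e38aa on a fresh database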

@@ -25,6 +25,7 @@ dependencies = [
     "uvicorn==0.34.2",
     "pydantic==2.11.3",
     "sqlalchemy==2.0.40",
+    "psycopg2==2.9.10",
     "psycopg2-binary==2.9.10",
     "google-cloud-aiplatform==1.90.0",
     "python-dotenv==1.1.0",
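
Note that psycopg2 and psycopg2-binary pin the same release; in practice only one of the two is needed, since psycopg2-binary ships the same module with a bundled libpq. Either way, SQLAlchemy picks the driver up from the URL scheme. A minimal sketch with a placeholder DSN, not the project's real configuration:

# Placeholder DSN for illustration only.
from sqlalchemy import create_engine

engine = create_engine("postgresql+psycopg2://user:password@localhost:5432/evo_ai")
with engine.connect() as conn:
    # Round-trip through the psycopg2 driver to confirm connectivity.
    print(conn.exec_driver_sql("SELECT version()").scalar())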

@@ -54,6 +54,7 @@ def create_mcp_servers():
         {
             "name": "Sequential Thinking",
             "description": "Sequential Thinking helps users organize their thoughts and break down complex problems through a structured workflow. By guiding users through defined cognitive stages like Problem Definition, Research, Analysis, Synthesis, and Conclusion, it provides a framework for progressive thinking. The server tracks the progression of your thinking process, identifies connections between similar thoughts, monitors progress, and generates summaries, making it easier to approach challenges methodically and reach well-reasoned conclusions.",
+            "config_type": "studio",
             "config_json": {
                 "command": "npx",
                 "args": [

@@ -84,6 +85,7 @@ def create_mcp_servers():
         {
             "name": "CloudFlare",
             "description": "Model Context Protocol (MCP) is a new, standardized protocol for managing context between large language models (LLMs) and external systems. In this repository, we provide an installer as well as an MCP Server for Cloudflare's API.\r\n\r\nThis lets you use Claude Desktop, or any MCP Client, to use natural language to accomplish things on your Cloudflare account, e.g.:\r\n\r\nList all the Cloudflare workers on my <some-email>@gmail.com account.\r\nCan you tell me about any potential issues on this particular worker '...'?",
+            "config_type": "sse",
             "config_json": {
                 "url": "https://observability.mcp.cloudflare.com/sse"
             },

@@ -122,6 +124,7 @@ def create_mcp_servers():
         {
             "name": "Brave Search",
             "description": "Brave Search allows you to seamlessly integrate Brave Search functionality into AI assistants like Claude. By implementing a Model Context Protocol (MCP) server, it enables the AI to leverage Brave Search's web search and local business search capabilities. It provides tools for both general web searches and specific local searches, enhancing the AI assistant's ability to provide relevant and up-to-date information.",
+            "config_type": "studio",
             "config_json": {
                 "command": "npx",
                 "args": ["-y", "@modelcontextprotocol/server-brave-search"],

@@ -166,6 +169,7 @@ def create_mcp_servers():
             server = MCPServer(
                 name=server_data["name"],
                 description=server_data["description"],
+                config_type=server_data["config_type"],
                 config_json=server_data["config_json"],
                 environments=server_data["environments"],
                 tools=server_data["tools"],

@@ -32,7 +32,7 @@ router = APIRouter(
 
 
 # Session Routes
-@router.get("/client/{client_id}", response_model=List[Adk_Session])
+@router.get("/client/{client_id}")
 async def get_client_sessions(
     client_id: uuid.UUID,
     db: Session = Depends(get_db),

@@ -43,7 +43,7 @@ async def get_client_sessions(
     return get_sessions_by_client(db, client_id)
 
 
-@router.get("/agent/{agent_id}", response_model=List[Adk_Session])
+@router.get("/agent/{agent_id}")
 async def get_agent_sessions(
     agent_id: uuid.UUID,
     db: Session = Depends(get_db),

@@ -146,6 +146,7 @@ class MCPServer(Base):
     id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     name = Column(String, nullable=False)
     description = Column(Text, nullable=True)
+    config_type = Column(String, nullable=False, default="studio")
     config_json = Column(JSON, nullable=False, default={})
     environments = Column(JSON, nullable=False, default={})
     tools = Column(JSON, nullable=False, default=[])

@@ -157,6 +158,9 @@ class MCPServer(Base):
         CheckConstraint(
             "type IN ('official', 'community')", name="check_mcp_server_type"
         ),
+        CheckConstraint(
+            "config_type IN ('studio', 'sse')", name="check_mcp_server_config_type"
+        ),
     )
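
With the new check constraint, an invalid config_type is rejected at the database layer, not just in the Pydantic schema. A quick sketch, assuming a migrated database and a live SQLAlchemy session `db`:

# Sketch: 'websocket' is not in ('studio', 'sse'), so the INSERT fails.
from sqlalchemy.exc import IntegrityError

bad = MCPServer(
    name="Example",
    type="community",          # satisfies check_mcp_server_type
    config_type="websocket",   # violates check_mcp_server_config_type
    config_json={},
    environments={},
    tools=[],
)
db.add(bad)
try:
    db.commit()
except IntegrityError:
    db.rollback()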

@@ -190,6 +190,7 @@ class ToolConfig(BaseModel):
 class MCPServerBase(BaseModel):
     name: str
     description: Optional[str] = None
+    config_type: str = Field(default="studio")
     config_json: Dict[str, Any] = Field(default_factory=dict)
     environments: Dict[str, Any] = Field(default_factory=dict)
     tools: List[ToolConfig] = Field(default_factory=list)
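
A quick sketch of the schema defaults after this change, assuming MCPServerBase is imported from the project's schemas module:

server = MCPServerBase(name="Local MCP")
assert server.config_type == "studio"  # new field defaults to "studio"
assert server.config_json == {}
assert server.tools == []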

@@ -12,6 +12,21 @@ import httpx
 logger = logging.getLogger(__name__)
 
 
+def _convert_uuid_to_str(obj):
+    """
+    Recursively convert all UUID objects to strings in a dictionary, list, or scalar value.
+    This ensures JSON serialization works for complex nested objects.
+    """
+    if isinstance(obj, dict):
+        return {key: _convert_uuid_to_str(value) for key, value in obj.items()}
+    elif isinstance(obj, list):
+        return [_convert_uuid_to_str(item) for item in obj]
+    elif isinstance(obj, uuid.UUID):
+        return str(obj)
+    else:
+        return obj
+
+
 def validate_sub_agents(db: Session, sub_agents: List[uuid.UUID]) -> bool:
     """Validate if all sub-agents exist"""
     for agent_id in sub_agents:
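
The helper leaves non-UUID values untouched, so it is safe to run over an entire config. An illustrative round trip:

import json
import uuid

config = {"mcp_servers": [{"id": uuid.uuid4(), "envs": {"API_KEY": "..."}}]}
clean = _convert_uuid_to_str(config)
# Without the conversion, json.dumps raises
# "Object of type UUID is not JSON serializable".
json.dumps(clean)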

@@ -143,8 +158,13 @@ async def create_agent(db: Session, agent: AgentCreate) -> Agent:
         if "mcp_servers" in config:
             processed_servers = []
             for server in config["mcp_servers"]:
+                # Convert server id to UUID if it's a string
+                server_id = server["id"]
+                if isinstance(server_id, str):
+                    server_id = uuid.UUID(server_id)
+
                 # Search for MCP server in the database
-                mcp_server = get_mcp_server(db, server["id"])
+                mcp_server = get_mcp_server(db, server_id)
                 if not mcp_server:
                     raise HTTPException(
                         status_code=400,

@@ -185,7 +205,22 @@ async def create_agent(db: Session, agent: AgentCreate) -> Agent:
 
         agent.config = config
 
-        db_agent = Agent(**agent.model_dump())
+        # Ensure all config objects are serializable (convert UUIDs to strings)
+        if agent.config is not None:
+            agent.config = _convert_uuid_to_str(agent.config)
+
+        # Convert agent to dict ensuring all UUIDs are converted to strings
+        agent_dict = agent.model_dump()
+        agent_dict = _convert_uuid_to_str(agent_dict)
+
+        # Create agent from the processed dictionary
+        db_agent = Agent(**agent_dict)
+
+        # Make one final check to ensure all nested objects are serializable
+        # (especially nested UUIDs in config)
+        if db_agent.config is not None:
+            db_agent.config = _convert_uuid_to_str(db_agent.config)
+
         db.add(db_agent)
         db.commit()
         db.refresh(db_agent)

@@ -195,9 +230,20 @@ async def create_agent(db: Session, agent: AgentCreate) -> Agent:
     except SQLAlchemyError as e:
         db.rollback()
         logger.error(f"Error creating agent: {str(e)}")
+
+        # Add debugging info
+        try:
+            import json
+
+            if "agent_dict" in locals():
+                agent_json = json.dumps(agent_dict)
+                logger.info(f"Agent creation attempt with: {agent_json[:200]}...")
+        except Exception as json_err:
+            logger.error(f"Could not serialize agent for debugging: {str(json_err)}")
+
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Error creating agent",
+            detail=f"Error creating agent: {str(e)}",
         )

@@ -294,8 +340,13 @@ async def update_agent(
         if "mcp_servers" in config:
             processed_servers = []
             for server in config["mcp_servers"]:
+                # Convert server id to UUID if it's a string
+                server_id = server["id"]
+                if isinstance(server_id, str):
+                    server_id = uuid.UUID(server_id)
+
                 # Search for MCP server in the database
-                mcp_server = get_mcp_server(db, server["id"])
+                mcp_server = get_mcp_server(db, server_id)
                 if not mcp_server:
                     raise HTTPException(
                         status_code=400,

@@ -336,6 +387,10 @@ async def update_agent(
 
             agent_data["config"] = config
 
+            # Ensure all config objects are serializable (convert UUIDs to strings)
+            if "config" in agent_data and agent_data["config"] is not None:
+                agent_data["config"] = _convert_uuid_to_str(agent_data["config"])
+
         for key, value in agent_data.items():
             setattr(agent, key, value)

@@ -348,21 +403,23 @@
 
 
 def delete_agent(db: Session, agent_id: uuid.UUID) -> bool:
-    """Remove an agent (soft delete)"""
+    """Remove an agent from the database"""
     try:
         db_agent = get_agent(db, agent_id)
         if not db_agent:
             return False
 
+        # Actually delete the agent from the database
+        db.delete(db_agent)
         db.commit()
-        logger.info(f"Agent deactivated successfully: {agent_id}")
+        logger.info(f"Agent deleted successfully: {agent_id}")
         return True
     except SQLAlchemyError as e:
         db.rollback()
-        logger.error(f"Error deactivating agent {agent_id}: {str(e)}")
+        logger.error(f"Error deleting agent {agent_id}: {str(e)}")
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Error deactivating agent",
+            detail="Error deleting agent",
         )

@@ -11,20 +11,36 @@ from src.services.agent_service import get_agents_by_client
 
 import uuid
 import logging
+from datetime import datetime
 
 logger = logging.getLogger(__name__)
 
 
+def _session_to_dict(session: SessionModel):
+    """Convert Session model to dictionary with created_at field"""
+    result = {
+        "id": session.id,
+        "app_name": session.app_name,
+        "user_id": session.user_id,
+        "state": session.state,
+        "create_time": session.create_time,
+        "update_time": session.update_time,
+        "created_at": session.create_time,
+    }
+    return result
+
+
 def get_sessions_by_client(
     db: Session,
     client_id: uuid.UUID,
-) -> List[SessionModel]:
+) -> List[dict]:
     """Search for sessions of a client with pagination"""
     try:
         agents_by_client = get_agents_by_client(db, client_id)
         sessions = []
         for agent in agents_by_client:
-            sessions.extend(get_sessions_by_agent(db, agent.id))
+            db_sessions = get_sessions_by_agent(db, agent.id)
+            sessions.extend(db_sessions)
+
         return sessions
     except SQLAlchemyError as e:

@@ -40,13 +56,15 @@ def get_sessions_by_agent(
     agent_id: uuid.UUID,
     skip: int = 0,
     limit: int = 100,
-) -> List[SessionModel]:
+) -> List[dict]:
     """Search for sessions of an agent with pagination"""
     try:
         agent_id_str = str(agent_id)
         query = db.query(SessionModel).filter(SessionModel.app_name == agent_id_str)
 
-        return query.offset(skip).limit(limit).all()
+        db_sessions = query.offset(skip).limit(limit).all()
+        # Convert each session to dictionary with created_at field
+        return [_session_to_dict(session) for session in db_sessions]
     except SQLAlchemyError as e:
         logger.error(f"Error searching for sessions of agent {agent_id_str}: {str(e)}")
         raise HTTPException(
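
Because the services now return plain dicts carrying a created_at alias for create_time, the session routes above no longer declare response_model=List[Adk_Session]. An illustrative call, assuming an open Session `db` and a known agent UUID:

sessions = get_sessions_by_agent(db, agent_id)
for s in sessions:
    print(s["id"], s["created_at"])  # created_at mirrors create_time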

@@ -53,7 +53,7 @@ def create_user(
     try:
         # If not admin and no client_id, create an associated client
         if not is_admin and local_client_id is None:
-            client = Client(name=user_data.name)
+            client = Client(name=user_data.name, email=user_data.email)
             db.add(client)
             db.flush()  # Get the client ID
             local_client_id = client.id