diff --git a/.cursorrules b/.cursorrules index e082fc47..8fd09ec6 100644 --- a/.cursorrules +++ b/.cursorrules @@ -41,6 +41,10 @@ src/ │ ├── auth_service.py # JWT authentication logic │ ├── email_service.py # Email sending service │ └── audit_service.py # Audit logs logic +├── templates/ +│ ├── emails/ +│ │ ├── verification_email.html +│ │ └── password_reset.html └── utils/ └── security.py # Security utilities (JWT, hash) ``` diff --git a/.env b/.env index 5eb09e10..c4dc41a3 100644 --- a/.env +++ b/.env @@ -24,14 +24,14 @@ REDIS_PASSWORD="" TOOLS_CACHE_TTL=3600 # Configurações JWT -JWT_SECRET_KEY="sua-chave-secreta-jwt" +JWT_SECRET_KEY="f6884ef5be4c279686ff90f0ed9d4656685eef9807245019ac94a3fbe32b0938" JWT_ALGORITHM="HS256" -JWT_EXPIRATION_TIME=30 # Em minutos +JWT_EXPIRATION_TIME=3600 # Em minutos # SendGrid -SENDGRID_API_KEY="sua-sendgrid-api-key" -EMAIL_FROM="noreply@yourdomain.com" -APP_URL="https://yourdomain.com" +SENDGRID_API_KEY="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" +EMAIL_FROM="noreply@evolution-api.com" +APP_URL="https://evoai.evoapicloud.com" # Configurações do Servidor HOST="0.0.0.0" diff --git a/.venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc b/.venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc index 5f7c2f39..fd97b0bd 100644 Binary files a/.venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc and b/.venv/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc differ diff --git a/.venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc b/.venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc index af817593..18580b3a 100644 Binary files a/.venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc and b/.venv/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc differ diff --git a/.venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc b/.venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc index 461df410..0c1aae85 100644 Binary files a/.venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc and b/.venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc differ diff --git a/migrations/versions/4a61703e9b7e_allow_null_model_and_api_key.py b/migrations/versions/4a61703e9b7e_allow_null_model_and_api_key.py deleted file mode 100644 index 4cb71688..00000000 --- a/migrations/versions/4a61703e9b7e_allow_null_model_and_api_key.py +++ /dev/null @@ -1,90 +0,0 @@ -"""allow_null_model_and_api_key - -Revision ID: 4a61703e9b7e -Revises: 9d819594ac9b -Create Date: 2025-04-28 12:04:33.607371 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = '4a61703e9b7e' -down_revision: Union[str, None] = '9d819594ac9b' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('events') - op.drop_table('user_states') - op.drop_table('app_states') - op.drop_table('sessions') - op.alter_column('agents', 'model', - existing_type=sa.VARCHAR(), - nullable=True) - op.alter_column('agents', 'api_key', - existing_type=sa.VARCHAR(), - nullable=True) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('agents', 'api_key', - existing_type=sa.VARCHAR(), - nullable=False) - op.alter_column('agents', 'model', - existing_type=sa.VARCHAR(), - nullable=False) - op.create_table('sessions', - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('state', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), - sa.Column('create_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.Column('update_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('app_name', 'user_id', 'id', name='sessions_pkey'), - postgresql_ignore_search_path=False - ) - op.create_table('app_states', - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('state', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), - sa.Column('update_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('app_name', name='app_states_pkey') - ) - op.create_table('user_states', - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('state', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), - sa.Column('update_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('app_name', 'user_id', name='user_states_pkey') - ) - op.create_table('events', - sa.Column('id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('session_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('invocation_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('author', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('branch', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('timestamp', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.Column('content', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('actions', postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column('long_running_tool_ids_json', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('grounding_metadata', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('partial', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('turn_complete', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('error_code', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('error_message', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('interrupted', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['app_name', 'user_id', 
'session_id'], ['sessions.app_name', 'sessions.user_id', 'sessions.id'], name='events_app_name_user_id_session_id_fkey', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'app_name', 'user_id', 'session_id', name='events_pkey') - ) - # ### end Alembic commands ### diff --git a/migrations/versions/6cd898ec9f7c_add_email_field_on_clients_table.py b/migrations/versions/6cd898ec9f7c_add_email_field_on_clients_table.py new file mode 100644 index 00000000..69919cf1 --- /dev/null +++ b/migrations/versions/6cd898ec9f7c_add_email_field_on_clients_table.py @@ -0,0 +1,34 @@ +"""add_email_field_on_clients_table + +Revision ID: 6cd898ec9f7c +Revises: ab6f3a31f3e8 +Create Date: 2025-04-28 15:52:26.406846 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '6cd898ec9f7c' +down_revision: Union[str, None] = 'ab6f3a31f3e8' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('clients', sa.Column('email', sa.String(), nullable=False)) + op.create_index(op.f('ix_clients_email'), 'clients', ['email'], unique=True) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_clients_email'), table_name='clients') + op.drop_column('clients', 'email') + # ### end Alembic commands ### diff --git a/migrations/versions/98780d4fb293_add_audit_table.py b/migrations/versions/98780d4fb293_add_audit_table.py deleted file mode 100644 index dbd60b9a..00000000 --- a/migrations/versions/98780d4fb293_add_audit_table.py +++ /dev/null @@ -1,44 +0,0 @@ -"""add_audit_table - -Revision ID: 98780d4fb293 -Revises: f11fb4060739 -Create Date: 2025-04-28 15:17:10.491183 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '98780d4fb293' -down_revision: Union[str, None] = 'f11fb4060739' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('audit_logs', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('user_id', sa.UUID(), nullable=True), - sa.Column('action', sa.String(), nullable=False), - sa.Column('resource_type', sa.String(), nullable=False), - sa.Column('resource_id', sa.String(), nullable=True), - sa.Column('details', sa.JSON(), nullable=True), - sa.Column('ip_address', sa.String(), nullable=True), - sa.Column('user_agent', sa.String(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('audit_logs') - # ### end Alembic commands ### diff --git a/migrations/versions/9d819594ac9b_init_migration.py b/migrations/versions/9d819594ac9b_init_migration.py deleted file mode 100644 index a02e3819..00000000 --- a/migrations/versions/9d819594ac9b_init_migration.py +++ /dev/null @@ -1,91 +0,0 @@ -"""init migration - -Revision ID: 9d819594ac9b -Revises: -Create Date: 2025-04-28 11:53:49.375964 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '9d819594ac9b' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('clients', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), - sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('mcp_servers', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('config_json', sa.JSON(), nullable=False), - sa.Column('environments', sa.JSON(), nullable=False), - sa.Column('type', sa.String(), nullable=False), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), - sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), - sa.CheckConstraint("type IN ('official', 'community')", name='check_mcp_server_type'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('tools', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('name', sa.String(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('config_json', sa.JSON(), nullable=False), - sa.Column('environments', sa.JSON(), nullable=False), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), - sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('agents', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('client_id', sa.UUID(), nullable=True), - sa.Column('name', sa.String(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('type', sa.String(), nullable=False), - sa.Column('model', sa.String(), nullable=False), - sa.Column('api_key', sa.String(), nullable=False), - sa.Column('instruction', sa.Text(), nullable=True), - sa.Column('config', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), - sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), - sa.CheckConstraint("type IN ('llm', 'sequential', 'parallel', 'loop')", name='check_agent_type'), - sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('contacts', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('client_id', sa.UUID(), nullable=True), - sa.Column('ext_id', sa.String(), nullable=True), - sa.Column('name', sa.String(), nullable=True), - sa.Column('meta', sa.JSON(), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), 
server_default=sa.text('now()'), nullable=True), - sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), - sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('contacts') - op.drop_table('agents') - op.drop_table('tools') - op.drop_table('mcp_servers') - op.drop_table('clients') - # ### end Alembic commands ### diff --git a/migrations/versions/2d612b95d0ea_add_tools_field_to_mcp_servers.py b/migrations/versions/ab6f3a31f3e8_init_migration.py similarity index 58% rename from migrations/versions/2d612b95d0ea_add_tools_field_to_mcp_servers.py rename to migrations/versions/ab6f3a31f3e8_init_migration.py index bb8d76d7..1a8ef2a3 100644 --- a/migrations/versions/2d612b95d0ea_add_tools_field_to_mcp_servers.py +++ b/migrations/versions/ab6f3a31f3e8_init_migration.py @@ -1,19 +1,19 @@ -"""add_tools_field_to_mcp_servers +"""init migration -Revision ID: 2d612b95d0ea -Revises: da8e7fb4da5d -Create Date: 2025-04-28 12:39:21.430144 +Revision ID: ab6f3a31f3e8 +Revises: +Create Date: 2025-04-28 15:37:40.885065 """ from typing import Sequence, Union from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql + # revision identifiers, used by Alembic. -revision: str = '2d612b95d0ea' -down_revision: Union[str, None] = 'da8e7fb4da5d' +revision: str = 'ab6f3a31f3e8' +down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -21,12 +21,12 @@ depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: """Upgrade schema.""" # ### commands auto generated by Alembic - please adjust! ### - op.add_column('mcp_servers', sa.Column('tools', sa.JSON(), nullable=False, server_default='[]')) + pass # ### end Alembic commands ### def downgrade() -> None: """Downgrade schema.""" # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('mcp_servers', 'tools') + pass # ### end Alembic commands ### diff --git a/migrations/versions/da8e7fb4da5d_fix_agent_table.py b/migrations/versions/da8e7fb4da5d_fix_agent_table.py deleted file mode 100644 index 3e0c90ec..00000000 --- a/migrations/versions/da8e7fb4da5d_fix_agent_table.py +++ /dev/null @@ -1,78 +0,0 @@ -"""fix_agent_table - -Revision ID: da8e7fb4da5d -Revises: 4a61703e9b7e -Create Date: 2025-04-28 12:29:31.292844 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = 'da8e7fb4da5d' -down_revision: Union[str, None] = '4a61703e9b7e' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('user_states') - op.drop_table('app_states') - op.drop_table('events') - op.drop_table('sessions') - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('sessions', - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('state', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), - sa.Column('create_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.Column('update_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('app_name', 'user_id', 'id', name='sessions_pkey'), - postgresql_ignore_search_path=False - ) - op.create_table('events', - sa.Column('id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('session_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('invocation_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('author', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('branch', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('timestamp', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.Column('content', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('actions', postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column('long_running_tool_ids_json', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('grounding_metadata', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True), - sa.Column('partial', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('turn_complete', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column('error_code', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('error_message', sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column('interrupted', sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['app_name', 'user_id', 'session_id'], ['sessions.app_name', 'sessions.user_id', 'sessions.id'], name='events_app_name_user_id_session_id_fkey', ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id', 'app_name', 'user_id', 'session_id', name='events_pkey') - ) - op.create_table('app_states', - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('state', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), - sa.Column('update_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('app_name', name='app_states_pkey') - ) - op.create_table('user_states', - sa.Column('app_name', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('user_id', sa.VARCHAR(), autoincrement=False, nullable=False), - sa.Column('state', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), - sa.Column('update_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), - sa.PrimaryKeyConstraint('app_name', 'user_id', name='user_states_pkey') - ) - # ### end Alembic commands ### diff --git a/migrations/versions/f11fb4060739_add_user_table.py b/migrations/versions/f11fb4060739_add_user_table.py deleted file mode 100644 index 7070612e..00000000 --- a/migrations/versions/f11fb4060739_add_user_table.py +++ /dev/null @@ -1,50 +0,0 @@ -"""add_user_table - -Revision ID: f11fb4060739 -Revises: 2d612b95d0ea -Create Date: 2025-04-28 
15:01:34.432588 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = 'f11fb4060739' -down_revision: Union[str, None] = '2d612b95d0ea' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - """Upgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('users', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('email', sa.String(), nullable=False), - sa.Column('password_hash', sa.String(), nullable=False), - sa.Column('client_id', sa.UUID(), nullable=True), - sa.Column('is_active', sa.Boolean(), nullable=True), - sa.Column('is_admin', sa.Boolean(), nullable=True), - sa.Column('email_verified', sa.Boolean(), nullable=True), - sa.Column('verification_token', sa.String(), nullable=True), - sa.Column('verification_token_expiry', sa.DateTime(timezone=True), nullable=True), - sa.Column('password_reset_token', sa.String(), nullable=True), - sa.Column('password_reset_expiry', sa.DateTime(timezone=True), nullable=True), - sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), - sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), - sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) - # ### end Alembic commands ### - - -def downgrade() -> None: - """Downgrade schema.""" - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index(op.f('ix_users_email'), table_name='users') - op.drop_table('users') - # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt index 8bfa5cf4..3ad02cac 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,4 +15,8 @@ asyncpg python-jose[cryptography] passlib[bcrypt] sendgrid -pydantic[email] \ No newline at end of file +pydantic[email] +pydantic-settings +fastapi_utils +bcrypt +jinja2 \ No newline at end of file diff --git a/scripts/seeders/mcp_server_seeder.py b/scripts/seeders/mcp_server_seeder.py index 77a9a0fc..1ff7142a 100644 --- a/scripts/seeders/mcp_server_seeder.py +++ b/scripts/seeders/mcp_server_seeder.py @@ -17,105 +17,169 @@ from dotenv import load_dotenv from src.models.models import MCPServer # Configurar logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) logger = logging.getLogger(__name__) + def create_mcp_servers(): """Cria servidores MCP padrão no sistema""" try: # Carregar variáveis de ambiente load_dotenv() - + # Obter configurações do banco de dados db_url = os.getenv("POSTGRES_CONNECTION_STRING") if not db_url: logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") return False - + # Conectar ao banco de dados engine = create_engine(db_url) Session = sessionmaker(bind=engine) session = Session() - + try: # Verificar se já existem servidores MCP existing_servers = session.query(MCPServer).all() if existing_servers: - logger.info(f"Já existem {len(existing_servers)} servidores MCP cadastrados") + logger.info( + f"Já existem {len(existing_servers)} servidores MCP cadastrados" + ) return True - + # Definições dos servidores MCP mcp_servers = [ { - "name": "Anthropic Claude", - "description": "Servidor para modelos 
Claude da Anthropic", + "name": "Sequential Thinking", + "description": "Sequential Thinking helps users organize their thoughts and break down complex problems through a structured workflow. By guiding users through defined cognitive stages like Problem Definition, Research, Analysis, Synthesis, and Conclusion, it provides a framework for progressive thinking. The server tracks the progression of your thinking process, identifies connections between similar thoughts, monitors progress, and generates summaries, making it easier to approach challenges methodically and reach well-reasoned conclusions.", "config_json": { - "provider": "anthropic", - "models": ["claude-3-sonnet-20240229", "claude-3-opus-20240229", "claude-3-haiku-20240307"], - "api_base": "https://api.anthropic.com/v1", - "api_key_env": "ANTHROPIC_API_KEY" + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-sequential-thinking", + ], }, - "environments": { - "production": True, - "development": True, - "staging": True - }, - "tools": ["function_calling", "web_search"], - "type": "official" + "environments": {}, + "tools": ["sequential_thinking"], + "type": "community", + "id": "4519dd69-9343-4792-af51-dc4d322fb0c9", + "created_at": "2025-04-28T15:14:16.901236Z", + "updated_at": "2025-04-28T15:43:42.755205Z", }, { - "name": "OpenAI GPT", - "description": "Servidor para modelos GPT da OpenAI", + "name": "CloudFlare", + "description": "Model Context Protocol (MCP) is a new, standardized protocol for managing context between large language models (LLMs) and external systems. In this repository, we provide an installer as well as an MCP Server for Cloudflare's API.\r\n\r\nThis lets you use Claude Desktop, or any MCP Client, to use natural language to accomplish things on your Cloudflare account, e.g.:\r\n\r\nList all the Cloudflare workers on my @gmail.com account.\r\nCan you tell me about any potential issues on this particular worker '...'?", "config_json": { - "provider": "openai", - "models": ["gpt-4", "gpt-4-turbo", "gpt-3.5-turbo"], - "api_base": "https://api.openai.com/v1", - "api_key_env": "OPENAI_API_KEY" + "url": "https://observability.mcp.cloudflare.com/sse" }, - "environments": { - "production": True, - "development": True, - "staging": True - }, - "tools": ["function_calling", "web_search", "image_generation"], - "type": "official" + "environments": {}, + "tools": [ + "worker_list", + "worker_get", + "worker_put", + "worker_delete", + "worker_get_worker", + "worker_logs_by_worker_name", + "worker_logs_by_ray_id", + "worker_logs_keys", + "get_kvs", + "kv_get", + "kv_put", + "kv_list", + "kv_delete", + "r2_list_buckets", + "r2_create_bucket", + "r2_delete_bucket", + "r2_list_objects", + "r2_get_object", + "r2_put_object", + "r2_delete_object", + "d1_list_databases", + "d1_create_database", + "d1_delete_database", + "d1_query", + "durable_objects_list", + "durable_objects_create", + "durable_objects_delete", + "durable_objects_list_instances", + "durable_objects_get_instance", + "durable_objects_delete_instance", + "queues_list", + "queues_create", + "queues_delete", + "queues_get", + "queues_send_message", + "queues_get_messages", + "queues_update_consumer", + "workers_ai_list_models", + "workers_ai_get_model", + "workers_ai_run_inference", + "workers_ai_list_tasks", + "workflows_list", + "workflows_create", + "workflows_delete", + "workflows_get", + "workflows_update", + "workflows_execute", + "templates_list", + "templates_get", + "templates_create_from_template", + "w4p_list_dispatchers", + 
"w4p_create_dispatcher", + "w4p_delete_dispatcher", + "w4p_get_dispatcher", + "w4p_update_dispatcher", + "bindings_list", + "bindings_create", + "bindings_update", + "bindings_delete", + "routing_list_routes", + "routing_create_route", + "routing_update_route", + "routing_delete_route", + "cron_list", + "cron_create", + "cron_update", + "cron_delete", + "zones_list", + "zones_create", + "zones_delete", + "zones_get", + "zones_check_activation", + "secrets_list", + "secrets_put", + "secrets_delete", + "versions_list", + "versions_get", + "versions_rollback", + "wrangler_get_config", + "wrangler_update_config", + "analytics_get", + ], + "type": "official", + "id": "9138d1a2-24e6-4a75-87b0-bfa4932273e8", + "created_at": "2025-04-28T15:16:53.350824Z", + "updated_at": "2025-04-28T15:48:04.821766Z", }, { - "name": "Google Gemini", - "description": "Servidor para modelos Gemini do Google", + "name": "Brave Search", + "description": "Brave Search allows you to seamlessly integrate Brave Search functionality into AI assistants like Claude. By implementing a Model Context Protocol (MCP) server, it enables the AI to leverage Brave Search's web search and local business search capabilities. It provides tools for both general web searches and specific local searches, enhancing the AI assistant's ability to provide relevant and up-to-date information.", "config_json": { - "provider": "google", - "models": ["gemini-pro", "gemini-ultra"], - "api_base": "https://generativelanguage.googleapis.com/v1", - "api_key_env": "GOOGLE_API_KEY" + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-brave-search"], + "env": {"BRAVE_API_KEY": "env@@BRAVE_API_KEY"}, }, - "environments": { - "production": True, - "development": True, - "staging": True - }, - "tools": ["function_calling", "web_search"], - "type": "official" + "environments": {"BRAVE_API_KEY": "env@@BRAVE_API_KEY"}, + "tools": ["brave_web_search", "brave_local_search"], + "type": "official", + "id": "416c94d7-77f5-43f4-8181-aeb87934ecbf", + "created_at": "2025-04-28T15:20:07.647225Z", + "updated_at": "2025-04-28T15:49:17.434428Z", }, - { - "name": "Ollama Local", - "description": "Servidor para modelos locais via Ollama", - "config_json": { - "provider": "ollama", - "models": ["llama3", "mistral", "mixtral"], - "api_base": "http://localhost:11434", - "api_key_env": None - }, - "environments": { - "production": False, - "development": True, - "staging": False - }, - "tools": [], - "type": "community" - } ] - + # Criar os servidores MCP for server_data in mcp_servers: server = MCPServer( @@ -124,27 +188,28 @@ def create_mcp_servers(): config_json=server_data["config_json"], environments=server_data["environments"], tools=server_data["tools"], - type=server_data["type"] + type=server_data["type"], ) - + session.add(server) logger.info(f"Servidor MCP '{server_data['name']}' criado com sucesso") - + session.commit() logger.info("Todos os servidores MCP foram criados com sucesso") return True - + except SQLAlchemyError as e: session.rollback() logger.error(f"Erro de banco de dados ao criar servidores MCP: {str(e)}") return False - + except Exception as e: logger.error(f"Erro ao criar servidores MCP: {str(e)}") return False finally: session.close() + if __name__ == "__main__": success = create_mcp_servers() - sys.exit(0 if success else 1) \ No newline at end of file + sys.exit(0 if success else 1) diff --git a/scripts/seeders/tool_seeder.py b/scripts/seeders/tool_seeder.py index b99a2b44..476253c8 100644 --- a/scripts/seeders/tool_seeder.py +++ 
b/scripts/seeders/tool_seeder.py @@ -45,113 +45,10 @@ def create_tools(): return True # Definições das ferramentas - tools = [ - { - "name": "web_search", - "description": "Pesquisa na web para obter informações atualizadas", - "config_json": { - "provider": "brave", - "api_base": "https://api.search.brave.com/res/v1/web/search", - "api_key_env": "BRAVE_API_KEY", - "max_results": 5, - "safe_search": "moderate" - }, - "environments": { - "production": True, - "development": True, - "staging": True - } - }, - { - "name": "document_query", - "description": "Consulta documentos internos para obter informações específicas", - "config_json": { - "provider": "internal", - "api_base": "${KNOWLEDGE_API_URL}/documents", - "api_key_env": "KNOWLEDGE_API_KEY", - "embeddings_model": "text-embedding-3-small", - "max_chunks": 10, - "similarity_threshold": 0.75 - }, - "environments": { - "production": True, - "development": True, - "staging": True - } - }, - { - "name": "knowledge_base", - "description": "Consulta base de conhecimento para solução de problemas", - "config_json": { - "provider": "internal", - "api_base": "${KNOWLEDGE_API_URL}/kb", - "api_key_env": "KNOWLEDGE_API_KEY", - "max_results": 3, - "categories": ["support", "faq", "troubleshooting"] - }, - "environments": { - "production": True, - "development": True, - "staging": True - } - }, - { - "name": "whatsapp_integration", - "description": "Integração com WhatsApp para envio e recebimento de mensagens", - "config_json": { - "provider": "meta", - "api_base": "https://graph.facebook.com/v17.0", - "api_key_env": "WHATSAPP_API_KEY", - "phone_number_id": "${WHATSAPP_PHONE_ID}", - "webhook_verify_token": "${WHATSAPP_VERIFY_TOKEN}", - "templates_enabled": True - }, - "environments": { - "production": True, - "development": False, - "staging": True - } - }, - { - "name": "telegram_integration", - "description": "Integração com Telegram para envio e recebimento de mensagens", - "config_json": { - "provider": "telegram", - "api_base": "https://api.telegram.org", - "api_key_env": "TELEGRAM_BOT_TOKEN", - "webhook_url": "${APP_URL}/webhook/telegram", - "allowed_updates": ["message", "callback_query"] - }, - "environments": { - "production": True, - "development": False, - "staging": True - } - } - ] + tools = [] # Criar as ferramentas for tool_data in tools: - # Substituir placeholders por variáveis de ambiente quando disponíveis - if "api_base" in tool_data["config_json"]: - if "${KNOWLEDGE_API_URL}" in tool_data["config_json"]["api_base"]: - tool_data["config_json"]["api_base"] = tool_data["config_json"]["api_base"].replace( - "${KNOWLEDGE_API_URL}", os.getenv("KNOWLEDGE_API_URL", "http://localhost:5540") - ) - - if "webhook_url" in tool_data["config_json"]: - if "${APP_URL}" in tool_data["config_json"]["webhook_url"]: - tool_data["config_json"]["webhook_url"] = tool_data["config_json"]["webhook_url"].replace( - "${APP_URL}", os.getenv("APP_URL", "http://localhost:8000") - ) - - if "phone_number_id" in tool_data["config_json"]: - if "${WHATSAPP_PHONE_ID}" in tool_data["config_json"]["phone_number_id"]: - tool_data["config_json"]["phone_number_id"] = os.getenv("WHATSAPP_PHONE_ID", "") - - if "webhook_verify_token" in tool_data["config_json"]: - if "${WHATSAPP_VERIFY_TOKEN}" in tool_data["config_json"]["webhook_verify_token"]: - tool_data["config_json"]["webhook_verify_token"] = os.getenv("WHATSAPP_VERIFY_TOKEN", "") tool = Tool( name=tool_data["name"], diff --git a/src/__pycache__/main.cpython-310.pyc b/src/__pycache__/main.cpython-310.pyc index 
844941de..37150e48 100644 Binary files a/src/__pycache__/main.cpython-310.pyc and b/src/__pycache__/main.cpython-310.pyc differ diff --git a/src/api/__pycache__/routes.cpython-310.pyc b/src/api/__pycache__/routes.cpython-310.pyc index e5d23638..c51bf5a2 100644 Binary files a/src/api/__pycache__/routes.cpython-310.pyc and b/src/api/__pycache__/routes.cpython-310.pyc differ diff --git a/src/api/routes.py b/src/api/routes.py index 7e766bc5..5628e3d5 100644 --- a/src/api/routes.py +++ b/src/api/routes.py @@ -1,11 +1,17 @@ -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, HTTPException, status, Body from sqlalchemy.orm import Session from typing import List, Dict, Any import uuid from datetime import datetime +from pydantic import BaseModel, EmailStr from src.config.database import get_db -from src.core.jwt_middleware import get_jwt_token, verify_user_client, verify_admin, get_current_user_client_id +from src.core.jwt_middleware import ( + get_jwt_token, + verify_user_client, + verify_admin, + get_current_user_client_id, +) from src.schemas.schemas import ( Client, ClientCreate, @@ -18,6 +24,7 @@ from src.schemas.schemas import ( Tool, ToolCreate, ) +from src.schemas.user import UserCreate from src.services import ( client_service, contact_service, @@ -52,6 +59,12 @@ session_service = DatabaseSessionService(db_url=POSTGRES_CONNECTION_STRING) artifacts_service = InMemoryArtifactService() memory_service = InMemoryMemoryService() +# Definindo um novo schema para registro combinado de cliente e usuário +class ClientRegistration(BaseModel): + name: str + email: EmailStr + password: str + @router.post( "/chat", @@ -71,13 +84,12 @@ async def chat( agent = agent_service.get_agent(db, request.agent_id) if not agent: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Agente não encontrado" + status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) - + # Verificar se o usuário tem acesso ao agente (via cliente) await verify_user_client(payload, db, agent.client_id) - + try: final_response_text = await run_agent( request.agent_id, @@ -127,13 +139,12 @@ async def get_agent_sessions( agent = agent_service.get_agent(db, agent_id) if not agent: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Agente não encontrado" + status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) - + # Verificar se o usuário tem acesso ao agente (via cliente) await verify_user_client(payload, db, agent.client_id) - + return get_sessions_by_agent(db, agent_id, skip, limit) @@ -147,17 +158,16 @@ async def get_session( session = get_session_by_id(session_service, session_id) if not session: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Sessão não encontrada" + status_code=status.HTTP_404_NOT_FOUND, detail="Sessão não encontrada" ) - + # Verificar se o agente da sessão pertence ao cliente do usuário agent_id = uuid.UUID(session.agent_id) if session.agent_id else None if agent_id: agent = agent_service.get_agent(db, agent_id) if agent: await verify_user_client(payload, db, agent.client_id) - + return session @@ -174,17 +184,16 @@ async def get_agent_messages( session = get_session_by_id(session_service, session_id) if not session: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Sessão não encontrada" + status_code=status.HTTP_404_NOT_FOUND, detail="Sessão não encontrada" ) - + # Verificar se o agente da sessão pertence ao cliente do usuário agent_id = 
uuid.UUID(session.agent_id) if session.agent_id else None if agent_id: agent = agent_service.get_agent(db, agent_id) if agent: await verify_user_client(payload, db, agent.client_id) - + return get_session_events(session_service, session_id) @@ -201,30 +210,52 @@ async def remove_session( session = get_session_by_id(session_service, session_id) if not session: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Sessão não encontrada" + status_code=status.HTTP_404_NOT_FOUND, detail="Sessão não encontrada" ) - + # Verificar se o agente da sessão pertence ao cliente do usuário agent_id = uuid.UUID(session.agent_id) if session.agent_id else None if agent_id: agent = agent_service.get_agent(db, agent_id) if agent: await verify_user_client(payload, db, agent.client_id) - + return delete_session(session_service, session_id) # Rotas para Clientes + + @router.post("/clients/", response_model=Client, status_code=status.HTTP_201_CREATED) -async def create_client( - client: ClientCreate, +async def create_user( + registration: ClientRegistration, db: Session = Depends(get_db), payload: dict = Depends(get_jwt_token), ): + """ + Cria um cliente e um usuário associado a ele + + Args: + registration: Dados do cliente e usuário a serem criados + db: Sessão do banco de dados + payload: Payload do token JWT + + Returns: + Client: Cliente criado + """ # Apenas administradores podem criar clientes await verify_admin(payload) - return client_service.create_client(db, client) + + # Criar objetos ClientCreate e UserCreate a partir do ClientRegistration + client = ClientCreate(name=registration.name, email=registration.email) + user = UserCreate(email=registration.email, password=registration.password, name=registration.name) + + # Criar cliente com usuário + client_obj, message = client_service.create_client_with_user(db, client, user) + if not client_obj: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) + + return client_obj @router.get("/clients/", response_model=List[Client]) @@ -237,7 +268,7 @@ async def read_clients( # Se for administrador, pode ver todos os clientes # Se for usuário comum, só vê o próprio cliente client_id = get_current_user_client_id(payload) - + if client_id: # Usuário comum - retorna apenas seu próprio cliente client = client_service.get_client(db, client_id) @@ -255,7 +286,7 @@ async def read_client( ): # Verificar se o usuário tem acesso aos dados deste cliente await verify_user_client(payload, db, client_id) - + db_client = client_service.get_client(db, client_id) if db_client is None: raise HTTPException( @@ -273,7 +304,7 @@ async def update_client( ): # Verificar se o usuário tem acesso aos dados deste cliente await verify_user_client(payload, db, client_id) - + db_client = client_service.update_client(db, client_id, client) if db_client is None: raise HTTPException( @@ -290,7 +321,7 @@ async def delete_client( ): # Apenas administradores podem excluir clientes await verify_admin(payload) - + if not client_service.delete_client(db, client_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Cliente não encontrado" @@ -306,7 +337,7 @@ async def create_contact( ): # Verificar se o usuário tem acesso ao cliente do contato await verify_user_client(payload, db, contact.client_id) - + return contact_service.create_contact(db, contact) @@ -320,7 +351,7 @@ async def read_contacts( ): # Verificar se o usuário tem acesso aos dados deste cliente await verify_user_client(payload, db, client_id) - + return 
contact_service.get_contacts_by_client(db, client_id, skip, limit) @@ -335,10 +366,10 @@ async def read_contact( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" ) - + # Verificar se o usuário tem acesso ao cliente do contato await verify_user_client(payload, db, db_contact.client_id) - + return db_contact @@ -355,15 +386,15 @@ async def update_contact( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" ) - + # Verificar se o usuário tem acesso ao cliente do contato await verify_user_client(payload, db, db_current_contact.client_id) - + # Verificar se está tentando mudar o cliente if contact.client_id != db_current_contact.client_id: # Verificar se o usuário tem acesso ao novo cliente também await verify_user_client(payload, db, contact.client_id) - + db_contact = contact_service.update_contact(db, contact_id, contact) if db_contact is None: raise HTTPException( @@ -384,10 +415,10 @@ async def delete_contact( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" ) - + # Verificar se o usuário tem acesso ao cliente do contato await verify_user_client(payload, db, db_contact.client_id) - + if not contact_service.delete_contact(db, contact_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" @@ -403,7 +434,7 @@ async def create_agent( ): # Verificar se o usuário tem acesso ao cliente do agente await verify_user_client(payload, db, agent.client_id) - + return agent_service.create_agent(db, agent) @@ -417,7 +448,7 @@ async def read_agents( ): # Verificar se o usuário tem acesso aos dados deste cliente await verify_user_client(payload, db, client_id) - + return agent_service.get_agents_by_client(db, client_id, skip, limit) @@ -432,16 +463,16 @@ async def read_agent( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) - + # Verificar se o usuário tem acesso ao cliente do agente await verify_user_client(payload, db, db_agent.client_id) - + return db_agent @router.put("/agent/{agent_id}", response_model=Agent) async def update_agent( - agent_id: uuid.UUID, + agent_id: uuid.UUID, agent_data: Dict[str, Any], db: Session = Depends(get_db), payload: dict = Depends(get_jwt_token), @@ -452,15 +483,15 @@ async def update_agent( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) - + # Verificar se o usuário tem acesso ao cliente do agente await verify_user_client(payload, db, db_agent.client_id) - + # Se estiver tentando mudar o client_id, verificar permissão para o novo cliente também - if 'client_id' in agent_data and agent_data['client_id'] != str(db_agent.client_id): - new_client_id = uuid.UUID(agent_data['client_id']) + if "client_id" in agent_data and agent_data["client_id"] != str(db_agent.client_id): + new_client_id = uuid.UUID(agent_data["client_id"]) await verify_user_client(payload, db, new_client_id) - + return await agent_service.update_agent(db, agent_id, agent_data) @@ -476,10 +507,10 @@ async def delete_agent( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) - + # Verificar se o usuário tem acesso ao cliente do agente await verify_user_client(payload, db, db_agent.client_id) - + if not agent_service.delete_agent(db, agent_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" @@ -497,7 +528,7 @@ async def create_mcp_server( ): # Apenas administradores podem criar 
servidores MCP await verify_admin(payload) - + return mcp_server_service.create_mcp_server(db, server) @@ -536,7 +567,7 @@ async def update_mcp_server( ): # Apenas administradores podem atualizar servidores MCP await verify_admin(payload) - + db_server = mcp_server_service.update_mcp_server(db, server_id, server) if db_server is None: raise HTTPException( @@ -553,7 +584,7 @@ async def delete_mcp_server( ): # Apenas administradores podem excluir servidores MCP await verify_admin(payload) - + if not mcp_server_service.delete_mcp_server(db, server_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Servidor MCP não encontrado" @@ -569,7 +600,7 @@ async def create_tool( ): # Apenas administradores podem criar ferramentas await verify_admin(payload) - + return tool_service.create_tool(db, tool) @@ -608,7 +639,7 @@ async def update_tool( ): # Apenas administradores podem atualizar ferramentas await verify_admin(payload) - + db_tool = tool_service.update_tool(db, tool_id, tool) if db_tool is None: raise HTTPException( @@ -625,7 +656,7 @@ async def delete_tool( ): # Apenas administradores podem excluir ferramentas await verify_admin(payload) - + if not tool_service.delete_tool(db, tool_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Ferramenta não encontrada" diff --git a/src/models/models.py b/src/models/models.py index 1cfc3e35..51a79b0b 100644 --- a/src/models/models.py +++ b/src/models/models.py @@ -9,6 +9,7 @@ class Client(Base): id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) name = Column(String, nullable=False) + email = Column(String, unique=True, index=True, nullable=False) created_at = Column(DateTime(timezone=True), server_default=func.now()) updated_at = Column(DateTime(timezone=True), onupdate=func.now()) diff --git a/src/schemas/__pycache__/schemas.cpython-310.pyc b/src/schemas/__pycache__/schemas.cpython-310.pyc index d9b02f36..f07a7ae2 100644 Binary files a/src/schemas/__pycache__/schemas.cpython-310.pyc and b/src/schemas/__pycache__/schemas.cpython-310.pyc differ diff --git a/src/schemas/schemas.py b/src/schemas/schemas.py index df57f7bb..7c96052c 100644 --- a/src/schemas/schemas.py +++ b/src/schemas/schemas.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, Field, validator, EmailStr from typing import Optional, Dict, Any, Union, List from datetime import datetime from uuid import UUID @@ -8,6 +8,7 @@ from .agent_config import LLMConfig, SequentialConfig, ParallelConfig, LoopConfi class ClientBase(BaseModel): name: str + email: Optional[EmailStr] = None class ClientCreate(ClientBase): pass diff --git a/src/schemas/user.py b/src/schemas/user.py index 98fb41be..85ace718 100644 --- a/src/schemas/user.py +++ b/src/schemas/user.py @@ -10,6 +10,10 @@ class UserCreate(UserBase): password: str name: str # Para criação do cliente associado +class AdminUserCreate(UserBase): + password: str + name: str + class UserLogin(BaseModel): email: EmailStr password: str diff --git a/src/services/__pycache__/client_service.cpython-310.pyc b/src/services/__pycache__/client_service.cpython-310.pyc index a9fac2af..1ab19ca9 100644 Binary files a/src/services/__pycache__/client_service.cpython-310.pyc and b/src/services/__pycache__/client_service.cpython-310.pyc differ diff --git a/src/services/auth_service.py b/src/services/auth_service.py index 79cc38fc..8f6d3449 100644 --- a/src/services/auth_service.py +++ b/src/services/auth_service.py @@ -7,6 +7,7 @@ from fastapi import Depends, 
HTTPException, status from fastapi.security import OAuth2PasswordBearer from jose import JWTError, jwt from src.config.settings import settings +from src.config.database import get_db from datetime import datetime, timedelta import logging from typing import Optional @@ -146,7 +147,4 @@ def create_access_token(user: User) -> str: token_data["client_id"] = str(user.client_id) # Criar token - return create_jwt_token(token_data) - -# Dependência para obter a sessão do banco de dados -from src.config.database import get_db \ No newline at end of file + return create_jwt_token(token_data) \ No newline at end of file diff --git a/src/services/client_service.py b/src/services/client_service.py index 5a55274a..8419aa34 100644 --- a/src/services/client_service.py +++ b/src/services/client_service.py @@ -3,7 +3,9 @@ from sqlalchemy.exc import SQLAlchemyError from fastapi import HTTPException, status from src.models.models import Client from src.schemas.schemas import ClientCreate -from typing import List, Optional +from src.schemas.user import UserCreate +from src.services.user_service import create_user +from typing import List, Optional, Tuple import uuid import logging @@ -91,4 +93,46 @@ def delete_client(db: Session, client_id: uuid.UUID) -> bool: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Erro ao remover cliente" - ) \ No newline at end of file + ) + +def create_client_with_user(db: Session, client_data: ClientCreate, user_data: UserCreate) -> Tuple[Optional[Client], str]: + """ + Cria um novo cliente com um usuário associado + + Args: + db: Sessão do banco de dados + client_data: Dados do cliente a ser criado + user_data: Dados do usuário a ser criado + + Returns: + Tuple[Optional[Client], str]: Tupla com o cliente criado (ou None em caso de erro) e mensagem de status + """ + try: + # Iniciar transação - primeiro cria o cliente + client = Client(**client_data.model_dump()) + db.add(client) + db.flush() # Obter o ID do cliente sem confirmar a transação + + # Usar o ID do cliente para criar o usuário associado + user, message = create_user(db, user_data, is_admin=False, client_id=client.id) + + if not user: + # Se houve erro na criação do usuário, fazer rollback + db.rollback() + logger.error(f"Erro ao criar usuário para o cliente: {message}") + return None, f"Erro ao criar usuário: {message}" + + # Se tudo correu bem, confirmar a transação + db.commit() + logger.info(f"Cliente e usuário criados com sucesso: {client.id}") + return client, "Cliente e usuário criados com sucesso" + + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao criar cliente com usuário: {str(e)}") + return None, f"Erro ao criar cliente com usuário: {str(e)}" + + except Exception as e: + db.rollback() + logger.error(f"Erro inesperado ao criar cliente com usuário: {str(e)}") + return None, f"Erro inesperado: {str(e)}" \ No newline at end of file diff --git a/src/services/email_service.py b/src/services/email_service.py index cc2ed178..af703e03 100644 --- a/src/services/email_service.py +++ b/src/services/email_service.py @@ -3,157 +3,204 @@ from sendgrid.helpers.mail import Mail, Email, To, Content from src.config.settings import settings import logging from datetime import datetime +from jinja2 import Environment, FileSystemLoader, select_autoescape +import os +from pathlib import Path logger = logging.getLogger(__name__) -def send_verification_email(email: str, token: str) -> bool: +# Configure Jinja2 to load templates +templates_dir = Path(__file__).parent.parent / 
"templates" / "emails" +os.makedirs(templates_dir, exist_ok=True) + +# Configure Jinja2 with the templates directory +env = Environment( + loader=FileSystemLoader(templates_dir), + autoescape=select_autoescape(['html', 'xml']) +) + +def _render_template(template_name: str, context: dict) -> str: """ - Envia um email de verificação para o usuário + Render a template with the provided data Args: - email: Email do destinatário - token: Token de verificação de email + template_name: Template file name + context: Data to render in the template Returns: - bool: True se o email foi enviado com sucesso, False caso contrário + str: Rendered HTML + """ + try: + template = env.get_template(f"{template_name}.html") + return template.render(**context) + except Exception as e: + logger.error(f"Error rendering template '{template_name}': {str(e)}") + return f"
Could not display email content. Please access {context.get('verification_link', '') or context.get('reset_link', '')}
" + +def send_verification_email(email: str, token: str) -> bool: + """ + Send a verification email to the user + + Args: + email: Recipient's email + token: Email verification token + + Returns: + bool: True if the email was sent successfully, False otherwise """ try: sg = sendgrid.SendGridAPIClient(api_key=settings.SENDGRID_API_KEY) from_email = Email(settings.EMAIL_FROM) to_email = To(email) - subject = "Verificação de Email - Evo AI" + subject = "Email Verification - Evo AI" verification_link = f"{settings.APP_URL}/auth/verify-email/{token}" - content = Content( - "text/html", - f""" - - - - - -
-                    Evo AI
-                    Bem-vindo à Plataforma Evo AI!
-                    Obrigado por se cadastrar. Para verificar sua conta e começar a usar nossos serviços,
-                    por favor clique no botão abaixo:
-                    Verificar meu Email
-                    Ou copie e cole o link abaixo no seu navegador:
-                    {verification_link}
-                    Este link é válido por 24 horas.
-                    Se você não solicitou este email, por favor ignore-o.
- - - """ - ) + html_content = _render_template('verification_email', { + 'verification_link': verification_link, + 'user_name': email.split('@')[0], # Use part of the email as temporary name + 'current_year': datetime.now().year + }) + + content = Content("text/html", html_content) mail = Mail(from_email, to_email, subject, content) response = sg.client.mail.send.post(request_body=mail.get()) if response.status_code >= 200 and response.status_code < 300: - logger.info(f"Email de verificação enviado para {email}") + logger.info(f"Verification email sent to {email}") return True else: - logger.error(f"Falha ao enviar email de verificação para {email}. Status: {response.status_code}") + logger.error(f"Failed to send verification email to {email}. Status: {response.status_code}") return False except Exception as e: - logger.error(f"Erro ao enviar email de verificação para {email}: {str(e)}") + logger.error(f"Error sending verification email to {email}: {str(e)}") return False def send_password_reset_email(email: str, token: str) -> bool: """ - Envia um email de redefinição de senha para o usuário + Send a password reset email to the user Args: - email: Email do destinatário - token: Token de redefinição de senha + email: Recipient's email + token: Password reset token Returns: - bool: True se o email foi enviado com sucesso, False caso contrário + bool: True if the email was sent successfully, False otherwise """ try: sg = sendgrid.SendGridAPIClient(api_key=settings.SENDGRID_API_KEY) from_email = Email(settings.EMAIL_FROM) to_email = To(email) - subject = "Redefinição de Senha - Evo AI" + subject = "Password Reset - Evo AI" reset_link = f"{settings.APP_URL}/reset-password?token={token}" - content = Content( - "text/html", - f""" - - - - - -
-                    Evo AI
-                    Redefinição de Senha
-                    Recebemos uma solicitação para redefinir sua senha. Clique no botão abaixo
-                    para criar uma nova senha:
-                    Redefinir minha Senha
-                    Ou copie e cole o link abaixo no seu navegador:
-                    {reset_link}
-                    Este link é válido por 1 hora.
-                    Se você não solicitou esta alteração, por favor ignore este email
-                    e entre em contato com o suporte imediatamente.
- - - """ - ) + html_content = _render_template('password_reset', { + 'reset_link': reset_link, + 'user_name': email.split('@')[0], # Use part of the email as temporary name + 'current_year': datetime.now().year + }) + + content = Content("text/html", html_content) mail = Mail(from_email, to_email, subject, content) response = sg.client.mail.send.post(request_body=mail.get()) if response.status_code >= 200 and response.status_code < 300: - logger.info(f"Email de redefinição de senha enviado para {email}") + logger.info(f"Password reset email sent to {email}") return True else: - logger.error(f"Falha ao enviar email de redefinição de senha para {email}. Status: {response.status_code}") + logger.error(f"Failed to send password reset email to {email}. Status: {response.status_code}") return False except Exception as e: - logger.error(f"Erro ao enviar email de redefinição de senha para {email}: {str(e)}") + logger.error(f"Error sending password reset email to {email}: {str(e)}") + return False + +def send_welcome_email(email: str, user_name: str = None) -> bool: + """ + Send a welcome email to the user after verification + + Args: + email: Recipient's email + user_name: User's name (optional) + + Returns: + bool: True if the email was sent successfully, False otherwise + """ + try: + sg = sendgrid.SendGridAPIClient(api_key=settings.SENDGRID_API_KEY) + from_email = Email(settings.EMAIL_FROM) + to_email = To(email) + subject = "Welcome to Evo AI" + + dashboard_link = f"{settings.APP_URL}/dashboard" + + html_content = _render_template('welcome_email', { + 'dashboard_link': dashboard_link, + 'user_name': user_name or email.split('@')[0], + 'current_year': datetime.now().year + }) + + content = Content("text/html", html_content) + + mail = Mail(from_email, to_email, subject, content) + response = sg.client.mail.send.post(request_body=mail.get()) + + if response.status_code >= 200 and response.status_code < 300: + logger.info(f"Welcome email sent to {email}") + return True + else: + logger.error(f"Failed to send welcome email to {email}. Status: {response.status_code}") + return False + + except Exception as e: + logger.error(f"Error sending welcome email to {email}: {str(e)}") + return False + +def send_account_locked_email(email: str, reset_token: str, failed_attempts: int, time_period: str) -> bool: + """ + Send an email informing that the account has been locked after login attempts + + Args: + email: Recipient's email + reset_token: Token to reset the password + failed_attempts: Number of failed attempts + time_period: Time period of the attempts + + Returns: + bool: True if the email was sent successfully, False otherwise + """ + try: + sg = sendgrid.SendGridAPIClient(api_key=settings.SENDGRID_API_KEY) + from_email = Email(settings.EMAIL_FROM) + to_email = To(email) + subject = "Security Alert - Account Locked" + + reset_link = f"{settings.APP_URL}/reset-password?token={reset_token}" + + html_content = _render_template('account_locked', { + 'reset_link': reset_link, + 'user_name': email.split('@')[0], + 'failed_attempts': failed_attempts, + 'time_period': time_period, + 'current_year': datetime.now().year + }) + + content = Content("text/html", html_content) + + mail = Mail(from_email, to_email, subject, content) + response = sg.client.mail.send.post(request_body=mail.get()) + + if response.status_code >= 200 and response.status_code < 300: + logger.info(f"Account locked email sent to {email}") + return True + else: + logger.error(f"Failed to send account locked email to {email}. 
Status: {response.status_code}") + return False + + except Exception as e: + logger.error(f"Error sending account locked email to {email}: {str(e)}") return False \ No newline at end of file diff --git a/src/services/user_service.py b/src/services/user_service.py index 4480e54e..d78d9b65 100644 --- a/src/services/user_service.py +++ b/src/services/user_service.py @@ -11,7 +11,7 @@ from typing import Optional, Tuple logger = logging.getLogger(__name__) -def create_user(db: Session, user_data: UserCreate, is_admin: bool = False) -> Tuple[Optional[User], str]: +def create_user(db: Session, user_data: UserCreate, is_admin: bool = False, client_id: Optional[uuid.UUID] = None) -> Tuple[Optional[User], str]: """ Cria um novo usuário no sistema @@ -19,6 +19,7 @@ def create_user(db: Session, user_data: UserCreate, is_admin: bool = False) -> T db: Sessão do banco de dados user_data: Dados do usuário a ser criado is_admin: Se o usuário é um administrador + client_id: ID do cliente associado (opcional, será criado um novo se não fornecido) Returns: Tuple[Optional[User], str]: Tupla com o usuário criado (ou None em caso de erro) e mensagem de status @@ -36,21 +37,21 @@ def create_user(db: Session, user_data: UserCreate, is_admin: bool = False) -> T # Iniciar transação user = None - client_id = None + local_client_id = client_id try: - # Se não for admin, criar um cliente associado - if not is_admin: + # Se não for admin e não tiver client_id, criar um cliente associado + if not is_admin and local_client_id is None: client = Client(name=user_data.name) db.add(client) db.flush() # Obter o ID do cliente - client_id = client.id + local_client_id = client.id # Criar usuário user = User( email=user_data.email, password_hash=get_password_hash(user_data.password), - client_id=client_id, + client_id=local_client_id, is_admin=is_admin, is_active=False, # Inativo até verificar email email_verified=False, @@ -80,14 +81,14 @@ def create_user(db: Session, user_data: UserCreate, is_admin: bool = False) -> T def verify_email(db: Session, token: str) -> Tuple[bool, str]: """ - Verifica o email de um usuário usando o token fornecido + Verifica o email do usuário usando o token fornecido Args: db: Sessão do banco de dados token: Token de verificação Returns: - Tuple[bool, str]: Tupla com status da operação e mensagem + Tuple[bool, str]: Tupla com status da verificação e mensagem """ try: # Buscar usuário pelo token @@ -98,7 +99,18 @@ def verify_email(db: Session, token: str) -> Tuple[bool, str]: return False, "Token de verificação inválido" # Verificar se o token expirou - if user.verification_token_expiry < datetime.utcnow(): + now = datetime.utcnow() + expiry = user.verification_token_expiry + + # Garantir que ambas as datas sejam do mesmo tipo (aware ou naive) + if expiry.tzinfo is not None and now.tzinfo is None: + # Se expiry tem fuso e now não, converter now para ter fuso + now = now.replace(tzinfo=expiry.tzinfo) + elif now.tzinfo is not None and expiry.tzinfo is None: + # Se now tem fuso e expiry não, converter expiry para ter fuso + expiry = expiry.replace(tzinfo=now.tzinfo) + + if expiry < now: logger.warning(f"Tentativa de verificação com token expirado para usuário: {user.email}") return False, "Token de verificação expirado" @@ -299,4 +311,77 @@ def authenticate_user(db: Session, email: str, password: str) -> Optional[User]: return None if not user.is_active: return None - return user \ No newline at end of file + return user + +def get_admin_users(db: Session, skip: int = 0, limit: int = 100): + """ + Lista 
os usuários administradores + + Args: + db: Sessão do banco de dados + skip: Número de registros para pular + limit: Número máximo de registros para retornar + + Returns: + List[User]: Lista de usuários administradores + """ + try: + users = db.query(User).filter(User.is_admin == True).offset(skip).limit(limit).all() + logger.info(f"Listagem de administradores: {len(users)} encontrados") + return users + + except SQLAlchemyError as e: + logger.error(f"Erro ao listar administradores: {str(e)}") + return [] + + except Exception as e: + logger.error(f"Erro inesperado ao listar administradores: {str(e)}") + return [] + +def create_admin_user(db: Session, user_data: UserCreate) -> Tuple[Optional[User], str]: + """ + Cria um novo usuário administrador + + Args: + db: Sessão do banco de dados + user_data: Dados do usuário a ser criado + + Returns: + Tuple[Optional[User], str]: Tupla com o usuário criado (ou None em caso de erro) e mensagem de status + """ + return create_user(db, user_data, is_admin=True) + +def deactivate_user(db: Session, user_id: uuid.UUID) -> Tuple[bool, str]: + """ + Desativa um usuário (não exclui, apenas marca como inativo) + + Args: + db: Sessão do banco de dados + user_id: ID do usuário a ser desativado + + Returns: + Tuple[bool, str]: Tupla com status da operação e mensagem + """ + try: + # Buscar usuário pelo ID + user = db.query(User).filter(User.id == user_id).first() + + if not user: + logger.warning(f"Tentativa de desativação de usuário inexistente: {user_id}") + return False, "Usuário não encontrado" + + # Desativar usuário + user.is_active = False + + db.commit() + logger.info(f"Usuário desativado com sucesso: {user.email}") + return True, "Usuário desativado com sucesso" + + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao desativar usuário: {str(e)}") + return False, f"Erro ao desativar usuário: {str(e)}" + + except Exception as e: + logger.error(f"Erro inesperado ao desativar usuário: {str(e)}") + return False, f"Erro inesperado: {str(e)}" \ No newline at end of file diff --git a/src/templates/emails/account_locked.html b/src/templates/emails/account_locked.html new file mode 100644 index 00000000..56277b89 --- /dev/null +++ b/src/templates/emails/account_locked.html @@ -0,0 +1,32 @@ +{% extends "base_email.html" %} + +{% block title %}Account Locked - Evo AI{% endblock %} + +{% block header %}Evo AI - Security{% endblock %} + +{% block content %} +

+<h2>Security Alert: Your Account Has Been Locked</h2>
+
+<p>Hello {{ user_name }},</p>
+
+<p>We detected multiple failed login attempts to your account on the Evo AI platform. To protect your information, we have temporarily locked access to your account.</p>
+
+<h3>What happened?</h3>
+
+<p>Our security system detected {{ failed_attempts }} failed login attempts with incorrect passwords in the last {{ time_period }}. This may indicate an unauthorized access attempt to your account.</p>
+
+<h3>What to do now?</h3>
+
+<p>To unlock your account, you need to reset your password:</p>
+
+<div style="text-align: center;">
+    <a href="{{ reset_link }}" class="button">Reset My Password</a>
+</div>
+
+<p>The link above is valid for 24 hours. If you don't reset your password within this period, you will need to request a new reset link.</p>
+
+<p><strong>Important:</strong> If you haven't tried to log in recently, we recommend that you reset your password immediately and consider enabling two-factor authentication for greater security.</p>
+
+<p>If you need help, please contact our support team.</p>
+
+<p>Best regards,<br>
+Evo AI Security Team</p>
+{% endblock %}
+
+{% block footer_message %}If you don't recognize this activity, please contact support immediately.{% endblock %}
\ No newline at end of file
diff --git a/src/templates/emails/base_email.html b/src/templates/emails/base_email.html
new file mode 100644
index 00000000..c96d28d5
--- /dev/null
+++ b/src/templates/emails/base_email.html
@@ -0,0 +1,83 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>{% block title %}Evo AI{% endblock %}</title>
+    <style>
+        {% block additional_styles %}{% endblock %}
+    </style>
+</head>
+<body>
+    <div class="container">
+        <div class="header">
+            <h1>{% block header %}Evo AI{% endblock %}</h1>
+        </div>
+        <div class="content">
+            {% block content %}{% endblock %}
+        </div>
+        <div class="footer">
+            <p>&copy; {{ current_year }} Evo AI</p>
+            <p>{% block footer_message %}{% endblock %}</p>
+        </div>
+    </div>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/templates/emails/password_reset.html b/src/templates/emails/password_reset.html
new file mode 100644
index 00000000..bcdf2c78
--- /dev/null
+++ b/src/templates/emails/password_reset.html
@@ -0,0 +1,29 @@
+{% extends "base_email.html" %}
+
+{% block title %}Password Reset - Evo AI{% endblock %}
+
+{% block header %}Evo AI{% endblock %}
+
+{% block content %}
+<h2>Password Reset</h2>
+
+<p>Hello {{ user_name }},</p>
+
+<p>We received a request to reset the password for your account on the Evo AI platform. If you didn't request this change, please ignore this email or contact our support team if you have any questions.</p>
+
+<p>To reset your password, click the button below:</p>
+
+<div style="text-align: center;">
+    <a href="{{ reset_link }}" class="button">Reset My Password</a>
+</div>
+
+<p>This reset link is valid for 24 hours. After this period, you will need to request a new password reset.</p>
+
+<p>For security reasons, after resetting your password, you will be logged out of all active sessions and will need to log in again on all devices.</p>
+
+<p>If you can't click the button above, copy and paste the following URL into your browser:</p>
+
+<p>{{ reset_link }}</p>
+
+<p>Best regards,<br>
+Evo AI Team</p>
+{% endblock %}
+
+{% block footer_message %}This is an automated email. Please do not reply to this message.{% endblock %}
\ No newline at end of file
diff --git a/src/templates/emails/verification_email.html b/src/templates/emails/verification_email.html
new file mode 100644
index 00000000..93c35137
--- /dev/null
+++ b/src/templates/emails/verification_email.html
@@ -0,0 +1,31 @@
+{% extends "base_email.html" %}
+
+{% block title %}Email Verification - Evo AI{% endblock %}
+
+{% block header %}Evo AI{% endblock %}
+
+{% block content %}
+<h2>Email Verification</h2>
+
+<p>Hello {{ user_name }},</p>
+
+<p>Thank you for registering on the Evo AI platform. To complete your registration and ensure the security of your account, we need to verify your email address.</p>
+
+<p>Please click the button below to confirm your email:</p>
+
+<div style="text-align: center;">
+    <a href="{{ verification_link }}" class="button">Verify My Email</a>
+</div>
+
+<p>This verification link is valid for 48 hours. If it expires, you can request a new verification email through our platform.</p>
+
+<p>If you can't click the button above, copy and paste the following URL into your browser:</p>
+
+<p>{{ verification_link }}</p>
+
+<p>If you didn't create an account on Evo AI, please ignore this email or contact our support team.</p>
+
+<p>We're excited to have you as part of our community!</p>
+
+<p>Best regards,<br>
+Evo AI Team</p>
+{% endblock %}
+
+{% block footer_message %}This is an automated email. Please do not reply to this message.{% endblock %}
\ No newline at end of file
diff --git a/src/templates/emails/welcome_email.html b/src/templates/emails/welcome_email.html
new file mode 100644
index 00000000..762370b7
--- /dev/null
+++ b/src/templates/emails/welcome_email.html
@@ -0,0 +1,31 @@
+{% extends "base_email.html" %}
+
+{% block title %}Welcome to Evo AI{% endblock %}
+
+{% block header %}Evo AI{% endblock %}
+
+{% block content %}
+<h2>Welcome to the Evo AI Platform!</h2>
+
+<p>Hello {{ user_name }},</p>
+
+<p>We're thrilled to have you as part of our community. Your account has been successfully verified and you can now start using all the features of our platform.</p>
+
+<p>Next steps:</p>
+
+<div style="text-align: center;">
+    <a href="{{ dashboard_link }}" class="button">Access My Dashboard</a>
+</div>
+
+<p>If you have any questions or need assistance, our support team is available to help you.</p>
+
+<p>Make the most of the power of Evo AI!</p>
+
+<p>Best regards,<br>
+Evo AI Team</p>
+{% endblock %}
+
+{% block footer_message %}This is an automated email. Please do not reply to this message. For support, use our help center.{% endblock %}
\ No newline at end of file
diff --git a/src/utils/security.py b/src/utils/security.py
index c169c913..ecedb0c6 100644
--- a/src/utils/security.py
+++ b/src/utils/security.py
@@ -5,9 +5,19 @@ import string
 from jose import jwt
 from src.config.settings import settings
 import logging
+import bcrypt
+from dataclasses import dataclass
 
 logger = logging.getLogger(__name__)
 
+# Corrigir erro do bcrypt com passlib
+if not hasattr(bcrypt, '__about__'):
+    @dataclass
+    class BcryptAbout:
+        __version__: str = getattr(bcrypt, "__version__")
+
+    setattr(bcrypt, "__about__", BcryptAbout())
+
 # Contexto para hash de senhas usando bcrypt
 pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
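
Note: every `send_*` function in this change builds its HTML through a `_render_template` helper whose definition is not shown in these hunks. A minimal sketch of such a helper, assuming Jinja2 and the `src/templates/emails/` directory added by this change (the loader setup and constant names below are illustrative, not the project's actual implementation):

```python
import os
from jinja2 import Environment, FileSystemLoader, select_autoescape

# Hypothetical sketch: the real helper is defined elsewhere in email_service.py.
# Assumes templates live in src/templates/emails/, as introduced by this change.
_TEMPLATES_DIR = os.path.join(os.path.dirname(__file__), "..", "templates", "emails")

_env = Environment(
    loader=FileSystemLoader(_TEMPLATES_DIR),
    autoescape=select_autoescape(["html"]),
)


def _render_template(template_name: str, context: dict) -> str:
    """Render an email template by name, e.g. 'password_reset' -> password_reset.html."""
    template = _env.get_template(f"{template_name}.html")
    return template.render(**context)
```

With this shape, each email function only supplies a context dict (`reset_link`, `user_name`, `current_year`, ...), and layout and styling stay centralized in `base_email.html`.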