diff --git a/.cursorrules b/.cursorrules
index f4b2a5e0..e082fc47 100644
--- a/.cursorrules
+++ b/.cursorrules
@@ -1,105 +1,143 @@
-# A2A SaaS - Regras e Estrutura do Projeto
+# Evo AI - Project Rules and Structure
-## Tecnologias Principais
-- FastAPI: Framework web para construção da API
-- SQLAlchemy: ORM para interação com o banco de dados
-- Alembic: Sistema de migrações do banco de dados
-- PostgreSQL: Banco de dados principal
-- Pydantic: Validação e serialização de dados
-- Uvicorn: Servidor ASGI para execução da aplicação
-- Redis: Cache e gerenciamento de sessões
+## Main Technologies
+- FastAPI: Web framework for building the API
+- SQLAlchemy: ORM for database interaction
+- Alembic: Database migration system
+- PostgreSQL: Main database
+- Pydantic: Data validation and serialization
+- Uvicorn: ASGI server for application execution
+- Redis: Cache and session management
+- JWT: Secure token authentication
+- Bcrypt: Secure password hashing
+- SendGrid: Email service for verification
-## Estrutura do Projeto
+## Project Structure
 ```
 src/
 ├── api/
-│   └── routes.py # Definição das rotas da API
+│   ├── routes.py # API routes definition
+│   ├── auth_routes.py # Authentication routes (login, registration, etc.)
+│   └── admin_routes.py # Protected admin routes
 ├── config/
-│   ├── database.py # Configuração do banco de dados
-│   └── settings.py # Configurações gerais
+│   ├── database.py # Database configuration
+│   └── settings.py # General settings
 ├── core/
-│   └── middleware.py # Middleware de autenticação
+│   ├── middleware.py # API Key middleware (legacy)
+│   └── jwt_middleware.py # JWT authentication middleware
 ├── models/
-│   └── models.py # Modelos SQLAlchemy
+│   └── models.py # SQLAlchemy models
 ├── schemas/
-│   └── schemas.py # Schemas Pydantic
-└── services/
-    ├── agent_service.py # Lógica de negócio para agentes
-    ├── client_service.py # Lógica de negócio para clientes
-    ├── contact_service.py # Lógica de negócio para contatos
-    ├── mcp_server_service.py # Lógica de negócio para servidores MCP
-    └── tool_service.py # Lógica de negócio para ferramentas
+│   ├── schemas.py # Main Pydantic schemas
+│   ├── user.py # User and authentication schemas
+│   └── audit.py # Audit logs schemas
+├── services/
+│   ├── agent_service.py # Business logic for agents
+│   ├── client_service.py # Business logic for clients
+│   ├── contact_service.py # Business logic for contacts
+│   ├── mcp_server_service.py # Business logic for MCP servers
+│   ├── tool_service.py # Business logic for tools
+│   ├── user_service.py # User and authentication logic
+│   ├── auth_service.py # JWT authentication logic
+│   ├── email_service.py # Email sending service
+│   └── audit_service.py # Audit logs logic
+└── utils/
+    └── security.py # Security utilities (JWT, hash)
 ```
-## Padrões de Código
+## Code Standards
 ### Schemas (Pydantic)
-- Usar `BaseModel` como base para todos os schemas
-- Definir campos com tipos explícitos
-- Usar `Optional` para campos opcionais
-- Usar `Field` para validações e valores padrão
-- Implementar `Config` com `from_attributes = True` para modelos
+- Use `BaseModel` as base for all schemas
+- Define fields with explicit types
+- Use `Optional` for optional fields
+- Use `Field` for validations and default values
+- Implement `Config` with `from_attributes = True` for models
+- Use `EmailStr` for email validation
-### Serviços
-- Tratamento de erros com `SQLAlchemyError`
-- Logging consistente com mensagens em português
-- Tipagem forte com `Optional` para retornos nulos
-- Documentação com docstrings
-- Rollback em caso de erro
-- Retornos padronizados
+### Services
+- Error handling with `SQLAlchemyError`
+- Consistent logging with messages in English
+- Strong typing with `Optional` for null returns
+- Documentation with docstrings
+- Rollback in case of error
+- Standardized returns
+- Use transactions for multiple operations
-### Rotas
-- Status codes apropriados (201 para criação, 204 para deleção)
-- Tratamento de erros com `HTTPException`
-- Mensagens de erro em português
-- Paginação nas listagens
-- Validação de entrada com schemas
-- Autenticação via API Key em todas as rotas
+### Routes
+- Appropriate status codes (201 for creation, 204 for deletion)
+- Error handling with `HTTPException`
+- Error messages in English
+- Pagination for list endpoints
+- Input validation with schemas
+- JWT authentication for all protected routes
+- Use of asynchronous functions with `async def`
-### Migrações
-- Usar Alembic para gerenciamento de migrações
-- Nomes descritivos para as migrações
-- Manter histórico de alterações
-- Usar CASCADE quando necessário para remover dependências
+### Migrations
+- Use Alembic for migration management
+- Descriptive names for migrations
+- Maintain change history
+- Use CASCADE when necessary to remove dependencies
-### Autenticação
-- Usar API Key para autenticação
-- Gerar API Key automaticamente no primeiro acesso
-- Armazenar API Key no arquivo .env
-- Validar API Key em todas as rotas
-- Logging de tentativas de acesso inválidas
+### Authentication
+- Use JWT for authentication with OAuth2PasswordBearer
+- JWT tokens with expiration time defined in settings
+- Access token containing essential user data (is_admin, client_id, etc.)
+- Resource ownership verification based on client_id
+- Protection of administrative routes with permission verification
+- Email verification system via tokens
+- Secure password recovery with one-time tokens
-### Variáveis de Ambiente
-- Usar arquivo .env para configurações sensíveis
-- Manter .env.example atualizado
-- Documentar todas as variáveis de ambiente
-- Usar valores padrão seguros
-- Validar variáveis obrigatórias
+### Audit
+- Record important administrative actions
+- Automatic collection of contextual data (IP, user-agent)
+- Relationship with user who performed the action
+- Filtering and querying by different criteria
-## Convenções
-- Nomes de variáveis e funções em inglês
-- Mensagens de log e erro em português
-- Documentação em português
-- Indentação com 4 espaços
-- Máximo de 79 caracteres por linha
+### Environment Variables
+- Use .env file for sensitive settings
+- Keep .env.example updated
+- Document all environment variables
+- Use safe default values
+- Validate required variables
+- Clear separation between development and production configurations
-## Boas Práticas
-- Sempre validar entrada de dados
-- Implementar logging adequado
-- Tratar todos os erros possíveis
-- Manter consistência nos retornos
-- Documentar funções e classes
-- Seguir princípios SOLID
-- Manter testes atualizados
-- Proteger rotas com autenticação
-- Usar variáveis de ambiente para configurações sensíveis
+## Conventions
+- Variable and function names in English
+- Log and error messages in English
+- Documentation in English
+- Indentation with 4 spaces
+- Maximum of 79 characters per line
-## Comandos Úteis
-- `make run`: Inicia o servidor
-- `make alembic-revision message="descrição"`: Cria nova migração
-- `make alembic-upgrade`: Aplica migrações pendentes
-- `make alembic-downgrade`: Reverte última migração
-- `make alembic-migrate`: Cria e aplica nova migração
-- `make alembic-reset`: Reseta o banco de dados para o estado inicial
-- `make alembic-upgrade-cascade`: Força upgrade removendo dependências
-- `make clear-cache`: Limpa cache do projeto
+## Best Practices
+- Always validate input data
+- Implement appropriate logging
+- Handle all possible errors
+- Maintain consistency in returns
+- Document functions and classes
+- Follow SOLID principles
+- Keep tests updated
+- Protect routes with JWT authentication
+- Use environment variables for sensitive configurations
+- Implement resource ownership verification
+- Store passwords only with secure hash (bcrypt)
+- Implement appropriate expiration for tokens
+
+## Security
+- JWT tokens with limited lifetime
+- Email verification with one-time tokens
+- Secure password hashing with bcrypt and random salt
+- Audit system for administrative actions
+- Resource-based access control
+- Clear separation between regular users and administrators
+- Strict input validation with Pydantic
+
+## Useful Commands
+- `make run`: Start the server
+- `make alembic-revision message="description"`: Create new migration
+- `make alembic-upgrade`: Apply pending migrations
+- `make alembic-downgrade`: Revert last migration
+- `make alembic-migrate`: Create and apply new migration
+- `make alembic-reset`: Reset database to initial state
+- `make alembic-upgrade-cascade`: Force upgrade removing dependencies
+- `make clear-cache`: Clean project cache
diff --git a/.env b/.env
index e389366c..5eb09e10 100644
--- a/.env
+++ b/.env
@@ -23,11 +23,24 @@ REDIS_PASSWORD=""
 # TTL do cache de ferramentas em segundos (1 hora)
 TOOLS_CACHE_TTL=3600
-# Configurações da API
-API_KEY="e9f6ef0a-425e-4d32-bdb6-1c917da6e649"
-API_KEY_HEADER="X-API-Key"
+# Configurações JWT
+JWT_SECRET_KEY="sua-chave-secreta-jwt"
+JWT_ALGORITHM="HS256"
+JWT_EXPIRATION_TIME=30 # Em minutos
+
+# SendGrid
+SENDGRID_API_KEY="sua-sendgrid-api-key"
+EMAIL_FROM="noreply@yourdomain.com"
+APP_URL="https://yourdomain.com"
 # Configurações do Servidor
 HOST="0.0.0.0"
 PORT=8000
 DEBUG=false
+
+# Configurações de Seeders
+ADMIN_EMAIL="admin@evoai.com"
+ADMIN_INITIAL_PASSWORD="senhaforte123"
+DEMO_EMAIL="demo@exemplo.com"
+DEMO_PASSWORD="demo123"
+DEMO_CLIENT_NAME="Cliente Demo"
diff --git a/.env.example b/.env.example
index 580cd674..2de4c294 100644
--- a/.env.example
+++ b/.env.example
@@ -14,11 +14,24 @@ REDIS_PASSWORD="sua-senha-redis"
 # TTL do cache de ferramentas em segundos (1 hora)
 TOOLS_CACHE_TTL=3600
-# Configurações da API
-API_KEY="sua-api-key-gerada-automaticamente"
-API_KEY_HEADER="X-API-Key"
+# Configurações JWT
+JWT_SECRET_KEY="sua-chave-secreta-jwt"
+JWT_ALGORITHM="HS256"
+JWT_EXPIRATION_TIME=30 # Em minutos
+
+# SendGrid
+SENDGRID_API_KEY="sua-sendgrid-api-key"
+EMAIL_FROM="noreply@yourdomain.com"
+APP_URL="https://yourdomain.com"
 # Configurações do Servidor
 HOST="0.0.0.0"
 PORT=8000
 DEBUG=false
+
+# Configurações de Seeders
+ADMIN_EMAIL="admin@evoai.com"
+ADMIN_INITIAL_PASSWORD="senhaforte123"
+DEMO_EMAIL="demo@exemplo.com"
+DEMO_PASSWORD="demo123"
+DEMO_CLIENT_NAME="Cliente Demo"
diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/INSTALLER b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e3..00000000
--- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/LICENSE.rst
b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/LICENSE.rst deleted file mode 100644 index 598b8430..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/LICENSE.rst +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2005-2020, Ilya Etingof -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/METADATA b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/METADATA deleted file mode 100644 index 8d8613e6..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/METADATA +++ /dev/null @@ -1,228 +0,0 @@ -Metadata-Version: 2.1 -Name: pyasn1 -Version: 0.6.1 -Summary: Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208) -Home-page: https://github.com/pyasn1/pyasn1 -Author: Ilya Etingof -Author-email: etingof@gmail.com -Maintainer: pyasn1 maintenance organization -Maintainer-email: Christian Heimes -License: BSD-2-Clause -Project-URL: Documentation, https://pyasn1.readthedocs.io -Project-URL: Source, https://github.com/pyasn1/pyasn1 -Project-URL: Issues, https://github.com/pyasn1/pyasn1/issues -Project-URL: Changelog, https://pyasn1.readthedocs.io/en/latest/changelog.html -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Education -Classifier: Intended Audience :: Information Technology -Classifier: Intended Audience :: System Administrators -Classifier: Intended Audience :: Telecommunications Industry -Classifier: License :: OSI Approved :: BSD License -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Communications -Classifier: Topic :: Software Development :: 
Libraries :: Python Modules -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -License-File: LICENSE.rst - - -ASN.1 library for Python ------------------------- -[![PyPI](https://img.shields.io/pypi/v/pyasn1.svg?maxAge=2592000)](https://pypi.org/project/pyasn1) -[![Python Versions](https://img.shields.io/pypi/pyversions/pyasn1.svg)](https://pypi.org/project/pyasn1/) -[![Build status](https://github.com/pyasn1/pyasn1/actions/workflows/main.yml/badge.svg)](https://github.com/pyasn1/pyasn1/actions/workflows/main.yml) -[![Coverage Status](https://img.shields.io/codecov/c/github/pyasn1/pyasn1.svg)](https://codecov.io/github/pyasn1/pyasn1) -[![GitHub license](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/pyasn1/pyasn1/master/LICENSE.txt) - -This is a free and open source implementation of ASN.1 types and codecs -as a Python package. It has been first written to support particular -protocol (SNMP) but then generalized to be suitable for a wide range -of protocols based on -[ASN.1 specification](https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.208-198811-W!!PDF-E&type=items). - -**NOTE:** The package is now maintained by *Christian Heimes* and -*Simon Pichugin* in project https://github.com/pyasn1/pyasn1. - -Features --------- - -* Generic implementation of ASN.1 types (X.208) -* Standards compliant BER/CER/DER codecs -* Can operate on streams of serialized data -* Dumps/loads ASN.1 structures from Python types -* 100% Python, works with Python 3.8+ -* MT-safe -* Contributed ASN.1 compiler [Asn1ate](https://github.com/kimgr/asn1ate) - -Why using pyasn1 ----------------- - -ASN.1 solves the data serialisation problem. This solution was -designed long ago by the wise Ancients. Back then, they did not -have the luxury of wasting bits. That is why ASN.1 is designed -to serialise data structures of unbounded complexity into -something compact and efficient when it comes to processing -the data. - -That probably explains why many network protocols and file formats -still rely on the 30+ years old technology. Including a number of -high-profile Internet protocols and file formats. - -Quite a number of books cover the topic of ASN.1. -[Communication between heterogeneous systems](http://www.oss.com/asn1/dubuisson.html) -by Olivier Dubuisson is one of those high quality books freely -available on the Internet. - -The pyasn1 package is designed to help Python programmers tackling -network protocols and file formats at the comfort of their Python -prompt. The tool struggles to capture all aspects of a rather -complicated ASN.1 system and to represent it on the Python terms. - -How to use pyasn1 ------------------ - -With pyasn1 you can build Python objects from ASN.1 data structures. -For example, the following ASN.1 data structure: - -```bash -Record ::= SEQUENCE { - id INTEGER, - room [0] INTEGER OPTIONAL, - house [1] INTEGER DEFAULT 0 -} -``` - -Could be expressed in pyasn1 like this: - -```python -class Record(Sequence): - componentType = NamedTypes( - NamedType('id', Integer()), - OptionalNamedType( - 'room', Integer().subtype( - implicitTag=Tag(tagClassContext, tagFormatSimple, 0) - ) - ), - DefaultedNamedType( - 'house', Integer(0).subtype( - implicitTag=Tag(tagClassContext, tagFormatSimple, 1) - ) - ) - ) -``` - -It is in the spirit of ASN.1 to take abstract data description -and turn it into a programming language specific form. 
-Once you have your ASN.1 data structure expressed in Python, you -can use it along the lines of similar Python type (e.g. ASN.1 -`SET` is similar to Python `dict`, `SET OF` to `list`): - -```python ->>> record = Record() ->>> record['id'] = 123 ->>> record['room'] = 321 ->>> str(record) -Record: - id=123 - room=321 ->>> -``` - -Part of the power of ASN.1 comes from its serialisation features. You -can serialise your data structure and send it over the network. - -```python ->>> from pyasn1.codec.der.encoder import encode ->>> substrate = encode(record) ->>> hexdump(substrate) -00000: 30 07 02 01 7B 80 02 01 41 -``` - -Conversely, you can turn serialised ASN.1 content, as received from -network or read from a file, into a Python object which you can -introspect, modify, encode and send back. - -```python ->>> from pyasn1.codec.der.decoder import decode ->>> received_record, rest_of_substrate = decode(substrate, asn1Spec=Record()) ->>> ->>> for field in received_record: ->>> print('{} is {}'.format(field, received_record[field])) -id is 123 -room is 321 -house is 0 ->>> ->>> record == received_record -True ->>> received_record.update(room=123) ->>> substrate = encode(received_record) ->>> hexdump(substrate) -00000: 30 06 02 01 7B 80 01 7B -``` - -The pyasn1 classes struggle to emulate their Python prototypes (e.g. int, -list, dict etc.). But ASN.1 types exhibit more complicated behaviour. -To make life easier for a Pythonista, they can turn their pyasn1 -classes into Python built-ins: - -```python ->>> from pyasn1.codec.native.encoder import encode ->>> encode(record) -{'id': 123, 'room': 321, 'house': 0} -``` - -Or vice-versa -- you can initialize an ASN.1 structure from a tree of -Python objects: - -```python ->>> from pyasn1.codec.native.decoder import decode ->>> record = decode({'id': 123, 'room': 321, 'house': 0}, asn1Spec=Record()) ->>> str(record) -Record: - id=123 - room=321 ->>> -``` - -With ASN.1 design, serialisation codecs are decoupled from data objects, -so you could turn every single ASN.1 object into many different -serialised forms. As of this moment, pyasn1 supports BER, DER, CER and -Python built-ins codecs. The extremely compact PER encoding is expected -to be introduced in the upcoming pyasn1 release. - -More information on pyasn1 APIs can be found in the -[documentation](https://pyasn1.readthedocs.io/en/latest/pyasn1/contents.html), -compiled ASN.1 modules for different protocols and file formats -could be found in the pyasn1-modules -[repo](https://github.com/pyasn1/pyasn1-modules). - -How to get pyasn1 ------------------ - -The pyasn1 package is distributed under terms and conditions of 2-clause -BSD [license](https://pyasn1.readthedocs.io/en/latest/license.html). Source code is freely -available as a GitHub [repo](https://github.com/pyasn1/pyasn1). - -You could `pip install pyasn1` or download it from [PyPI](https://pypi.org/project/pyasn1). - -If something does not work as expected, -[open an issue](https://github.com/epyasn1/pyasn1/issues) at GitHub or -post your question [on Stack Overflow](https://stackoverflow.com/questions/ask) -or try browsing pyasn1 -[mailing list archives](https://sourceforge.net/p/pyasn1/mailman/pyasn1-users/). - -Copyright (c) 2005-2020, [Ilya Etingof](mailto:etingof@gmail.com). -All rights reserved. 
diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/RECORD b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/RECORD deleted file mode 100644 index f2987119..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/RECORD +++ /dev/null @@ -1,71 +0,0 @@ -pyasn1-0.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyasn1-0.6.1.dist-info/LICENSE.rst,sha256=Kq1fwA9wXEoa3bg-7RCmp10oajd58M-FGdh-YrxHNf0,1334 -pyasn1-0.6.1.dist-info/METADATA,sha256=8e1KBL3kvp1MlLUqCM1uOCMaBKxwlo4N0xHXk-_sd2Y,8383 -pyasn1-0.6.1.dist-info/RECORD,, -pyasn1-0.6.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91 -pyasn1-0.6.1.dist-info/top_level.txt,sha256=dnNEQt3nIDIO5mSCCOB5obQHrjDOUsRycdBujc2vrWE,7 -pyasn1-0.6.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pyasn1/__init__.py,sha256=tc4WulUv4ZkpkmVtee9-Fsgc6gi9jZFH1VIbAvSWj3s,66 -pyasn1/__pycache__/__init__.cpython-310.pyc,, -pyasn1/__pycache__/debug.cpython-310.pyc,, -pyasn1/__pycache__/error.cpython-310.pyc,, -pyasn1/codec/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 -pyasn1/codec/__pycache__/__init__.cpython-310.pyc,, -pyasn1/codec/__pycache__/streaming.cpython-310.pyc,, -pyasn1/codec/ber/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 -pyasn1/codec/ber/__pycache__/__init__.cpython-310.pyc,, -pyasn1/codec/ber/__pycache__/decoder.cpython-310.pyc,, -pyasn1/codec/ber/__pycache__/encoder.cpython-310.pyc,, -pyasn1/codec/ber/__pycache__/eoo.cpython-310.pyc,, -pyasn1/codec/ber/decoder.py,sha256=HZWc3M9406bhApuJF-TAYpRfLWvQT54CrREDqDMyU0Y,79192 -pyasn1/codec/ber/encoder.py,sha256=eO_--5b-0HXmPpIW2JhYlejU6V7FwdORmXFyCfKHyzI,29796 -pyasn1/codec/ber/eoo.py,sha256=dspLKc2xr_W5Tbcr2WcfLd_bJLhOjotq1YxKn3DCQNI,639 -pyasn1/codec/cer/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 -pyasn1/codec/cer/__pycache__/__init__.cpython-310.pyc,, -pyasn1/codec/cer/__pycache__/decoder.cpython-310.pyc,, -pyasn1/codec/cer/__pycache__/encoder.cpython-310.pyc,, -pyasn1/codec/cer/decoder.py,sha256=S279_LRjwHyTUBuv4LPYOpib1X4hLmBh_3et49ocm4A,4589 -pyasn1/codec/cer/encoder.py,sha256=vsGrgOHJokTeZqBJwNGokejvqH5EfTvy8hExd_j5bbY,9838 -pyasn1/codec/der/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 -pyasn1/codec/der/__pycache__/__init__.cpython-310.pyc,, -pyasn1/codec/der/__pycache__/decoder.cpython-310.pyc,, -pyasn1/codec/der/__pycache__/encoder.cpython-310.pyc,, -pyasn1/codec/der/decoder.py,sha256=GOpKZ1wFRYU0EEF3kSmIaMfe1h2w17VdGu57AHUqQFw,3428 -pyasn1/codec/der/encoder.py,sha256=ldxrpvXDFsxLxtvN7aiR61JNNtainNagZCSpsZM9DZs,3479 -pyasn1/codec/native/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 -pyasn1/codec/native/__pycache__/__init__.cpython-310.pyc,, -pyasn1/codec/native/__pycache__/decoder.cpython-310.pyc,, -pyasn1/codec/native/__pycache__/encoder.cpython-310.pyc,, -pyasn1/codec/native/decoder.py,sha256=2vK9B0AJzLT2exSNtlCUlYzZvm0E7IzUU8Ygg_lLxNo,9118 -pyasn1/codec/native/encoder.py,sha256=C24L5FkwhXPSRytaLlcL0uuYDTC2BXD75ZwH_bCqKX8,9184 -pyasn1/codec/streaming.py,sha256=Vp-VDh0SlA5h7T133rne9UNlJlqv2ohpUzVlSCGjq24,6377 -pyasn1/compat/__init__.py,sha256=-9FOJV1STFBatf2pVRiOYn14GmCKC8RY3TYCxOqfRXY,112 -pyasn1/compat/__pycache__/__init__.cpython-310.pyc,, -pyasn1/compat/__pycache__/integer.cpython-310.pyc,, -pyasn1/compat/integer.py,sha256=lMXqbJBTyjg34Rhx6JlFcXyoQxDaeXGxhaIIab86hX8,404 -pyasn1/debug.py,sha256=u-WmIFfewqp0041ezvtTjvhZcU9K14OI6p00ArXZ63g,3494 
-pyasn1/error.py,sha256=e352oqW33seeh2MbIF27sFSgpiegjstabCMFx2piR0M,3258 -pyasn1/type/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 -pyasn1/type/__pycache__/__init__.cpython-310.pyc,, -pyasn1/type/__pycache__/base.cpython-310.pyc,, -pyasn1/type/__pycache__/char.cpython-310.pyc,, -pyasn1/type/__pycache__/constraint.cpython-310.pyc,, -pyasn1/type/__pycache__/error.cpython-310.pyc,, -pyasn1/type/__pycache__/namedtype.cpython-310.pyc,, -pyasn1/type/__pycache__/namedval.cpython-310.pyc,, -pyasn1/type/__pycache__/opentype.cpython-310.pyc,, -pyasn1/type/__pycache__/tag.cpython-310.pyc,, -pyasn1/type/__pycache__/tagmap.cpython-310.pyc,, -pyasn1/type/__pycache__/univ.cpython-310.pyc,, -pyasn1/type/__pycache__/useful.cpython-310.pyc,, -pyasn1/type/base.py,sha256=tjBRvXIQSiHES5-e5rBbsnn5CtIvBgCuflujDbdrtkM,22050 -pyasn1/type/char.py,sha256=Rvj5ypQLPNXcdHkfUV8nul1XX66R_Akn0g2HUyLj1qY,9438 -pyasn1/type/constraint.py,sha256=jmrt5esLa095XdfS0beqaoRuUjnuHiTKdkTdCcKx1FI,21915 -pyasn1/type/error.py,sha256=2kwYYkbd2jXIVEE56ThLRmBEOGZfafwogEOo-9RV_GY,259 -pyasn1/type/namedtype.py,sha256=jnTClIUoRZi025GTY9GlMlMI-j5dqEcv_ilzZ7i0hUQ,16179 -pyasn1/type/namedval.py,sha256=84u6wKOfte7U47aWrFqIZRM3tO2ryivpsBqVblPezuc,4899 -pyasn1/type/opentype.py,sha256=jjqSbTgAaCxlSHSf66YcLbrxtfh_98nAx2v8wzW35MU,2861 -pyasn1/type/tag.py,sha256=hqIuspUhc5QwN182LeQMc23W_vFNTgASvnUUSX4SPHM,9497 -pyasn1/type/tagmap.py,sha256=alJ9ZfDGTAsPeygHT6yONTagUkCjlgij82YXpPaQ_-8,3000 -pyasn1/type/univ.py,sha256=Bnu2gHdA84UXMLtgb4LXbHI5TYw-kKljlsJ7dkJ8KfI,109212 -pyasn1/type/useful.py,sha256=-J7ej0hqdjF29h150dtNmIIcGcMBg_y-nKqcozvk-48,5284 diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/WHEEL b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/WHEEL deleted file mode 100644 index 0fde4dd9..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (74.1.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/top_level.txt b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/top_level.txt deleted file mode 100644 index 38fe4145..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pyasn1 diff --git a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/zip-safe b/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/zip-safe deleted file mode 100644 index 8b137891..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1-0.6.1.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/.venv/lib/python3.10/site-packages/pyasn1/__init__.py b/.venv/lib/python3.10/site-packages/pyasn1/__init__.py index 7fa1d9e6..5a56a707 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/__init__.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/__init__.py @@ -1,2 +1,7 @@ +import sys + # https://www.python.org/dev/peps/pep-0396/ -__version__ = '0.6.1' +__version__ = '0.4.8' + +if sys.version_info[:2] < (2, 4): + raise RuntimeError('PyASN1 requires Python 2.4 or later') diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/decoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/decoder.py index 7e69ca15..5ff485fb 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/decoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/decoder.py @@ -1,23 +1,14 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -import io -import os -import sys -import warnings - from pyasn1 import debug from pyasn1 import error from pyasn1.codec.ber import eoo -from pyasn1.codec.streaming import asSeekableStream -from pyasn1.codec.streaming import isEndOfStream -from pyasn1.codec.streaming import peekIntoStream -from pyasn1.codec.streaming import readFromStream -from pyasn1.compat import _MISSING -from pyasn1.error import PyAsn1Error +from pyasn1.compat.integer import from_bytes +from pyasn1.compat.octets import oct2int, octs2ints, ints2octs, null from pyasn1.type import base from pyasn1.type import char from pyasn1.type import tag @@ -25,51 +16,33 @@ from pyasn1.type import tagmap from pyasn1.type import univ from pyasn1.type import useful -__all__ = ['StreamingDecoder', 'Decoder', 'decode'] +__all__ = ['decode'] LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER) noValue = base.noValue -SubstrateUnderrunError = error.SubstrateUnderrunError - -class AbstractPayloadDecoder(object): +class AbstractDecoder(object): protoComponent = None def valueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): - """Decode value with fixed byte length. - - The decoder is allowed to consume as many bytes as necessary. - """ - raise error.PyAsn1Error('SingleItemDecoder not implemented for %s' % (tagSet,)) # TODO: Seems more like an NotImplementedError? + raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,)) def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): - """Decode value with undefined length. + raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) - The decoder is allowed to consume as many bytes as necessary. - """ - raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) # TODO: Seems more like an NotImplementedError? 
+class AbstractSimpleDecoder(AbstractDecoder): @staticmethod - def _passAsn1Object(asn1Object, options): - if 'asn1Object' not in options: - options['asn1Object'] = asn1Object - - return options - - -class AbstractSimplePayloadDecoder(AbstractPayloadDecoder): - @staticmethod - def substrateCollector(asn1Object, substrate, length, options): - for chunk in readFromStream(substrate, length, options): - yield chunk + def substrateCollector(asn1Object, substrate, length): + return substrate[:length], substrate[length:] def _createComponent(self, asn1Spec, tagSet, value, **options): if options.get('native'): @@ -82,7 +55,7 @@ class AbstractSimplePayloadDecoder(AbstractPayloadDecoder): return asn1Spec.clone(value) -class RawPayloadDecoder(AbstractSimplePayloadDecoder): +class ExplicitTagDecoder(AbstractSimpleDecoder): protoComponent = univ.Any('') def valueDecoder(self, substrate, asn1Spec, @@ -90,43 +63,45 @@ class RawPayloadDecoder(AbstractSimplePayloadDecoder): decodeFun=None, substrateFun=None, **options): if substrateFun: - asn1Object = self._createComponent(asn1Spec, tagSet, '', **options) + return substrateFun( + self._createComponent(asn1Spec, tagSet, '', **options), + substrate, length + ) - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk + head, tail = substrate[:length], substrate[length:] - return + value, _ = decodeFun(head, asn1Spec, tagSet, length, **options) - for value in decodeFun(substrate, asn1Spec, tagSet, length, **options): - yield value + if LOG: + LOG('explicit tag container carries %d octets of trailing payload ' + '(will be lost!): %s' % (len(_), debug.hexdump(_))) + + return value, tail def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): if substrateFun: - asn1Object = self._createComponent(asn1Spec, tagSet, '', **options) + return substrateFun( + self._createComponent(asn1Spec, tagSet, '', **options), + substrate, length + ) - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk + value, substrate = decodeFun(substrate, asn1Spec, tagSet, length, **options) - return + eooMarker, substrate = decodeFun(substrate, allowEoo=True, **options) - while True: - for value in decodeFun( - substrate, asn1Spec, tagSet, length, - allowEoo=True, **options): - - if value is eoo.endOfOctets: - return - - yield value + if eooMarker is eoo.endOfOctets: + return value, substrate + else: + raise error.PyAsn1Error('Missing end-of-octets terminator') -rawPayloadDecoder = RawPayloadDecoder() +explicitTagDecoder = ExplicitTagDecoder() -class IntegerPayloadDecoder(AbstractSimplePayloadDecoder): +class IntegerDecoder(AbstractSimpleDecoder): protoComponent = univ.Integer(0) def valueDecoder(self, substrate, asn1Spec, @@ -137,28 +112,25 @@ class IntegerPayloadDecoder(AbstractSimplePayloadDecoder): if tagSet[0].tagFormat != tag.tagFormatSimple: raise error.PyAsn1Error('Simple tag format expected') - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk + head, tail = substrate[:length], substrate[length:] - if chunk: - value = int.from_bytes(bytes(chunk), 'big', signed=True) + if not head: + return self._createComponent(asn1Spec, tagSet, 0, **options), tail - else: - value = 0 + value = from_bytes(head, signed=True) - yield self._createComponent(asn1Spec, tagSet, value, **options) + return self._createComponent(asn1Spec, tagSet, value, **options), tail -class 
BooleanPayloadDecoder(IntegerPayloadDecoder): +class BooleanDecoder(IntegerDecoder): protoComponent = univ.Boolean(0) def _createComponent(self, asn1Spec, tagSet, value, **options): - return IntegerPayloadDecoder._createComponent( + return IntegerDecoder._createComponent( self, asn1Spec, tagSet, value and 1 or 0, **options) -class BitStringPayloadDecoder(AbstractSimplePayloadDecoder): +class BitStringDecoder(AbstractSimpleDecoder): protoComponent = univ.BitString(()) supportConstructedForm = True @@ -166,47 +138,27 @@ class BitStringPayloadDecoder(AbstractSimplePayloadDecoder): tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): + head, tail = substrate[:length], substrate[length:] if substrateFun: - asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options) + return substrateFun(self._createComponent( + asn1Spec, tagSet, noValue, **options), substrate, length) - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return - - if not length: - raise error.PyAsn1Error('Empty BIT STRING substrate') - - for chunk in isEndOfStream(substrate): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - - if chunk: + if not head: raise error.PyAsn1Error('Empty BIT STRING substrate') if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check? - for trailingBits in readFromStream(substrate, 1, options): - if isinstance(trailingBits, SubstrateUnderrunError): - yield trailingBits - - trailingBits = ord(trailingBits) + trailingBits = oct2int(head[0]) if trailingBits > 7: raise error.PyAsn1Error( 'Trailing bits overflow %s' % trailingBits ) - for chunk in readFromStream(substrate, length - 1, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - value = self.protoComponent.fromOctetString( - chunk, internalFormat=True, padding=trailingBits) + head[1:], internalFormat=True, padding=trailingBits) - yield self._createComponent(asn1Spec, tagSet, value, **options) - - return + return self._createComponent(asn1Spec, tagSet, value, **options), tail if not self.supportConstructedForm: raise error.PyAsn1Error('Constructed encoding form prohibited ' @@ -218,18 +170,13 @@ class BitStringPayloadDecoder(AbstractSimplePayloadDecoder): # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector - bitString = self.protoComponent.fromOctetString(b'', internalFormat=True) + bitString = self.protoComponent.fromOctetString(null, internalFormat=True) - current_position = substrate.tell() + while head: + component, head = decodeFun(head, self.protoComponent, + substrateFun=substrateFun, **options) - while substrate.tell() - current_position < length: - for component in decodeFun( - substrate, self.protoComponent, substrateFun=substrateFun, - **options): - if isinstance(component, SubstrateUnderrunError): - yield component - - trailingBits = component[0] + trailingBits = oct2int(component[0]) if trailingBits > 7: raise error.PyAsn1Error( 'Trailing bits overflow %s' % trailingBits @@ -240,7 +187,7 @@ class BitStringPayloadDecoder(AbstractSimplePayloadDecoder): prepend=bitString, padding=trailingBits ) - yield self._createComponent(asn1Spec, tagSet, bitString, **options) + return self._createComponent(asn1Spec, tagSet, bitString, **options), tail def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, @@ -248,34 +195,21 @@ class BitStringPayloadDecoder(AbstractSimplePayloadDecoder): **options): if substrateFun: - asn1Object = 
self._createComponent(asn1Spec, tagSet, noValue, **options) - - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return + return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), substrate, length) # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector - bitString = self.protoComponent.fromOctetString(b'', internalFormat=True) - - while True: # loop over fragments - - for component in decodeFun( - substrate, self.protoComponent, substrateFun=substrateFun, - allowEoo=True, **options): - - if component is eoo.endOfOctets: - break - - if isinstance(component, SubstrateUnderrunError): - yield component + bitString = self.protoComponent.fromOctetString(null, internalFormat=True) + while substrate: + component, substrate = decodeFun(substrate, self.protoComponent, + substrateFun=substrateFun, + allowEoo=True, **options) if component is eoo.endOfOctets: break - trailingBits = component[0] + trailingBits = oct2int(component[0]) if trailingBits > 7: raise error.PyAsn1Error( 'Trailing bits overflow %s' % trailingBits @@ -286,10 +220,13 @@ class BitStringPayloadDecoder(AbstractSimplePayloadDecoder): prepend=bitString, padding=trailingBits ) - yield self._createComponent(asn1Spec, tagSet, bitString, **options) + else: + raise error.SubstrateUnderrunError('No EOO seen before substrate ends') + + return self._createComponent(asn1Spec, tagSet, bitString, **options), substrate -class OctetStringPayloadDecoder(AbstractSimplePayloadDecoder): +class OctetStringDecoder(AbstractSimpleDecoder): protoComponent = univ.OctetString('') supportConstructedForm = True @@ -297,22 +234,14 @@ class OctetStringPayloadDecoder(AbstractSimplePayloadDecoder): tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): + head, tail = substrate[:length], substrate[length:] + if substrateFun: - asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options) - - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return + return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), + substrate, length) if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check? 
- for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - - yield self._createComponent(asn1Spec, tagSet, chunk, **options) - - return + return self._createComponent(asn1Spec, tagSet, head, **options), tail if not self.supportConstructedForm: raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__) @@ -323,20 +252,15 @@ class OctetStringPayloadDecoder(AbstractSimplePayloadDecoder): # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector - header = b'' - - original_position = substrate.tell() - # head = popSubstream(substrate, length) - while substrate.tell() - original_position < length: - for component in decodeFun( - substrate, self.protoComponent, substrateFun=substrateFun, - **options): - if isinstance(component, SubstrateUnderrunError): - yield component + header = null + while head: + component, head = decodeFun(head, self.protoComponent, + substrateFun=substrateFun, + **options) header += component - yield self._createComponent(asn1Spec, tagSet, header, **options) + return self._createComponent(asn1Spec, tagSet, header, **options), tail def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, @@ -344,38 +268,32 @@ class OctetStringPayloadDecoder(AbstractSimplePayloadDecoder): **options): if substrateFun and substrateFun is not self.substrateCollector: asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options) - - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return + return substrateFun(asn1Object, substrate, length) # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector - header = b'' - - while True: # loop over fragments - - for component in decodeFun( - substrate, self.protoComponent, substrateFun=substrateFun, - allowEoo=True, **options): - - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break + header = null + while substrate: + component, substrate = decodeFun(substrate, + self.protoComponent, + substrateFun=substrateFun, + allowEoo=True, **options) if component is eoo.endOfOctets: break header += component - yield self._createComponent(asn1Spec, tagSet, header, **options) + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + return self._createComponent(asn1Spec, tagSet, header, **options), substrate -class NullPayloadDecoder(AbstractSimplePayloadDecoder): +class NullDecoder(AbstractSimpleDecoder): protoComponent = univ.Null('') def valueDecoder(self, substrate, asn1Spec, @@ -386,19 +304,17 @@ class NullPayloadDecoder(AbstractSimplePayloadDecoder): if tagSet[0].tagFormat != tag.tagFormatSimple: raise error.PyAsn1Error('Simple tag format expected') - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk + head, tail = substrate[:length], substrate[length:] component = self._createComponent(asn1Spec, tagSet, '', **options) - if chunk: + if head: raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length) - yield component + return component, tail -class ObjectIdentifierPayloadDecoder(AbstractSimplePayloadDecoder): +class ObjectIdentifierDecoder(AbstractSimpleDecoder): protoComponent = univ.ObjectIdentifier(()) def valueDecoder(self, substrate, asn1Spec, @@ -408,18 +324,17 @@ class 
ObjectIdentifierPayloadDecoder(AbstractSimplePayloadDecoder): if tagSet[0].tagFormat != tag.tagFormatSimple: raise error.PyAsn1Error('Simple tag format expected') - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - - if not chunk: + head, tail = substrate[:length], substrate[length:] + if not head: raise error.PyAsn1Error('Empty substrate') + head = octs2ints(head) + oid = () index = 0 - substrateLen = len(chunk) + substrateLen = len(head) while index < substrateLen: - subId = chunk[index] + subId = head[index] index += 1 if subId < 128: oid += (subId,) @@ -433,7 +348,7 @@ class ObjectIdentifierPayloadDecoder(AbstractSimplePayloadDecoder): raise error.SubstrateUnderrunError( 'Short substrate for sub-OID past %s' % (oid,) ) - nextSubId = chunk[index] + nextSubId = head[index] index += 1 oid += ((subId << 7) + nextSubId,) elif subId == 128: @@ -451,60 +366,12 @@ class ObjectIdentifierPayloadDecoder(AbstractSimplePayloadDecoder): elif oid[0] >= 80: oid = (2, oid[0] - 80) + oid[1:] else: - raise error.PyAsn1Error('Malformed first OID octet: %s' % chunk[0]) + raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0]) - yield self._createComponent(asn1Spec, tagSet, oid, **options) + return self._createComponent(asn1Spec, tagSet, oid, **options), tail -class RelativeOIDPayloadDecoder(AbstractSimplePayloadDecoder): - protoComponent = univ.RelativeOID(()) - - def valueDecoder(self, substrate, asn1Spec, - tagSet=None, length=None, state=None, - decodeFun=None, substrateFun=None, - **options): - if tagSet[0].tagFormat != tag.tagFormatSimple: - raise error.PyAsn1Error('Simple tag format expected') - - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - - if not chunk: - raise error.PyAsn1Error('Empty substrate') - - reloid = () - index = 0 - substrateLen = len(chunk) - while index < substrateLen: - subId = chunk[index] - index += 1 - if subId < 128: - reloid += (subId,) - elif subId > 128: - # Construct subid from a number of octets - nextSubId = subId - subId = 0 - while nextSubId >= 128: - subId = (subId << 7) + (nextSubId & 0x7F) - if index >= substrateLen: - raise error.SubstrateUnderrunError( - 'Short substrate for sub-OID past %s' % (reloid,) - ) - nextSubId = chunk[index] - index += 1 - reloid += ((subId << 7) + nextSubId,) - elif subId == 128: - # ASN.1 spec forbids leading zeros (0x80) in OID - # encoding, tolerating it opens a vulnerability. 
See - # https://www.esat.kuleuven.be/cosic/publications/article-1432.pdf - # page 7 - raise error.PyAsn1Error('Invalid octet 0x80 in RELATIVE-OID encoding') - - yield self._createComponent(asn1Spec, tagSet, reloid, **options) - - -class RealPayloadDecoder(AbstractSimplePayloadDecoder): +class RealDecoder(AbstractSimpleDecoder): protoComponent = univ.Real() def valueDecoder(self, substrate, asn1Spec, @@ -514,18 +381,15 @@ class RealPayloadDecoder(AbstractSimplePayloadDecoder): if tagSet[0].tagFormat != tag.tagFormatSimple: raise error.PyAsn1Error('Simple tag format expected') - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk + head, tail = substrate[:length], substrate[length:] - if not chunk: - yield self._createComponent(asn1Spec, tagSet, 0.0, **options) - return + if not head: + return self._createComponent(asn1Spec, tagSet, 0.0, **options), tail - fo = chunk[0] - chunk = chunk[1:] + fo = oct2int(head[0]) + head = head[1:] if fo & 0x80: # binary encoding - if not chunk: + if not head: raise error.PyAsn1Error("Incomplete floating-point value") if LOG: @@ -534,19 +398,19 @@ class RealPayloadDecoder(AbstractSimplePayloadDecoder): n = (fo & 0x03) + 1 if n == 4: - n = chunk[0] - chunk = chunk[1:] + n = oct2int(head[0]) + head = head[1:] - eo, chunk = chunk[:n], chunk[n:] + eo, head = head[:n], head[n:] - if not eo or not chunk: + if not eo or not head: raise error.PyAsn1Error('Real exponent screwed') - e = eo[0] & 0x80 and -1 or 0 + e = oct2int(eo[0]) & 0x80 and -1 or 0 while eo: # exponent e <<= 8 - e |= eo[0] + e |= oct2int(eo[0]) eo = eo[1:] b = fo >> 4 & 0x03 # base bits @@ -561,10 +425,10 @@ class RealPayloadDecoder(AbstractSimplePayloadDecoder): e *= 4 p = 0 - while chunk: # value + while head: # value p <<= 8 - p |= chunk[0] - chunk = chunk[1:] + p |= oct2int(head[0]) + head = head[1:] if fo & 0x40: # sign bit p = -p @@ -580,7 +444,7 @@ class RealPayloadDecoder(AbstractSimplePayloadDecoder): value = fo & 0x01 and '-inf' or 'inf' elif fo & 0xc0 == 0: # character encoding - if not chunk: + if not head: raise error.PyAsn1Error("Incomplete floating-point value") if LOG: @@ -588,13 +452,13 @@ class RealPayloadDecoder(AbstractSimplePayloadDecoder): try: if fo & 0x3 == 0x1: # NR1 - value = (int(chunk), 10, 0) + value = (int(head), 10, 0) elif fo & 0x3 == 0x2: # NR2 - value = float(chunk) + value = float(head) elif fo & 0x3 == 0x3: # NR3 - value = float(chunk) + value = float(head) else: raise error.SubstrateUnderrunError( @@ -611,60 +475,50 @@ class RealPayloadDecoder(AbstractSimplePayloadDecoder): 'Unknown encoding (tag %s)' % fo ) - yield self._createComponent(asn1Spec, tagSet, value, **options) + return self._createComponent(asn1Spec, tagSet, value, **options), tail -class AbstractConstructedPayloadDecoder(AbstractPayloadDecoder): +class AbstractConstructedDecoder(AbstractDecoder): protoComponent = None -class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): +class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): protoRecordComponent = None protoSequenceComponent = None def _getComponentTagMap(self, asn1Object, idx): - raise NotImplementedError + raise NotImplementedError() def _getComponentPositionByType(self, asn1Object, tagSet, idx): - raise NotImplementedError - - def _decodeComponentsSchemaless( - self, substrate, tagSet=None, decodeFun=None, - length=None, **options): - - asn1Object = None + raise NotImplementedError() + def _decodeComponents(self, substrate, tagSet=None, 
decodeFun=None, **options): components = [] componentTypes = set() - original_position = substrate.tell() - - while length == -1 or substrate.tell() < original_position + length: - for component in decodeFun(substrate, **options): - if isinstance(component, SubstrateUnderrunError): - yield component - - if length == -1 and component is eoo.endOfOctets: + while substrate: + component, substrate = decodeFun(substrate, **options) + if component is eoo.endOfOctets: break components.append(component) componentTypes.add(component.tagSet) - # Now we have to guess is it SEQUENCE/SET or SEQUENCE OF/SET OF - # The heuristics is: - # * 1+ components of different types -> likely SEQUENCE/SET - # * otherwise -> likely SEQUENCE OF/SET OF - if len(componentTypes) > 1: - protoComponent = self.protoRecordComponent + # Now we have to guess is it SEQUENCE/SET or SEQUENCE OF/SET OF + # The heuristics is: + # * 1+ components of different types -> likely SEQUENCE/SET + # * otherwise -> likely SEQUENCE OF/SET OF + if len(componentTypes) > 1: + protoComponent = self.protoRecordComponent - else: - protoComponent = self.protoSequenceComponent + else: + protoComponent = self.protoSequenceComponent - asn1Object = protoComponent.clone( - # construct tagSet from base tag from prototype ASN.1 object - # and additional tags recovered from the substrate - tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags) - ) + asn1Object = protoComponent.clone( + # construct tagSet from base tag from prototype ASN.1 object + # and additional tags recovered from the substrate + tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags) + ) if LOG: LOG('guessed %r container type (pass `asn1Spec` to guide the ' @@ -677,7 +531,7 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): matchTags=False, matchConstraints=False ) - yield asn1Object + return asn1Object, substrate def valueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, @@ -686,9 +540,9 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): if tagSet[0].tagFormat != tag.tagFormatConstructed: raise error.PyAsn1Error('Constructed tag format expected') - original_position = substrate.tell() + head, tail = substrate[:length], substrate[length:] - if substrateFun: + if substrateFun is not None: if asn1Spec is not None: asn1Object = asn1Spec.clone() @@ -698,36 +552,23 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): else: asn1Object = self.protoRecordComponent, self.protoSequenceComponent - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return + return substrateFun(asn1Object, substrate, length) if asn1Spec is None: - for asn1Object in self._decodeComponentsSchemaless( - substrate, tagSet=tagSet, decodeFun=decodeFun, - length=length, **options): - if isinstance(asn1Object, SubstrateUnderrunError): - yield asn1Object + asn1Object, trailing = self._decodeComponents( + head, tagSet=tagSet, decodeFun=decodeFun, **options + ) - if substrate.tell() < original_position + length: + if trailing: if LOG: - for trailing in readFromStream(substrate, context=options): - if isinstance(trailing, SubstrateUnderrunError): - yield trailing - LOG('Unused trailing %d octets encountered: %s' % ( len(trailing), debug.hexdump(trailing))) - yield asn1Object - - return + return asn1Object, tail asn1Object = asn1Spec.clone() asn1Object.clear() - options = self._passAsn1Object(asn1Object, options) - if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId): 
namedTypes = asn1Spec.componentType @@ -742,7 +583,7 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): seenIndices = set() idx = 0 - while substrate.tell() - original_position < length: + while head: if not namedTypes: componentType = None @@ -765,9 +606,7 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): 'Excessive components decoded at %r' % (asn1Spec,) ) - for component in decodeFun(substrate, componentType, **options): - if isinstance(component, SubstrateUnderrunError): - yield component + component, head = decodeFun(head, componentType, **options) if not isDeterministic and namedTypes: if isSetType: @@ -854,30 +693,30 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): for pos, containerElement in enumerate( containerValue): - stream = asSeekableStream(containerValue[pos].asOctets()) - - for component in decodeFun(stream, asn1Spec=openType, **options): - if isinstance(component, SubstrateUnderrunError): - yield component + component, rest = decodeFun( + containerValue[pos].asOctets(), + asn1Spec=openType, **options + ) containerValue[pos] = component else: - stream = asSeekableStream(asn1Object.getComponentByPosition(idx).asOctets()) - - for component in decodeFun(stream, asn1Spec=openType, **options): - if isinstance(component, SubstrateUnderrunError): - yield component + component, rest = decodeFun( + asn1Object.getComponentByPosition(idx).asOctets(), + asn1Spec=openType, **options + ) asn1Object.setComponentByPosition(idx, component) else: inconsistency = asn1Object.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {asn1Object.__class__.__name__} is inconsistent") + raise inconsistency else: + asn1Object = asn1Spec.clone() + asn1Object.clear() + componentType = asn1Spec.componentType if LOG: @@ -885,11 +724,8 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): idx = 0 - while substrate.tell() - original_position < length: - for component in decodeFun(substrate, componentType, **options): - if isinstance(component, SubstrateUnderrunError): - yield component - + while head: + component, head = decodeFun(head, componentType, **options) asn1Object.setComponentByPosition( idx, component, verifyConstraints=False, @@ -898,7 +734,7 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): idx += 1 - yield asn1Object + return asn1Object, tail def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, @@ -917,27 +753,17 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): else: asn1Object = self.protoRecordComponent, self.protoSequenceComponent - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return + return substrateFun(asn1Object, substrate, length) if asn1Spec is None: - for asn1Object in self._decodeComponentsSchemaless( - substrate, tagSet=tagSet, decodeFun=decodeFun, - length=length, **dict(options, allowEoo=True)): - if isinstance(asn1Object, SubstrateUnderrunError): - yield asn1Object - - yield asn1Object - - return + return self._decodeComponents( + substrate, tagSet=tagSet, decodeFun=decodeFun, + **dict(options, allowEoo=True) + ) asn1Object = asn1Spec.clone() asn1Object.clear() - options = self._passAsn1Object(asn1Object, options) - if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId): namedTypes = asn1Object.componentType @@ -951,10 +777,8 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): asn1Spec)) seenIndices = set() 
- idx = 0 - - while True: # loop over components + while substrate: if len(namedTypes) <= idx: asn1Spec = None @@ -977,21 +801,13 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): 'Excessive components decoded at %r' % (asn1Object,) ) - for component in decodeFun(substrate, asn1Spec, allowEoo=True, **options): - - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break - + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True, **options) if component is eoo.endOfOctets: break if not isDeterministic and namedTypes: if isSetType: idx = namedTypes.getPositionByType(component.effectiveTagSet) - elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted: idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx) @@ -1004,14 +820,17 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): seenIndices.add(idx) idx += 1 + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + if LOG: LOG('seen component indices %s' % seenIndices) if namedTypes: if not namedTypes.requiredComponents.issubset(seenIndices): - raise error.PyAsn1Error( - 'ASN.1 object %s has uninitialized ' - 'components' % asn1Object.__class__.__name__) + raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__) if namedTypes.hasOpenTypes: @@ -1073,37 +892,31 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): for pos, containerElement in enumerate( containerValue): - stream = asSeekableStream(containerValue[pos].asOctets()) - - for component in decodeFun(stream, asn1Spec=openType, - **dict(options, allowEoo=True)): - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break + component, rest = decodeFun( + containerValue[pos].asOctets(), + asn1Spec=openType, **dict(options, allowEoo=True) + ) containerValue[pos] = component else: - stream = asSeekableStream(asn1Object.getComponentByPosition(idx).asOctets()) - for component in decodeFun(stream, asn1Spec=openType, - **dict(options, allowEoo=True)): - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break + component, rest = decodeFun( + asn1Object.getComponentByPosition(idx).asOctets(), + asn1Spec=openType, **dict(options, allowEoo=True) + ) + if component is not eoo.endOfOctets: asn1Object.setComponentByPosition(idx, component) else: inconsistency = asn1Object.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {asn1Object.__class__.__name__} is inconsistent") + raise inconsistency else: + asn1Object = asn1Spec.clone() + asn1Object.clear() + componentType = asn1Spec.componentType if LOG: @@ -1111,16 +924,8 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): idx = 0 - while True: - - for component in decodeFun( - substrate, componentType, allowEoo=True, **options): - - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break + while substrate: + component, substrate = decodeFun(substrate, componentType, allowEoo=True, **options) if component is eoo.endOfOctets: break @@ -1133,42 +938,50 @@ class ConstructedPayloadDecoderBase(AbstractConstructedPayloadDecoder): idx += 1 - yield asn1Object + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + return asn1Object, substrate -class 
SequenceOrSequenceOfPayloadDecoder(ConstructedPayloadDecoderBase): +class SequenceOrSequenceOfDecoder(UniversalConstructedTypeDecoder): protoRecordComponent = univ.Sequence() protoSequenceComponent = univ.SequenceOf() -class SequencePayloadDecoder(SequenceOrSequenceOfPayloadDecoder): +class SequenceDecoder(SequenceOrSequenceOfDecoder): protoComponent = univ.Sequence() -class SequenceOfPayloadDecoder(SequenceOrSequenceOfPayloadDecoder): +class SequenceOfDecoder(SequenceOrSequenceOfDecoder): protoComponent = univ.SequenceOf() -class SetOrSetOfPayloadDecoder(ConstructedPayloadDecoderBase): +class SetOrSetOfDecoder(UniversalConstructedTypeDecoder): protoRecordComponent = univ.Set() protoSequenceComponent = univ.SetOf() -class SetPayloadDecoder(SetOrSetOfPayloadDecoder): +class SetDecoder(SetOrSetOfDecoder): protoComponent = univ.Set() -class SetOfPayloadDecoder(SetOrSetOfPayloadDecoder): + +class SetOfDecoder(SetOrSetOfDecoder): protoComponent = univ.SetOf() -class ChoicePayloadDecoder(ConstructedPayloadDecoderBase): +class ChoiceDecoder(AbstractConstructedDecoder): protoComponent = univ.Choice() def valueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): + head, tail = substrate[:length], substrate[length:] + if asn1Spec is None: asn1Object = self.protoComponent.clone(tagSet=tagSet) @@ -1176,31 +989,24 @@ class ChoicePayloadDecoder(ConstructedPayloadDecoderBase): asn1Object = asn1Spec.clone() if substrateFun: - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk - - return - - options = self._passAsn1Object(asn1Object, options) + return substrateFun(asn1Object, substrate, length) if asn1Object.tagSet == tagSet: if LOG: LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,)) - for component in decodeFun( - substrate, asn1Object.componentTagMap, **options): - if isinstance(component, SubstrateUnderrunError): - yield component + component, head = decodeFun( + head, asn1Object.componentTagMap, **options + ) else: if LOG: LOG('decoding %s as untagged CHOICE' % (tagSet,)) - for component in decodeFun( - substrate, asn1Object.componentTagMap, tagSet, length, - state, **options): - if isinstance(component, SubstrateUnderrunError): - yield component + component, head = decodeFun( + head, asn1Object.componentTagMap, + tagSet, length, state, **options + ) effectiveTagSet = component.effectiveTagSet @@ -1214,7 +1020,7 @@ class ChoicePayloadDecoder(ConstructedPayloadDecoderBase): innerFlag=False ) - yield asn1Object + return asn1Object, tail def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, @@ -1222,67 +1028,53 @@ class ChoicePayloadDecoder(ConstructedPayloadDecoderBase): **options): if asn1Spec is None: asn1Object = self.protoComponent.clone(tagSet=tagSet) - else: asn1Object = asn1Spec.clone() if substrateFun: - for chunk in substrateFun(asn1Object, substrate, length, options): - yield chunk + return substrateFun(asn1Object, substrate, length) - return + if asn1Object.tagSet == tagSet: + if LOG: + LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,)) - options = self._passAsn1Object(asn1Object, options) + component, substrate = decodeFun( + substrate, asn1Object.componentType.tagMapUnique, **options + ) - isTagged = asn1Object.tagSet == tagSet + # eat up EOO marker + eooMarker, substrate = decodeFun( + substrate, allowEoo=True, **options + ) + + if eooMarker is not eoo.endOfOctets: + raise error.PyAsn1Error('No EOO seen before substrate ends') + + else: + if 
LOG: + LOG('decoding %s as untagged CHOICE' % (tagSet,)) + + component, substrate = decodeFun( + substrate, asn1Object.componentType.tagMapUnique, + tagSet, length, state, **options + ) + + effectiveTagSet = component.effectiveTagSet if LOG: - LOG('decoding %s as %stagged CHOICE' % ( - tagSet, isTagged and 'explicitly ' or 'un')) + LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet)) - while True: + asn1Object.setComponentByType( + effectiveTagSet, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False, + innerFlag=False + ) - if isTagged: - iterator = decodeFun( - substrate, asn1Object.componentType.tagMapUnique, - **dict(options, allowEoo=True)) - - else: - iterator = decodeFun( - substrate, asn1Object.componentType.tagMapUnique, - tagSet, length, state, **dict(options, allowEoo=True)) - - for component in iterator: - - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break - - effectiveTagSet = component.effectiveTagSet - - if LOG: - LOG('decoded component %s, effective tag set ' - '%s' % (component, effectiveTagSet)) - - asn1Object.setComponentByType( - effectiveTagSet, component, - verifyConstraints=False, - matchTags=False, matchConstraints=False, - innerFlag=False - ) - - if not isTagged: - break - - if not isTagged or component is eoo.endOfOctets: - break - - yield asn1Object + return asn1Object, substrate -class AnyPayloadDecoder(AbstractSimplePayloadDecoder): +class AnyDecoder(AbstractSimpleDecoder): protoComponent = univ.Any() def valueDecoder(self, substrate, asn1Spec, @@ -1299,32 +1091,22 @@ class AnyPayloadDecoder(AbstractSimplePayloadDecoder): isUntagged = tagSet != asn1Spec.tagSet if isUntagged: - fullPosition = substrate.markedPosition - currentPosition = substrate.tell() + fullSubstrate = options['fullSubstrate'] - substrate.seek(fullPosition, os.SEEK_SET) - length += currentPosition - fullPosition + # untagged Any container, recover inner header substrate + length += len(fullSubstrate) - len(substrate) + substrate = fullSubstrate if LOG: - for chunk in peekIntoStream(substrate, length): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - LOG('decoding as untagged ANY, substrate ' - '%s' % debug.hexdump(chunk)) + LOG('decoding as untagged ANY, substrate %s' % debug.hexdump(substrate)) if substrateFun: - for chunk in substrateFun( - self._createComponent(asn1Spec, tagSet, noValue, **options), - substrate, length, options): - yield chunk + return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), + substrate, length) - return + head, tail = substrate[:length], substrate[length:] - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - - yield self._createComponent(asn1Spec, tagSet, chunk, **options) + return self._createComponent(asn1Spec, tagSet, head, **options), tail def indefLenValueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, @@ -1341,36 +1123,26 @@ class AnyPayloadDecoder(AbstractSimplePayloadDecoder): if isTagged: # tagged Any type -- consume header substrate - chunk = b'' + header = null if LOG: LOG('decoding as tagged ANY') else: - # TODO: Seems not to be tested - fullPosition = substrate.markedPosition - currentPosition = substrate.tell() + fullSubstrate = options['fullSubstrate'] - substrate.seek(fullPosition, os.SEEK_SET) - for chunk in readFromStream(substrate, currentPosition - fullPosition, options): - if isinstance(chunk, 
SubstrateUnderrunError): - yield chunk + # untagged Any, recover header substrate + header = fullSubstrate[:-len(substrate)] if LOG: - LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(chunk)) + LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(header)) # Any components do not inherit initial tag asn1Spec = self.protoComponent if substrateFun and substrateFun is not self.substrateCollector: - asn1Object = self._createComponent( - asn1Spec, tagSet, noValue, **options) - - for chunk in substrateFun( - asn1Object, chunk + substrate, length + len(chunk), options): - yield chunk - - return + asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options) + return substrateFun(asn1Object, header + substrate, length + len(header)) if LOG: LOG('assembling constructed serialization') @@ -1378,135 +1150,131 @@ class AnyPayloadDecoder(AbstractSimplePayloadDecoder): # All inner fragments are of the same type, treat them as octet string substrateFun = self.substrateCollector - while True: # loop over fragments - - for component in decodeFun( - substrate, asn1Spec, substrateFun=substrateFun, - allowEoo=True, **options): - - if isinstance(component, SubstrateUnderrunError): - yield component - - if component is eoo.endOfOctets: - break - + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, + substrateFun=substrateFun, + allowEoo=True, **options) if component is eoo.endOfOctets: break - chunk += component - - if substrateFun: - yield chunk # TODO: Weird + header += component else: - yield self._createComponent(asn1Spec, tagSet, chunk, **options) + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + if substrateFun: + return header, substrate + + else: + return self._createComponent(asn1Spec, tagSet, header, **options), substrate # character string types -class UTF8StringPayloadDecoder(OctetStringPayloadDecoder): +class UTF8StringDecoder(OctetStringDecoder): protoComponent = char.UTF8String() -class NumericStringPayloadDecoder(OctetStringPayloadDecoder): +class NumericStringDecoder(OctetStringDecoder): protoComponent = char.NumericString() -class PrintableStringPayloadDecoder(OctetStringPayloadDecoder): +class PrintableStringDecoder(OctetStringDecoder): protoComponent = char.PrintableString() -class TeletexStringPayloadDecoder(OctetStringPayloadDecoder): +class TeletexStringDecoder(OctetStringDecoder): protoComponent = char.TeletexString() -class VideotexStringPayloadDecoder(OctetStringPayloadDecoder): +class VideotexStringDecoder(OctetStringDecoder): protoComponent = char.VideotexString() -class IA5StringPayloadDecoder(OctetStringPayloadDecoder): +class IA5StringDecoder(OctetStringDecoder): protoComponent = char.IA5String() -class GraphicStringPayloadDecoder(OctetStringPayloadDecoder): +class GraphicStringDecoder(OctetStringDecoder): protoComponent = char.GraphicString() -class VisibleStringPayloadDecoder(OctetStringPayloadDecoder): +class VisibleStringDecoder(OctetStringDecoder): protoComponent = char.VisibleString() -class GeneralStringPayloadDecoder(OctetStringPayloadDecoder): +class GeneralStringDecoder(OctetStringDecoder): protoComponent = char.GeneralString() -class UniversalStringPayloadDecoder(OctetStringPayloadDecoder): +class UniversalStringDecoder(OctetStringDecoder): protoComponent = char.UniversalString() -class BMPStringPayloadDecoder(OctetStringPayloadDecoder): +class BMPStringDecoder(OctetStringDecoder): protoComponent = char.BMPString() # "useful" types -class 
ObjectDescriptorPayloadDecoder(OctetStringPayloadDecoder): +class ObjectDescriptorDecoder(OctetStringDecoder): protoComponent = useful.ObjectDescriptor() -class GeneralizedTimePayloadDecoder(OctetStringPayloadDecoder): +class GeneralizedTimeDecoder(OctetStringDecoder): protoComponent = useful.GeneralizedTime() -class UTCTimePayloadDecoder(OctetStringPayloadDecoder): +class UTCTimeDecoder(OctetStringDecoder): protoComponent = useful.UTCTime() -TAG_MAP = { - univ.Integer.tagSet: IntegerPayloadDecoder(), - univ.Boolean.tagSet: BooleanPayloadDecoder(), - univ.BitString.tagSet: BitStringPayloadDecoder(), - univ.OctetString.tagSet: OctetStringPayloadDecoder(), - univ.Null.tagSet: NullPayloadDecoder(), - univ.ObjectIdentifier.tagSet: ObjectIdentifierPayloadDecoder(), - univ.RelativeOID.tagSet: RelativeOIDPayloadDecoder(), - univ.Enumerated.tagSet: IntegerPayloadDecoder(), - univ.Real.tagSet: RealPayloadDecoder(), - univ.Sequence.tagSet: SequenceOrSequenceOfPayloadDecoder(), # conflicts with SequenceOf - univ.Set.tagSet: SetOrSetOfPayloadDecoder(), # conflicts with SetOf - univ.Choice.tagSet: ChoicePayloadDecoder(), # conflicts with Any +tagMap = { + univ.Integer.tagSet: IntegerDecoder(), + univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Null.tagSet: NullDecoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(), + univ.Enumerated.tagSet: IntegerDecoder(), + univ.Real.tagSet: RealDecoder(), + univ.Sequence.tagSet: SequenceOrSequenceOfDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SetOrSetOfDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any # character string types - char.UTF8String.tagSet: UTF8StringPayloadDecoder(), - char.NumericString.tagSet: NumericStringPayloadDecoder(), - char.PrintableString.tagSet: PrintableStringPayloadDecoder(), - char.TeletexString.tagSet: TeletexStringPayloadDecoder(), - char.VideotexString.tagSet: VideotexStringPayloadDecoder(), - char.IA5String.tagSet: IA5StringPayloadDecoder(), - char.GraphicString.tagSet: GraphicStringPayloadDecoder(), - char.VisibleString.tagSet: VisibleStringPayloadDecoder(), - char.GeneralString.tagSet: GeneralStringPayloadDecoder(), - char.UniversalString.tagSet: UniversalStringPayloadDecoder(), - char.BMPString.tagSet: BMPStringPayloadDecoder(), + char.UTF8String.tagSet: UTF8StringDecoder(), + char.NumericString.tagSet: NumericStringDecoder(), + char.PrintableString.tagSet: PrintableStringDecoder(), + char.TeletexString.tagSet: TeletexStringDecoder(), + char.VideotexString.tagSet: VideotexStringDecoder(), + char.IA5String.tagSet: IA5StringDecoder(), + char.GraphicString.tagSet: GraphicStringDecoder(), + char.VisibleString.tagSet: VisibleStringDecoder(), + char.GeneralString.tagSet: GeneralStringDecoder(), + char.UniversalString.tagSet: UniversalStringDecoder(), + char.BMPString.tagSet: BMPStringDecoder(), # useful types - useful.ObjectDescriptor.tagSet: ObjectDescriptorPayloadDecoder(), - useful.GeneralizedTime.tagSet: GeneralizedTimePayloadDecoder(), - useful.UTCTime.tagSet: UTCTimePayloadDecoder() + useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(), + useful.UTCTime.tagSet: UTCTimeDecoder() } # Type-to-codec map for ambiguous ASN.1 types -TYPE_MAP = { - univ.Set.typeId: SetPayloadDecoder(), - univ.SetOf.typeId: SetOfPayloadDecoder(), - univ.Sequence.typeId: SequencePayloadDecoder(), - univ.SequenceOf.typeId: 
SequenceOfPayloadDecoder(), - univ.Choice.typeId: ChoicePayloadDecoder(), - univ.Any.typeId: AnyPayloadDecoder() +typeMap = { + univ.Set.typeId: SetDecoder(), + univ.SetOf.typeId: SetOfDecoder(), + univ.Sequence.typeId: SequenceDecoder(), + univ.SequenceOf.typeId: SequenceOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AnyDecoder() } # Put in non-ambiguous types for faster codec lookup -for typeDecoder in TAG_MAP.values(): +for typeDecoder in tagMap.values(): if typeDecoder.protoComponent is not None: typeId = typeDecoder.protoComponent.__class__.typeId - if typeId is not None and typeId not in TYPE_MAP: - TYPE_MAP[typeId] = typeDecoder + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder (stDecodeTag, @@ -1521,81 +1289,65 @@ for typeDecoder in TAG_MAP.values(): stStop) = [x for x in range(10)] -EOO_SENTINEL = bytes((0, 0)) - - -class SingleItemDecoder(object): +class Decoder(object): defaultErrorState = stErrorCondition #defaultErrorState = stDumpRawValue - defaultRawDecoder = AnyPayloadDecoder() - + defaultRawDecoder = AnyDecoder() supportIndefLength = True - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - def __init__(self, tagMap=_MISSING, typeMap=_MISSING, **ignored): - self._tagMap = tagMap if tagMap is not _MISSING else self.TAG_MAP - self._typeMap = typeMap if typeMap is not _MISSING else self.TYPE_MAP - + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap # Tag & TagSet objects caches - self._tagCache = {} - self._tagSetCache = {} + self.__tagCache = {} + self.__tagSetCache = {} + self.__eooSentinel = ints2octs((0, 0)) def __call__(self, substrate, asn1Spec=None, tagSet=None, length=None, state=stDecodeTag, decodeFun=None, substrateFun=None, **options): - allowEoo = options.pop('allowEoo', False) - if LOG: - LOG('decoder called at scope %s with state %d, working with up ' - 'to %s octets of substrate: ' - '%s' % (debug.scope, state, length, substrate)) + LOG('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) + + allowEoo = options.pop('allowEoo', False) # Look for end-of-octets sentinel if allowEoo and self.supportIndefLength: - - for eoo_candidate in readFromStream(substrate, 2, options): - if isinstance(eoo_candidate, SubstrateUnderrunError): - yield eoo_candidate - - if eoo_candidate == EOO_SENTINEL: + if substrate[:2] == self.__eooSentinel: if LOG: LOG('end-of-octets sentinel found') - yield eoo.endOfOctets - return - - else: - substrate.seek(-2, os.SEEK_CUR) - - tagMap = self._tagMap - typeMap = self._typeMap - tagCache = self._tagCache - tagSetCache = self._tagSetCache + return eoo.endOfOctets, substrate[2:] value = noValue - substrate.markedPosition = substrate.tell() + tagMap = self.__tagMap + typeMap = self.__typeMap + tagCache = self.__tagCache + tagSetCache = self.__tagSetCache + + fullSubstrate = substrate while state is not stStop: if state is stDecodeTag: + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on tag decoding' + ) + # Decode tag isShortTag = True - - for firstByte in readFromStream(substrate, 1, options): - if isinstance(firstByte, SubstrateUnderrunError): - yield firstByte - - firstOctet = ord(firstByte) + firstOctet = substrate[0] + substrate = substrate[1:] try: lastTag = tagCache[firstOctet] except KeyError: - integerTag = firstOctet + integerTag = oct2int(firstOctet) tagClass = integerTag & 0xC0 
tagFormat = integerTag & 0x20 tagId = integerTag & 0x1F @@ -1605,23 +1357,21 @@ class SingleItemDecoder(object): lengthOctetIdx = 0 tagId = 0 - while True: - for integerByte in readFromStream(substrate, 1, options): - if isinstance(integerByte, SubstrateUnderrunError): - yield integerByte + try: + while True: + integerTag = oct2int(substrate[lengthOctetIdx]) + lengthOctetIdx += 1 + tagId <<= 7 + tagId |= (integerTag & 0x7F) + if not integerTag & 0x80: + break - if not integerByte: - raise error.SubstrateUnderrunError( - 'Short octet stream on long tag decoding' - ) + substrate = substrate[lengthOctetIdx:] - integerTag = ord(integerByte) - lengthOctetIdx += 1 - tagId <<= 7 - tagId |= (integerTag & 0x7F) - - if not integerTag & 0x80: - break + except IndexError: + raise error.SubstrateUnderrunError( + 'Short octet stream on long tag decoding' + ) lastTag = tag.Tag( tagClass=tagClass, tagFormat=tagFormat, tagId=tagId @@ -1653,22 +1403,21 @@ class SingleItemDecoder(object): if state is stDecodeLength: # Decode length - for firstOctet in readFromStream(substrate, 1, options): - if isinstance(firstOctet, SubstrateUnderrunError): - yield firstOctet + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on length decoding' + ) - firstOctet = ord(firstOctet) + firstOctet = oct2int(substrate[0]) if firstOctet < 128: + size = 1 length = firstOctet elif firstOctet > 128: size = firstOctet & 0x7F # encoded in size bytes - for encodedLength in readFromStream(substrate, size, options): - if isinstance(encodedLength, SubstrateUnderrunError): - yield encodedLength - encodedLength = list(encodedLength) + encodedLength = octs2ints(substrate[1:size + 1]) # missing check on maximum size, which shouldn't be a # problem, we can handle more than is possible if len(encodedLength) != size: @@ -1682,16 +1431,24 @@ class SingleItemDecoder(object): length |= lengthOctet size += 1 - else: # 128 means indefinite + else: + size = 1 length = -1 - if length == -1 and not self.supportIndefLength: - raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + substrate = substrate[size:] + + if length == -1: + if not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + + else: + if len(substrate) < length: + raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate))) state = stGetValueDecoder if LOG: - LOG('value length decoded into %d' % length) + LOG('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) if state is stGetValueDecoder: if asn1Spec is None: @@ -1808,46 +1565,28 @@ class SingleItemDecoder(object): if state is stDecodeValue: if not options.get('recursiveFlag', True) and not substrateFun: # deprecate this - def substrateFun(asn1Object, _substrate, _length, _options): - """Legacy hack to keep the recursiveFlag=False option supported. + substrateFun = lambda a, b, c: (a, b[:c]) - The decode(..., substrateFun=userCallback) option was introduced in 0.1.4 as a generalization - of the old recursiveFlag=False option. Users should pass their callback instead of using - recursiveFlag. 
- """ - yield asn1Object - - original_position = substrate.tell() + options.update(fullSubstrate=fullSubstrate) if length == -1: # indef length - for value in concreteDecoder.indefLenValueDecoder( - substrate, asn1Spec, - tagSet, length, stGetValueDecoder, - self, substrateFun, **options): - if isinstance(value, SubstrateUnderrunError): - yield value + value, substrate = concreteDecoder.indefLenValueDecoder( + substrate, asn1Spec, + tagSet, length, stGetValueDecoder, + self, substrateFun, + **options + ) else: - for value in concreteDecoder.valueDecoder( - substrate, asn1Spec, - tagSet, length, stGetValueDecoder, - self, substrateFun, **options): - if isinstance(value, SubstrateUnderrunError): - yield value - - bytesRead = substrate.tell() - original_position - if not substrateFun and bytesRead != length: - raise PyAsn1Error( - "Read %s bytes instead of expected %s." % (bytesRead, length)) - elif substrateFun and bytesRead > length: - # custom substrateFun may be used for partial decoding, reading less is expected there - raise PyAsn1Error( - "Read %s bytes are more than expected %s." % (bytesRead, length)) + value, substrate = concreteDecoder.valueDecoder( + substrate, asn1Spec, + tagSet, length, stGetValueDecoder, + self, substrateFun, + **options + ) if LOG: - LOG('codec %s yields type %s, value:\n%s\n...' % ( - concreteDecoder.__class__.__name__, value.__class__.__name__, - isinstance(value, base.Asn1Item) and value.prettyPrint() or value)) + LOG('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or '')) state = stStop break @@ -1857,7 +1596,7 @@ class SingleItemDecoder(object): tagSet[0].tagFormat == tag.tagFormatConstructed and tagSet[0].tagClass != tag.tagClassUniversal): # Assume explicit tagging - concreteDecoder = rawPayloadDecoder + concreteDecoder = explicitTagDecoder state = stDecodeValue else: @@ -1884,247 +1623,9 @@ class SingleItemDecoder(object): debug.scope.pop() LOG('decoder left scope %s, call completed' % debug.scope) - yield value + return value, substrate -class StreamingDecoder(object): - """Create an iterator that turns BER/CER/DER byte stream into ASN.1 objects. - - On each iteration, consume whatever BER/CER/DER serialization is - available in the `substrate` stream-like object and turns it into - one or more, possibly nested, ASN.1 objects. - - Parameters - ---------- - substrate: :py:class:`file`, :py:class:`io.BytesIO` - BER/CER/DER serialization in form of a byte stream - - Keyword Args - ------------ - asn1Spec: :py:class:`~pyasn1.type.base.PyAsn1Item` - A pyasn1 type object to act as a template guiding the decoder. - Depending on the ASN.1 structure being decoded, `asn1Spec` may - or may not be required. One of the reasons why `asn1Spec` may - me required is that ASN.1 structure is encoded in the *IMPLICIT* - tagging mode. - - Yields - ------ - : :py:class:`~pyasn1.type.base.PyAsn1Item`, :py:class:`~pyasn1.error.SubstrateUnderrunError` - Decoded ASN.1 object (possibly, nested) or - :py:class:`~pyasn1.error.SubstrateUnderrunError` object indicating - insufficient BER/CER/DER serialization on input to fully recover ASN.1 - objects from it. - - In the latter case the caller is advised to ensure some more data in - the input stream, then call the iterator again. The decoder will resume - the decoding process using the newly arrived data. 
- - The `context` property of :py:class:`~pyasn1.error.SubstrateUnderrunError` - object might hold a reference to the partially populated ASN.1 object - being reconstructed. - - Raises - ------ - ~pyasn1.error.PyAsn1Error, ~pyasn1.error.EndOfStreamError - `PyAsn1Error` on deserialization error, `EndOfStreamError` on - premature stream closure. - - Examples - -------- - Decode BER serialisation without ASN.1 schema - - .. code-block:: pycon - - >>> stream = io.BytesIO( - ... b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03') - >>> - >>> for asn1Object in StreamingDecoder(stream): - ... print(asn1Object) - >>> - SequenceOf: - 1 2 3 - - Decode BER serialisation with ASN.1 schema - - .. code-block:: pycon - - >>> stream = io.BytesIO( - ... b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03') - >>> - >>> schema = SequenceOf(componentType=Integer()) - >>> - >>> decoder = StreamingDecoder(stream, asn1Spec=schema) - >>> for asn1Object in decoder: - ... print(asn1Object) - >>> - SequenceOf: - 1 2 3 - """ - - SINGLE_ITEM_DECODER = SingleItemDecoder - - def __init__(self, substrate, asn1Spec=None, **options): - self._singleItemDecoder = self.SINGLE_ITEM_DECODER(**options) - self._substrate = asSeekableStream(substrate) - self._asn1Spec = asn1Spec - self._options = options - - def __iter__(self): - while True: - for asn1Object in self._singleItemDecoder( - self._substrate, self._asn1Spec, **self._options): - yield asn1Object - - for chunk in isEndOfStream(self._substrate): - if isinstance(chunk, SubstrateUnderrunError): - yield - - break - - if chunk: - break - - -class Decoder(object): - """Create a BER decoder object. - - Parse BER/CER/DER octet-stream into one, possibly nested, ASN.1 object. - """ - STREAMING_DECODER = StreamingDecoder - - @classmethod - def __call__(cls, substrate, asn1Spec=None, **options): - """Turns BER/CER/DER octet stream into an ASN.1 object. - - Takes BER/CER/DER octet-stream in form of :py:class:`bytes` - and decode it into an ASN.1 object - (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which - may be a scalar or an arbitrary nested structure. - - Parameters - ---------- - substrate: :py:class:`bytes` - BER/CER/DER octet-stream to parse - - Keyword Args - ------------ - asn1Spec: :py:class:`~pyasn1.type.base.PyAsn1Item` - A pyasn1 type object (:py:class:`~pyasn1.type.base.PyAsn1Item` - derivative) to act as a template guiding the decoder. - Depending on the ASN.1 structure being decoded, `asn1Spec` may or - may not be required. Most common reason for it to require is that - ASN.1 structure is encoded in *IMPLICIT* tagging mode. - - substrateFun: :py:class:`Union[ - Callable[[pyasn1.type.base.PyAsn1Item, bytes, int], - Tuple[pyasn1.type.base.PyAsn1Item, bytes]], - Callable[[pyasn1.type.base.PyAsn1Item, io.BytesIO, int, dict], - Generator[Union[pyasn1.type.base.PyAsn1Item, - pyasn1.error.SubstrateUnderrunError], - None, None]] - ]` - User callback meant to generalize special use cases like non-recursive or - partial decoding. A 3-arg non-streaming variant is supported for backwards - compatiblilty in addition to the newer 4-arg streaming variant. - The callback will receive the uninitialized object recovered from substrate - as 1st argument, the uninterpreted payload as 2nd argument, and the length - of the uninterpreted payload as 3rd argument. The streaming variant will - additionally receive the decode(..., **options) kwargs as 4th argument. 
- The non-streaming variant shall return an object that will be propagated - as decode() return value as 1st item, and the remainig payload for further - decode passes as 2nd item. - The streaming variant shall yield an object that will be propagated as - decode() return value, and leave the remaining payload in the stream. - - Returns - ------- - : :py:class:`tuple` - A tuple of :py:class:`~pyasn1.type.base.PyAsn1Item` object - recovered from BER/CER/DER substrate and the unprocessed trailing - portion of the `substrate` (may be empty) - - Raises - ------ - : :py:class:`~pyasn1.error.PyAsn1Error` - :py:class:`~pyasn1.error.SubstrateUnderrunError` on insufficient - input or :py:class:`~pyasn1.error.PyAsn1Error` on decoding error. - - Examples - -------- - Decode BER/CER/DER serialisation without ASN.1 schema - - .. code-block:: pycon - - >>> s, unprocessed = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03') - >>> str(s) - SequenceOf: - 1 2 3 - - Decode BER/CER/DER serialisation with ASN.1 schema - - .. code-block:: pycon - - >>> seq = SequenceOf(componentType=Integer()) - >>> s, unprocessed = decode( - b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq) - >>> str(s) - SequenceOf: - 1 2 3 - - """ - substrate = asSeekableStream(substrate) - - if "substrateFun" in options: - origSubstrateFun = options["substrateFun"] - - def substrateFunWrapper(asn1Object, substrate, length, options=None): - """Support both 0.4 and 0.5 style APIs. - - substrateFun API has changed in 0.5 for use with streaming decoders. To stay backwards compatible, - we first try if we received a streaming user callback. If that fails,we assume we've received a - non-streaming v0.4 user callback and convert it for streaming on the fly - """ - try: - substrate_gen = origSubstrateFun(asn1Object, substrate, length, options) - except TypeError as _value: - if _value.__traceback__.tb_next: - # Traceback depth > 1 means TypeError from inside user provided function - raise - # invariant maintained at Decoder.__call__ entry - assert isinstance(substrate, io.BytesIO) # nosec assert_used - substrate_gen = Decoder._callSubstrateFunV4asV5(origSubstrateFun, asn1Object, substrate, length) - for value in substrate_gen: - yield value - - options["substrateFun"] = substrateFunWrapper - - streamingDecoder = cls.STREAMING_DECODER( - substrate, asn1Spec, **options) - - for asn1Object in streamingDecoder: - if isinstance(asn1Object, SubstrateUnderrunError): - raise error.SubstrateUnderrunError('Short substrate on input') - - try: - tail = next(readFromStream(substrate)) - - except error.EndOfStreamError: - tail = b'' - - return asn1Object, tail - - @staticmethod - def _callSubstrateFunV4asV5(substrateFunV4, asn1Object, substrate, length): - substrate_bytes = substrate.read() - if length == -1: - length = len(substrate_bytes) - value, nextSubstrate = substrateFunV4(asn1Object, substrate_bytes, length) - nbytes = substrate.write(nextSubstrate) - substrate.truncate() - substrate.seek(-nbytes, os.SEEK_CUR) - yield value - #: Turns BER octet stream into an ASN.1 object. #: #: Takes BER octet-stream and decode it into an ASN.1 object @@ -2133,7 +1634,7 @@ class Decoder(object): #: #: Parameters #: ---------- -#: substrate: :py:class:`bytes` +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) #: BER octet-stream #: #: Keyword Args @@ -2154,11 +1655,6 @@ class Decoder(object): #: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError #: On decoding errors #: -#: Notes -#: ----- -#: This function is deprecated. 
Please use :py:class:`Decoder` or -#: :py:class:`StreamingDecoder` class instance. -#: #: Examples #: -------- #: Decode BER serialisation without ASN.1 schema @@ -2180,10 +1676,7 @@ class Decoder(object): #: SequenceOf: #: 1 2 3 #: -decode = Decoder() +decode = Decoder(tagMap, typeMap) -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +# XXX +# non-recursive decoding; return position rather than substrate diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/encoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/encoder.py index d16fb1f8..778aa867 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/encoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/encoder.py @@ -1,23 +1,23 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import sys -import warnings from pyasn1 import debug from pyasn1 import error from pyasn1.codec.ber import eoo -from pyasn1.compat import _MISSING from pyasn1.compat.integer import to_bytes +from pyasn1.compat.octets import (int2oct, oct2int, ints2octs, null, + str2octs, isOctetsType) from pyasn1.type import char from pyasn1.type import tag from pyasn1.type import univ from pyasn1.type import useful -__all__ = ['Encoder', 'encode'] +__all__ = ['encode'] LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER) @@ -27,7 +27,7 @@ class AbstractItemEncoder(object): # An outcome of otherwise legit call `encodeFun(eoo.endOfOctets)` eooIntegerSubstrate = (0, 0) - eooOctetsSubstrate = bytes(eooIntegerSubstrate) + eooOctetsSubstrate = ints2octs(eooIntegerSubstrate) # noinspection PyMethodMayBeStatic def encodeTag(self, singleTag, isConstructed): @@ -89,7 +89,7 @@ class AbstractItemEncoder(object): defMode = options.get('defMode', True) - substrate = b'' + substrate = null for idx, singleTag in enumerate(tagSet.superTags): @@ -102,9 +102,10 @@ class AbstractItemEncoder(object): value, asn1Spec, encodeFun, **options ) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: + exc = sys.exc_info() raise error.PyAsn1Error( - 'Error encoding %r: %s' % (value, exc)) + 'Error encoding %r: %s' % (value, exc[1])) if LOG: LOG('encoded %svalue %s into %s' % ( @@ -125,16 +126,16 @@ class AbstractItemEncoder(object): if LOG: LOG('encoded %stag %s into %s' % ( isConstructed and 'constructed ' or '', - singleTag, debug.hexdump(bytes(header)))) + singleTag, debug.hexdump(ints2octs(header)))) header += self.encodeLength(len(substrate), defModeOverride) if LOG: LOG('encoded %s octets (tag + payload) into %s' % ( - len(substrate), debug.hexdump(bytes(header)))) + len(substrate), debug.hexdump(ints2octs(header)))) if isOctets: - substrate = bytes(header) + substrate + substrate = ints2octs(header) + substrate if not defModeOverride: substrate += self.eooOctetsSubstrate @@ -146,14 +147,14 @@ class AbstractItemEncoder(object): substrate += self.eooIntegerSubstrate if not isOctets: - substrate = bytes(substrate) + substrate = ints2octs(substrate) return substrate class EndOfOctetsEncoder(AbstractItemEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): - return b'', False, True + return null, False, True class 
BooleanEncoder(AbstractItemEncoder): @@ -198,7 +199,7 @@ class BitStringEncoder(AbstractItemEncoder): maxChunkSize = options.get('maxChunkSize', 0) if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8: substrate = alignedValue.asOctets() - return bytes((len(substrate) * 8 - valueLength,)) + substrate, False, True + return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True if LOG: LOG('encoding into up to %s-octet chunks' % maxChunkSize) @@ -215,7 +216,7 @@ class BitStringEncoder(AbstractItemEncoder): alignedValue = alignedValue.clone(tagSet=tagSet) stop = 0 - substrate = b'' + substrate = null while stop < valueLength: start = stop stop = min(start + maxChunkSize * 8, valueLength) @@ -231,7 +232,7 @@ class OctetStringEncoder(AbstractItemEncoder): if asn1Spec is None: substrate = value.asOctets() - elif not isinstance(value, bytes): + elif not isOctetsType(value): substrate = asn1Spec.clone(value).asOctets() else: @@ -259,7 +260,7 @@ class OctetStringEncoder(AbstractItemEncoder): asn1Spec = value.clone(tagSet=tagSet) - elif not isinstance(value, bytes): + elif not isOctetsType(value): baseTag = asn1Spec.tagSet.baseTag # strip off explicit tags @@ -272,7 +273,7 @@ class OctetStringEncoder(AbstractItemEncoder): asn1Spec = asn1Spec.clone(tagSet=tagSet) pos = 0 - substrate = b'' + substrate = null while True: chunk = value[pos:pos + maxChunkSize] @@ -289,7 +290,7 @@ class NullEncoder(AbstractItemEncoder): supportIndefLenMode = False def encodeValue(self, value, asn1Spec, encodeFun, **options): - return b'', False, True + return null, False, True class ObjectIdentifierEncoder(AbstractItemEncoder): @@ -351,41 +352,8 @@ class ObjectIdentifierEncoder(AbstractItemEncoder): return octets, False, False -class RelativeOIDEncoder(AbstractItemEncoder): - supportIndefLenMode = False - - def encodeValue(self, value, asn1Spec, encodeFun, **options): - if asn1Spec is not None: - value = asn1Spec.clone(value) - - octets = () - - # Cycle through subIds - for subOid in value.asTuple(): - if 0 <= subOid <= 127: - # Optimize for the common case - octets += (subOid,) - - elif subOid > 127: - # Pack large Sub-Object IDs - res = (subOid & 0x7f,) - subOid >>= 7 - - while subOid: - res = (0x80 | (subOid & 0x7f),) + res - subOid >>= 7 - - # Add packed Sub-Object ID to resulted RELATIVE-OID - octets += res - - else: - raise error.PyAsn1Error('Negative RELATIVE-OID arc %s at %s' % (subOid, value)) - - return octets, False, False - - class RealEncoder(AbstractItemEncoder): - supportIndefLenMode = False + supportIndefLenMode = 0 binEncBase = 2 # set to None to choose encoding base automatically @staticmethod @@ -462,13 +430,13 @@ class RealEncoder(AbstractItemEncoder): m, b, e = value if not m: - return b'', False, True + return null, False, True if b == 10: if LOG: LOG('encoding REAL into character form') - return b'\x03%dE%s%d' % (m, e == 0 and b'+' or b'', e), False, True + return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True elif b == 2: fo = 0x80 # binary encoding @@ -505,20 +473,20 @@ class RealEncoder(AbstractItemEncoder): raise error.PyAsn1Error('Scale factor overflow') # bug if raised fo |= sf << 2 - eo = b'' + eo = null if e == 0 or e == -1: - eo = bytes((e & 0xff,)) + eo = int2oct(e & 0xff) else: while e not in (0, -1): - eo = bytes((e & 0xff,)) + eo + eo = int2oct(e & 0xff) + eo e >>= 8 - if e == 0 and eo and eo[0] & 0x80: - eo = bytes((0,)) + eo + if e == 0 and eo and oct2int(eo[0]) & 0x80: + eo = int2oct(0) + eo - if e == -1 and eo and not (eo[0] & 0x80): - eo = 
bytes((0xff,)) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): + eo = int2oct(0xff) + eo n = len(eo) if n > 0xff: @@ -535,15 +503,15 @@ class RealEncoder(AbstractItemEncoder): else: fo |= 3 - eo = bytes((n & 0xff,)) + eo + eo = int2oct(n & 0xff) + eo - po = b'' + po = null while m: - po = bytes((m & 0xff,)) + po + po = int2oct(m & 0xff) + po m >>= 8 - substrate = bytes((fo,)) + eo + po + substrate = int2oct(fo) + eo + po return substrate, False, True @@ -558,7 +526,7 @@ class SequenceEncoder(AbstractItemEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): - substrate = b'' + substrate = null omitEmptyOptionals = options.get( 'omitEmptyOptionals', self.omitEmptyOptionals) @@ -571,8 +539,7 @@ class SequenceEncoder(AbstractItemEncoder): # instance of ASN.1 schema inconsistency = value.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {value.__class__.__name__} is inconsistent") + raise inconsistency namedTypes = value.componentType @@ -680,8 +647,7 @@ class SequenceOfEncoder(AbstractItemEncoder): if asn1Spec is None: inconsistency = value.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {value.__class__.__name__} is inconsistent") + raise inconsistency else: asn1Spec = asn1Spec.componentType @@ -709,7 +675,7 @@ class SequenceOfEncoder(AbstractItemEncoder): chunks = self._encodeComponents( value, asn1Spec, encodeFun, **options) - return b''.join(chunks), True, True + return null.join(chunks), True, True class ChoiceEncoder(AbstractItemEncoder): @@ -734,13 +700,13 @@ class AnyEncoder(OctetStringEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): if asn1Spec is None: value = value.asOctets() - elif not isinstance(value, bytes): + elif not isOctetsType(value): value = asn1Spec.clone(value).asOctets() return value, not options.get('defMode', True), True -TAG_MAP = { +tagMap = { eoo.endOfOctets.tagSet: EndOfOctetsEncoder(), univ.Boolean.tagSet: BooleanEncoder(), univ.Integer.tagSet: IntegerEncoder(), @@ -748,7 +714,6 @@ TAG_MAP = { univ.OctetString.tagSet: OctetStringEncoder(), univ.Null.tagSet: NullEncoder(), univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), - univ.RelativeOID.tagSet: RelativeOIDEncoder(), univ.Enumerated.tagSet: IntegerEncoder(), univ.Real.tagSet: RealEncoder(), # Sequence & Set have same tags as SequenceOf & SetOf @@ -774,14 +739,13 @@ TAG_MAP = { } # Put in ambiguous & non-ambiguous types for faster codec lookup -TYPE_MAP = { +typeMap = { univ.Boolean.typeId: BooleanEncoder(), univ.Integer.typeId: IntegerEncoder(), univ.BitString.typeId: BitStringEncoder(), univ.OctetString.typeId: OctetStringEncoder(), univ.Null.typeId: NullEncoder(), univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(), - univ.RelativeOID.typeId: RelativeOIDEncoder(), univ.Enumerated.typeId: IntegerEncoder(), univ.Real.typeId: RealEncoder(), # Sequence & Set have same tags as SequenceOf & SetOf @@ -810,16 +774,14 @@ TYPE_MAP = { } -class SingleItemEncoder(object): +class Encoder(object): fixedDefLengthMode = None fixedChunkSize = None - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - def __init__(self, tagMap=_MISSING, typeMap=_MISSING, **ignored): - self._tagMap = tagMap if tagMap is not _MISSING else self.TAG_MAP - self._typeMap = typeMap if typeMap is not _MISSING else self.TYPE_MAP + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap def __call__(self, value, asn1Spec=None, **options): try: @@ -833,11 +795,8 @@ class 
SingleItemEncoder(object): 'and "asn1Spec" not given' % (value,)) if LOG: - LOG('encoder called in %sdef mode, chunk size %s for type %s, ' - 'value:\n%s' % (not options.get('defMode', True) and 'in' or '', - options.get('maxChunkSize', 0), - asn1Spec is None and value.prettyPrintType() or - asn1Spec.prettyPrintType(), value)) + LOG('encoder called in %sdef mode, chunk size %s for ' + 'type %s, value:\n%s' % (not options.get('defMode', True) and 'in' or '', options.get('maxChunkSize', 0), asn1Spec is None and value.prettyPrintType() or asn1Spec.prettyPrintType(), value)) if self.fixedDefLengthMode is not None: options.update(defMode=self.fixedDefLengthMode) @@ -845,12 +804,12 @@ class SingleItemEncoder(object): if self.fixedChunkSize is not None: options.update(maxChunkSize=self.fixedChunkSize) + try: - concreteEncoder = self._typeMap[typeId] + concreteEncoder = self.__typeMap[typeId] if LOG: - LOG('using value codec %s chosen by type ID ' - '%s' % (concreteEncoder.__class__.__name__, typeId)) + LOG('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId)) except KeyError: if asn1Spec is None: @@ -862,38 +821,21 @@ class SingleItemEncoder(object): baseTagSet = tag.TagSet(tagSet.baseTag, tagSet.baseTag) try: - concreteEncoder = self._tagMap[baseTagSet] + concreteEncoder = self.__tagMap[baseTagSet] except KeyError: raise error.PyAsn1Error('No encoder for %r (%s)' % (value, tagSet)) if LOG: - LOG('using value codec %s chosen by tagSet ' - '%s' % (concreteEncoder.__class__.__name__, tagSet)) + LOG('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet)) substrate = concreteEncoder.encode(value, asn1Spec, self, **options) if LOG: - LOG('codec %s built %s octets of substrate: %s\nencoder ' - 'completed' % (concreteEncoder, len(substrate), - debug.hexdump(substrate))) + LOG('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate))) return substrate - -class Encoder(object): - SINGLE_ITEM_ENCODER = SingleItemEncoder - - def __init__(self, tagMap=_MISSING, typeMap=_MISSING, **options): - self._singleItemEncoder = self.SINGLE_ITEM_ENCODER( - tagMap=tagMap, typeMap=typeMap, **options - ) - - def __call__(self, pyObject, asn1Spec=None, **options): - return self._singleItemEncoder( - pyObject, asn1Spec=asn1Spec, **options) - - #: Turns ASN.1 object into BER octet stream. #: #: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) @@ -918,7 +860,7 @@ class Encoder(object): #: #: Returns #: ------- -#: : :py:class:`bytes` +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) #: Given ASN.1 object encoded into BER octetstream #: #: Raises @@ -945,10 +887,4 @@ class Encoder(object): #: >>> encode(seq) #: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03' #: -encode = Encoder() - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +encode = Encoder(tagMap, typeMap) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/eoo.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/eoo.py index 8c91a3d2..48eb859e 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/eoo.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/ber/eoo.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # from pyasn1.type import base from pyasn1.type import tag diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/decoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/decoder.py index d6890f01..3e86fd0b 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/decoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/decoder.py @@ -1,93 +1,64 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -import warnings - from pyasn1 import error -from pyasn1.codec.streaming import readFromStream from pyasn1.codec.ber import decoder +from pyasn1.compat.octets import oct2int from pyasn1.type import univ -__all__ = ['decode', 'StreamingDecoder'] - -SubstrateUnderrunError = error.SubstrateUnderrunError +__all__ = ['decode'] -class BooleanPayloadDecoder(decoder.AbstractSimplePayloadDecoder): +class BooleanDecoder(decoder.AbstractSimpleDecoder): protoComponent = univ.Boolean(0) def valueDecoder(self, substrate, asn1Spec, tagSet=None, length=None, state=None, decodeFun=None, substrateFun=None, **options): - - if length != 1: + head, tail = substrate[:length], substrate[length:] + if not head or length != 1: raise error.PyAsn1Error('Not single-octet Boolean payload') - - for chunk in readFromStream(substrate, length, options): - if isinstance(chunk, SubstrateUnderrunError): - yield chunk - - byte = chunk[0] - + byte = oct2int(head[0]) # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while # BER allows any non-zero value as TRUE; cf. sections 8.2.2. 
and 11.1 # in https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf if byte == 0xff: value = 1 - elif byte == 0x00: value = 0 - else: raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte) - - yield self._createComponent(asn1Spec, tagSet, value, **options) - + return self._createComponent(asn1Spec, tagSet, value, **options), tail # TODO: prohibit non-canonical encoding -BitStringPayloadDecoder = decoder.BitStringPayloadDecoder -OctetStringPayloadDecoder = decoder.OctetStringPayloadDecoder -RealPayloadDecoder = decoder.RealPayloadDecoder +BitStringDecoder = decoder.BitStringDecoder +OctetStringDecoder = decoder.OctetStringDecoder +RealDecoder = decoder.RealDecoder -TAG_MAP = decoder.TAG_MAP.copy() -TAG_MAP.update( - {univ.Boolean.tagSet: BooleanPayloadDecoder(), - univ.BitString.tagSet: BitStringPayloadDecoder(), - univ.OctetString.tagSet: OctetStringPayloadDecoder(), - univ.Real.tagSet: RealPayloadDecoder()} +tagMap = decoder.tagMap.copy() +tagMap.update( + {univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Real.tagSet: RealDecoder()} ) -TYPE_MAP = decoder.TYPE_MAP.copy() +typeMap = decoder.typeMap.copy() # Put in non-ambiguous types for faster codec lookup -for typeDecoder in TAG_MAP.values(): +for typeDecoder in tagMap.values(): if typeDecoder.protoComponent is not None: typeId = typeDecoder.protoComponent.__class__.typeId - if typeId is not None and typeId not in TYPE_MAP: - TYPE_MAP[typeId] = typeDecoder - - -class SingleItemDecoder(decoder.SingleItemDecoder): - __doc__ = decoder.SingleItemDecoder.__doc__ - - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - -class StreamingDecoder(decoder.StreamingDecoder): - __doc__ = decoder.StreamingDecoder.__doc__ - - SINGLE_ITEM_DECODER = SingleItemDecoder + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder class Decoder(decoder.Decoder): - __doc__ = decoder.Decoder.__doc__ - - STREAMING_DECODER = StreamingDecoder + pass #: Turns CER octet stream into an ASN.1 object. @@ -98,7 +69,7 @@ class Decoder(decoder.Decoder): #: #: Parameters #: ---------- -#: substrate: :py:class:`bytes` +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) #: CER octet-stream #: #: Keyword Args @@ -140,10 +111,4 @@ class Decoder(decoder.Decoder): #: SequenceOf: #: 1 2 3 #: -decode = Decoder() - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +decode = Decoder(tagMap, decoder.typeMap) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/encoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/encoder.py index e16e9ece..935b6965 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/encoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/cer/encoder.py @@ -1,17 +1,16 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -import warnings - from pyasn1 import error from pyasn1.codec.ber import encoder +from pyasn1.compat.octets import str2octs, null from pyasn1.type import univ from pyasn1.type import useful -__all__ = ['Encoder', 'encode'] +__all__ = ['encode'] class BooleanEncoder(encoder.IntegerEncoder): @@ -117,7 +116,7 @@ class SetOfEncoder(encoder.SequenceOfEncoder): # sort by serialised and padded components if len(chunks) > 1: - zero = b'\x00' + zero = str2octs('\x00') maxLen = max(map(len, chunks)) paddedChunks = [ (x.ljust(maxLen, zero), x) for x in chunks @@ -126,19 +125,19 @@ class SetOfEncoder(encoder.SequenceOfEncoder): chunks = [x[1] for x in paddedChunks] - return b''.join(chunks), True, True + return null.join(chunks), True, True class SequenceOfEncoder(encoder.SequenceOfEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): if options.get('ifNotEmpty', False) and not len(value): - return b'', True, True + return null, True, True chunks = self._encodeComponents( value, asn1Spec, encodeFun, **options) - return b''.join(chunks), True, True + return null.join(chunks), True, True class SetEncoder(encoder.SequenceEncoder): @@ -163,7 +162,7 @@ class SetEncoder(encoder.SequenceEncoder): def encodeValue(self, value, asn1Spec, encodeFun, **options): - substrate = b'' + substrate = null comps = [] compsMap = {} @@ -172,8 +171,7 @@ class SetEncoder(encoder.SequenceEncoder): # instance of ASN.1 schema inconsistency = value.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {value.__class__.__name__} is inconsistent") + raise inconsistency namedTypes = value.componentType @@ -236,9 +234,8 @@ class SequenceEncoder(encoder.SequenceEncoder): omitEmptyOptionals = True -TAG_MAP = encoder.TAG_MAP.copy() - -TAG_MAP.update({ +tagMap = encoder.tagMap.copy() +tagMap.update({ univ.Boolean.tagSet: BooleanEncoder(), univ.Real.tagSet: RealEncoder(), useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(), @@ -248,9 +245,8 @@ TAG_MAP.update({ univ.Sequence.typeId: SequenceEncoder() }) -TYPE_MAP = encoder.TYPE_MAP.copy() - -TYPE_MAP.update({ +typeMap = encoder.typeMap.copy() +typeMap.update({ univ.Boolean.typeId: BooleanEncoder(), univ.Real.typeId: RealEncoder(), useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(), @@ -263,18 +259,10 @@ TYPE_MAP.update({ }) -class SingleItemEncoder(encoder.SingleItemEncoder): +class Encoder(encoder.Encoder): fixedDefLengthMode = False fixedChunkSize = 1000 - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - -class Encoder(encoder.Encoder): - SINGLE_ITEM_ENCODER = SingleItemEncoder - - #: Turns ASN.1 object into CER octet stream. #: #: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) @@ -293,7 +281,7 @@ class Encoder(encoder.Encoder): #: #: Returns #: ------- -#: : :py:class:`bytes` +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) #: Given ASN.1 object encoded into BER octet-stream #: #: Raises @@ -320,12 +308,6 @@ class Encoder(encoder.Encoder): #: >>> encode(seq) #: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00' #: -encode = Encoder() +encode = Encoder(tagMap, typeMap) # EncoderFactory queries class instance and builds a map of tags -> encoders - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. 
Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/der/decoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/der/decoder.py index d31a8565..1a13fdb5 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/der/decoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/der/decoder.py @@ -1,64 +1,44 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -import warnings - from pyasn1.codec.cer import decoder from pyasn1.type import univ -__all__ = ['decode', 'StreamingDecoder'] +__all__ = ['decode'] -class BitStringPayloadDecoder(decoder.BitStringPayloadDecoder): +class BitStringDecoder(decoder.BitStringDecoder): supportConstructedForm = False -class OctetStringPayloadDecoder(decoder.OctetStringPayloadDecoder): +class OctetStringDecoder(decoder.OctetStringDecoder): supportConstructedForm = False - # TODO: prohibit non-canonical encoding -RealPayloadDecoder = decoder.RealPayloadDecoder +RealDecoder = decoder.RealDecoder -TAG_MAP = decoder.TAG_MAP.copy() -TAG_MAP.update( - {univ.BitString.tagSet: BitStringPayloadDecoder(), - univ.OctetString.tagSet: OctetStringPayloadDecoder(), - univ.Real.tagSet: RealPayloadDecoder()} +tagMap = decoder.tagMap.copy() +tagMap.update( + {univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Real.tagSet: RealDecoder()} ) -TYPE_MAP = decoder.TYPE_MAP.copy() +typeMap = decoder.typeMap.copy() # Put in non-ambiguous types for faster codec lookup -for typeDecoder in TAG_MAP.values(): +for typeDecoder in tagMap.values(): if typeDecoder.protoComponent is not None: typeId = typeDecoder.protoComponent.__class__.typeId - if typeId is not None and typeId not in TYPE_MAP: - TYPE_MAP[typeId] = typeDecoder - - -class SingleItemDecoder(decoder.SingleItemDecoder): - __doc__ = decoder.SingleItemDecoder.__doc__ - - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - supportIndefLength = False - - -class StreamingDecoder(decoder.StreamingDecoder): - __doc__ = decoder.StreamingDecoder.__doc__ - - SINGLE_ITEM_DECODER = SingleItemDecoder + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder class Decoder(decoder.Decoder): - __doc__ = decoder.Decoder.__doc__ - - STREAMING_DECODER = StreamingDecoder + supportIndefLength = False #: Turns DER octet stream into an ASN.1 object. @@ -69,7 +49,7 @@ class Decoder(decoder.Decoder): #: #: Parameters #: ---------- -#: substrate: :py:class:`bytes` +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) #: DER octet-stream #: #: Keyword Args @@ -111,10 +91,4 @@ class Decoder(decoder.Decoder): #: SequenceOf: #: 1 2 3 #: -decode = Decoder() - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. 
Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +decode = Decoder(tagMap, typeMap) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/der/encoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/der/encoder.py index f9a36102..90e982da 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/der/encoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/der/encoder.py @@ -1,16 +1,14 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -import warnings - from pyasn1 import error from pyasn1.codec.cer import encoder from pyasn1.type import univ -__all__ = ['Encoder', 'encode'] +__all__ = ['encode'] class SetEncoder(encoder.SetEncoder): @@ -44,34 +42,23 @@ class SetEncoder(encoder.SetEncoder): else: return compType.tagSet - -TAG_MAP = encoder.TAG_MAP.copy() - -TAG_MAP.update({ +tagMap = encoder.tagMap.copy() +tagMap.update({ # Set & SetOf have same tags univ.Set.tagSet: SetEncoder() }) -TYPE_MAP = encoder.TYPE_MAP.copy() - -TYPE_MAP.update({ +typeMap = encoder.typeMap.copy() +typeMap.update({ # Set & SetOf have same tags univ.Set.typeId: SetEncoder() }) -class SingleItemEncoder(encoder.SingleItemEncoder): +class Encoder(encoder.Encoder): fixedDefLengthMode = True fixedChunkSize = 0 - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - -class Encoder(encoder.Encoder): - SINGLE_ITEM_ENCODER = SingleItemEncoder - - #: Turns ASN.1 object into DER octet stream. #: #: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) @@ -90,7 +77,7 @@ class Encoder(encoder.Encoder): #: #: Returns #: ------- -#: : :py:class:`bytes` +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) #: Given ASN.1 object encoded into BER octet-stream #: #: Raises @@ -117,10 +104,4 @@ class Encoder(encoder.Encoder): #: >>> encode(seq) #: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03' #: -encode = Encoder() - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +encode = Encoder(tagMap, typeMap) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/native/decoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/native/decoder.py index 9ac01ff6..104b92e6 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/native/decoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/native/decoder.py @@ -1,14 +1,11 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -import warnings - from pyasn1 import debug from pyasn1 import error -from pyasn1.compat import _MISSING from pyasn1.type import base from pyasn1.type import char from pyasn1.type import tag @@ -20,17 +17,17 @@ __all__ = ['decode'] LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER) -class AbstractScalarPayloadDecoder(object): +class AbstractScalarDecoder(object): def __call__(self, pyObject, asn1Spec, decodeFun=None, **options): return asn1Spec.clone(pyObject) -class BitStringPayloadDecoder(AbstractScalarPayloadDecoder): +class BitStringDecoder(AbstractScalarDecoder): def __call__(self, pyObject, asn1Spec, decodeFun=None, **options): return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject)) -class SequenceOrSetPayloadDecoder(object): +class SequenceOrSetDecoder(object): def __call__(self, pyObject, asn1Spec, decodeFun=None, **options): asn1Value = asn1Spec.clone() @@ -43,7 +40,7 @@ class SequenceOrSetPayloadDecoder(object): return asn1Value -class SequenceOfOrSetOfPayloadDecoder(object): +class SequenceOfOrSetOfDecoder(object): def __call__(self, pyObject, asn1Spec, decodeFun=None, **options): asn1Value = asn1Spec.clone() @@ -53,7 +50,7 @@ class SequenceOfOrSetOfPayloadDecoder(object): return asn1Value -class ChoicePayloadDecoder(object): +class ChoiceDecoder(object): def __call__(self, pyObject, asn1Spec, decodeFun=None, **options): asn1Value = asn1Spec.clone() @@ -67,134 +64,112 @@ class ChoicePayloadDecoder(object): return asn1Value -TAG_MAP = { - univ.Integer.tagSet: AbstractScalarPayloadDecoder(), - univ.Boolean.tagSet: AbstractScalarPayloadDecoder(), - univ.BitString.tagSet: BitStringPayloadDecoder(), - univ.OctetString.tagSet: AbstractScalarPayloadDecoder(), - univ.Null.tagSet: AbstractScalarPayloadDecoder(), - univ.ObjectIdentifier.tagSet: AbstractScalarPayloadDecoder(), - univ.RelativeOID.tagSet: AbstractScalarPayloadDecoder(), - univ.Enumerated.tagSet: AbstractScalarPayloadDecoder(), - univ.Real.tagSet: AbstractScalarPayloadDecoder(), - univ.Sequence.tagSet: SequenceOrSetPayloadDecoder(), # conflicts with SequenceOf - univ.Set.tagSet: SequenceOrSetPayloadDecoder(), # conflicts with SetOf - univ.Choice.tagSet: ChoicePayloadDecoder(), # conflicts with Any +tagMap = { + univ.Integer.tagSet: AbstractScalarDecoder(), + univ.Boolean.tagSet: AbstractScalarDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: AbstractScalarDecoder(), + univ.Null.tagSet: AbstractScalarDecoder(), + univ.ObjectIdentifier.tagSet: AbstractScalarDecoder(), + univ.Enumerated.tagSet: AbstractScalarDecoder(), + univ.Real.tagSet: AbstractScalarDecoder(), + univ.Sequence.tagSet: SequenceOrSetDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SequenceOrSetDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any # character string types - char.UTF8String.tagSet: AbstractScalarPayloadDecoder(), - char.NumericString.tagSet: AbstractScalarPayloadDecoder(), - char.PrintableString.tagSet: AbstractScalarPayloadDecoder(), - char.TeletexString.tagSet: AbstractScalarPayloadDecoder(), - char.VideotexString.tagSet: AbstractScalarPayloadDecoder(), - char.IA5String.tagSet: AbstractScalarPayloadDecoder(), - char.GraphicString.tagSet: AbstractScalarPayloadDecoder(), - char.VisibleString.tagSet: AbstractScalarPayloadDecoder(), - 
char.GeneralString.tagSet: AbstractScalarPayloadDecoder(), - char.UniversalString.tagSet: AbstractScalarPayloadDecoder(), - char.BMPString.tagSet: AbstractScalarPayloadDecoder(), + char.UTF8String.tagSet: AbstractScalarDecoder(), + char.NumericString.tagSet: AbstractScalarDecoder(), + char.PrintableString.tagSet: AbstractScalarDecoder(), + char.TeletexString.tagSet: AbstractScalarDecoder(), + char.VideotexString.tagSet: AbstractScalarDecoder(), + char.IA5String.tagSet: AbstractScalarDecoder(), + char.GraphicString.tagSet: AbstractScalarDecoder(), + char.VisibleString.tagSet: AbstractScalarDecoder(), + char.GeneralString.tagSet: AbstractScalarDecoder(), + char.UniversalString.tagSet: AbstractScalarDecoder(), + char.BMPString.tagSet: AbstractScalarDecoder(), # useful types - useful.ObjectDescriptor.tagSet: AbstractScalarPayloadDecoder(), - useful.GeneralizedTime.tagSet: AbstractScalarPayloadDecoder(), - useful.UTCTime.tagSet: AbstractScalarPayloadDecoder() + useful.ObjectDescriptor.tagSet: AbstractScalarDecoder(), + useful.GeneralizedTime.tagSet: AbstractScalarDecoder(), + useful.UTCTime.tagSet: AbstractScalarDecoder() } # Put in ambiguous & non-ambiguous types for faster codec lookup -TYPE_MAP = { - univ.Integer.typeId: AbstractScalarPayloadDecoder(), - univ.Boolean.typeId: AbstractScalarPayloadDecoder(), - univ.BitString.typeId: BitStringPayloadDecoder(), - univ.OctetString.typeId: AbstractScalarPayloadDecoder(), - univ.Null.typeId: AbstractScalarPayloadDecoder(), - univ.ObjectIdentifier.typeId: AbstractScalarPayloadDecoder(), - univ.RelativeOID.typeId: AbstractScalarPayloadDecoder(), - univ.Enumerated.typeId: AbstractScalarPayloadDecoder(), - univ.Real.typeId: AbstractScalarPayloadDecoder(), +typeMap = { + univ.Integer.typeId: AbstractScalarDecoder(), + univ.Boolean.typeId: AbstractScalarDecoder(), + univ.BitString.typeId: BitStringDecoder(), + univ.OctetString.typeId: AbstractScalarDecoder(), + univ.Null.typeId: AbstractScalarDecoder(), + univ.ObjectIdentifier.typeId: AbstractScalarDecoder(), + univ.Enumerated.typeId: AbstractScalarDecoder(), + univ.Real.typeId: AbstractScalarDecoder(), # ambiguous base types - univ.Set.typeId: SequenceOrSetPayloadDecoder(), - univ.SetOf.typeId: SequenceOfOrSetOfPayloadDecoder(), - univ.Sequence.typeId: SequenceOrSetPayloadDecoder(), - univ.SequenceOf.typeId: SequenceOfOrSetOfPayloadDecoder(), - univ.Choice.typeId: ChoicePayloadDecoder(), - univ.Any.typeId: AbstractScalarPayloadDecoder(), + univ.Set.typeId: SequenceOrSetDecoder(), + univ.SetOf.typeId: SequenceOfOrSetOfDecoder(), + univ.Sequence.typeId: SequenceOrSetDecoder(), + univ.SequenceOf.typeId: SequenceOfOrSetOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AbstractScalarDecoder(), # character string types - char.UTF8String.typeId: AbstractScalarPayloadDecoder(), - char.NumericString.typeId: AbstractScalarPayloadDecoder(), - char.PrintableString.typeId: AbstractScalarPayloadDecoder(), - char.TeletexString.typeId: AbstractScalarPayloadDecoder(), - char.VideotexString.typeId: AbstractScalarPayloadDecoder(), - char.IA5String.typeId: AbstractScalarPayloadDecoder(), - char.GraphicString.typeId: AbstractScalarPayloadDecoder(), - char.VisibleString.typeId: AbstractScalarPayloadDecoder(), - char.GeneralString.typeId: AbstractScalarPayloadDecoder(), - char.UniversalString.typeId: AbstractScalarPayloadDecoder(), - char.BMPString.typeId: AbstractScalarPayloadDecoder(), + char.UTF8String.typeId: AbstractScalarDecoder(), + char.NumericString.typeId: AbstractScalarDecoder(), + 
char.PrintableString.typeId: AbstractScalarDecoder(), + char.TeletexString.typeId: AbstractScalarDecoder(), + char.VideotexString.typeId: AbstractScalarDecoder(), + char.IA5String.typeId: AbstractScalarDecoder(), + char.GraphicString.typeId: AbstractScalarDecoder(), + char.VisibleString.typeId: AbstractScalarDecoder(), + char.GeneralString.typeId: AbstractScalarDecoder(), + char.UniversalString.typeId: AbstractScalarDecoder(), + char.BMPString.typeId: AbstractScalarDecoder(), # useful types - useful.ObjectDescriptor.typeId: AbstractScalarPayloadDecoder(), - useful.GeneralizedTime.typeId: AbstractScalarPayloadDecoder(), - useful.UTCTime.typeId: AbstractScalarPayloadDecoder() + useful.ObjectDescriptor.typeId: AbstractScalarDecoder(), + useful.GeneralizedTime.typeId: AbstractScalarDecoder(), + useful.UTCTime.typeId: AbstractScalarDecoder() } -class SingleItemDecoder(object): +class Decoder(object): - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - def __init__(self, tagMap=_MISSING, typeMap=_MISSING, **ignored): - self._tagMap = tagMap if tagMap is not _MISSING else self.TAG_MAP - self._typeMap = typeMap if typeMap is not _MISSING else self.TYPE_MAP + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap): + self.__tagMap = tagMap + self.__typeMap = typeMap def __call__(self, pyObject, asn1Spec, **options): if LOG: debug.scope.push(type(pyObject).__name__) - LOG('decoder called at scope %s, working with ' - 'type %s' % (debug.scope, type(pyObject).__name__)) + LOG('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__)) if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item): - raise error.PyAsn1Error( - 'asn1Spec is not valid (should be an instance of an ASN.1 ' - 'Item, not %s)' % asn1Spec.__class__.__name__) + raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__) try: - valueDecoder = self._typeMap[asn1Spec.typeId] + valueDecoder = self.__typeMap[asn1Spec.typeId] except KeyError: # use base type for codec lookup to recover untagged types baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag) try: - valueDecoder = self._tagMap[baseTagSet] - + valueDecoder = self.__tagMap[baseTagSet] except KeyError: raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet) if LOG: - LOG('calling decoder %s on Python type %s ' - '<%s>' % (type(valueDecoder).__name__, - type(pyObject).__name__, repr(pyObject))) + LOG('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject))) value = valueDecoder(pyObject, asn1Spec, self, **options) if LOG: - LOG('decoder %s produced ASN.1 type %s ' - '<%s>' % (type(valueDecoder).__name__, - type(value).__name__, repr(value))) + LOG('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value))) debug.scope.pop() return value -class Decoder(object): - SINGLE_ITEM_DECODER = SingleItemDecoder - - def __init__(self, **options): - self._singleItemDecoder = self.SINGLE_ITEM_DECODER(**options) - - def __call__(self, pyObject, asn1Spec=None, **kwargs): - return self._singleItemDecoder(pyObject, asn1Spec=asn1Spec, **kwargs) - - #: Turns Python objects of built-in types into ASN.1 objects. 
#: #: Takes Python objects of built-in types and turns them into a tree of @@ -235,10 +210,4 @@ class Decoder(object): #: SequenceOf: #: 1 2 3 #: -decode = Decoder() - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +decode = Decoder(tagMap, typeMap) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/native/encoder.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/native/encoder.py index 6219ae45..4318abde 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/native/encoder.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/codec/native/encoder.py @@ -1,15 +1,17 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -from collections import OrderedDict -import warnings +try: + from collections import OrderedDict + +except ImportError: + OrderedDict = dict from pyasn1 import debug from pyasn1 import error -from pyasn1.compat import _MISSING from pyasn1.type import base from pyasn1.type import char from pyasn1.type import tag @@ -61,11 +63,6 @@ class ObjectIdentifierEncoder(AbstractItemEncoder): return str(value) -class RelativeOIDEncoder(AbstractItemEncoder): - def encode(self, value, encodeFun, **options): - return str(value) - - class RealEncoder(AbstractItemEncoder): def encode(self, value, encodeFun, **options): return float(value) @@ -77,8 +74,7 @@ class SetEncoder(AbstractItemEncoder): def encode(self, value, encodeFun, **options): inconsistency = value.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {value.__class__.__name__} is inconsistent") + raise inconsistency namedTypes = value.componentType substrate = self.protoDict() @@ -98,8 +94,7 @@ class SequenceOfEncoder(AbstractItemEncoder): def encode(self, value, encodeFun, **options): inconsistency = value.isInconsistent if inconsistency: - raise error.PyAsn1Error( - f"ASN.1 object {value.__class__.__name__} is inconsistent") + raise inconsistency return [encodeFun(x, **options) for x in value] @@ -112,14 +107,13 @@ class AnyEncoder(AbstractItemEncoder): return value.asOctets() -TAG_MAP = { +tagMap = { univ.Boolean.tagSet: BooleanEncoder(), univ.Integer.tagSet: IntegerEncoder(), univ.BitString.tagSet: BitStringEncoder(), univ.OctetString.tagSet: OctetStringEncoder(), univ.Null.tagSet: NullEncoder(), univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), - univ.RelativeOID.tagSet: RelativeOIDEncoder(), univ.Enumerated.tagSet: IntegerEncoder(), univ.Real.tagSet: RealEncoder(), # Sequence & Set have same tags as SequenceOf & SetOf @@ -144,15 +138,15 @@ TAG_MAP = { useful.UTCTime.tagSet: OctetStringEncoder() } + # Put in ambiguous & non-ambiguous types for faster codec lookup -TYPE_MAP = { +typeMap = { univ.Boolean.typeId: BooleanEncoder(), univ.Integer.typeId: IntegerEncoder(), univ.BitString.typeId: BitStringEncoder(), univ.OctetString.typeId: OctetStringEncoder(), univ.Null.typeId: NullEncoder(), univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(), - univ.RelativeOID.typeId: RelativeOIDEncoder(), univ.Enumerated.typeId: IntegerEncoder(), univ.Real.typeId: RealEncoder(), # Sequence & Set have same tags as SequenceOf & SetOf @@ -181,66 +175,48 @@ TYPE_MAP = { } -class SingleItemEncoder(object): 
+class Encoder(object): - TAG_MAP = TAG_MAP - TYPE_MAP = TYPE_MAP - - def __init__(self, tagMap=_MISSING, typeMap=_MISSING, **ignored): - self._tagMap = tagMap if tagMap is not _MISSING else self.TAG_MAP - self._typeMap = typeMap if typeMap is not _MISSING else self.TYPE_MAP + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap def __call__(self, value, **options): if not isinstance(value, base.Asn1Item): - raise error.PyAsn1Error( - 'value is not valid (should be an instance of an ASN.1 Item)') + raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)') if LOG: debug.scope.push(type(value).__name__) - LOG('encoder called for type %s ' - '<%s>' % (type(value).__name__, value.prettyPrint())) + LOG('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint())) tagSet = value.tagSet try: - concreteEncoder = self._typeMap[value.typeId] + concreteEncoder = self.__typeMap[value.typeId] except KeyError: # use base type for codec lookup to recover untagged types - baseTagSet = tag.TagSet( - value.tagSet.baseTag, value.tagSet.baseTag) + baseTagSet = tag.TagSet(value.tagSet.baseTag, value.tagSet.baseTag) try: - concreteEncoder = self._tagMap[baseTagSet] + concreteEncoder = self.__tagMap[baseTagSet] except KeyError: raise error.PyAsn1Error('No encoder for %s' % (value,)) if LOG: - LOG('using value codec %s chosen by ' - '%s' % (concreteEncoder.__class__.__name__, tagSet)) + LOG('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) pyObject = concreteEncoder.encode(value, self, **options) if LOG: - LOG('encoder %s produced: ' - '%s' % (type(concreteEncoder).__name__, repr(pyObject))) + LOG('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject))) debug.scope.pop() return pyObject -class Encoder(object): - SINGLE_ITEM_ENCODER = SingleItemEncoder - - def __init__(self, **options): - self._singleItemEncoder = self.SINGLE_ITEM_ENCODER(**options) - - def __call__(self, pyObject, asn1Spec=None, **options): - return self._singleItemEncoder( - pyObject, asn1Spec=asn1Spec, **options) - - #: Turns ASN.1 object into a Python built-in type object(s). #: #: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) @@ -248,7 +224,8 @@ class Encoder(object): #: of those. #: #: One exception is that instead of :py:class:`dict`, the :py:class:`OrderedDict` -#: is used to preserve ordering of the components in ASN.1 SEQUENCE. +#: can be produced (whenever available) to preserve ordering of the components +#: in ASN.1 SEQUENCE. #: #: Parameters #: ---------- @@ -276,10 +253,4 @@ class Encoder(object): #: >>> encode(seq) #: [1, 2, 3] #: -encode = SingleItemEncoder() - -def __getattr__(attr: str): - if newAttr := {"tagMap": "TAG_MAP", "typeMap": "TYPE_MAP"}.get(attr): - warnings.warn(f"{attr} is deprecated. Please use {newAttr} instead.", DeprecationWarning) - return globals()[newAttr] - raise AttributeError(attr) +encode = Encoder(tagMap, typeMap) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/codec/streaming.py b/.venv/lib/python3.10/site-packages/pyasn1/codec/streaming.py deleted file mode 100644 index c35f2489..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1/codec/streaming.py +++ /dev/null @@ -1,234 +0,0 @@ -# -# This file is part of pyasn1 software. 
-# -# Copyright (c) 2005-2019, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html -# -import io -import os - -from pyasn1 import error -from pyasn1.type import univ - -class CachingStreamWrapper(io.IOBase): - """Wrapper around non-seekable streams. - - Note that the implementation is tied to the decoder, - not checking for dangerous arguments for the sake - of performance. - - The read bytes are kept in an internal cache until - setting _markedPosition which may reset the cache. - """ - def __init__(self, raw): - self._raw = raw - self._cache = io.BytesIO() - self._markedPosition = 0 - - def peek(self, n): - result = self.read(n) - self._cache.seek(-len(result), os.SEEK_CUR) - return result - - def seekable(self): - return True - - def seek(self, n=-1, whence=os.SEEK_SET): - # Note that this not safe for seeking forward. - return self._cache.seek(n, whence) - - def read(self, n=-1): - read_from_cache = self._cache.read(n) - if n != -1: - n -= len(read_from_cache) - if not n: # 0 bytes left to read - return read_from_cache - - read_from_raw = self._raw.read(n) - - self._cache.write(read_from_raw) - - return read_from_cache + read_from_raw - - @property - def markedPosition(self): - """Position where the currently processed element starts. - - This is used for back-tracking in SingleItemDecoder.__call__ - and (indefLen)ValueDecoder and should not be used for other purposes. - The client is not supposed to ever seek before this position. - """ - return self._markedPosition - - @markedPosition.setter - def markedPosition(self, value): - # By setting the value, we ensure we won't seek back before it. - # `value` should be the same as the current position - # We don't check for this for performance reasons. - self._markedPosition = value - - # Whenever we set _marked_position, we know for sure - # that we will not return back, and thus it is - # safe to drop all cached data. - if self._cache.tell() > io.DEFAULT_BUFFER_SIZE: - self._cache = io.BytesIO(self._cache.read()) - self._markedPosition = 0 - - def tell(self): - return self._cache.tell() - - -def asSeekableStream(substrate): - """Convert object to seekable byte-stream. - - Parameters - ---------- - substrate: :py:class:`bytes` or :py:class:`io.IOBase` or :py:class:`univ.OctetString` - - Returns - ------- - : :py:class:`io.IOBase` - - Raises - ------ - : :py:class:`~pyasn1.error.PyAsn1Error` - If the supplied substrate cannot be converted to a seekable stream. - """ - if isinstance(substrate, io.BytesIO): - return substrate - - elif isinstance(substrate, bytes): - return io.BytesIO(substrate) - - elif isinstance(substrate, univ.OctetString): - return io.BytesIO(substrate.asOctets()) - - try: - if substrate.seekable(): # Will fail for most invalid types - return substrate - else: - return CachingStreamWrapper(substrate) - - except AttributeError: - raise error.UnsupportedSubstrateError( - "Cannot convert " + substrate.__class__.__name__ + - " to a seekable bit stream.") - - -def isEndOfStream(substrate): - """Check whether we have reached the end of a stream. 
- - Although it is more effective to read and catch exceptions, this - function - - Parameters - ---------- - substrate: :py:class:`IOBase` - Stream to check - - Returns - ------- - : :py:class:`bool` - """ - if isinstance(substrate, io.BytesIO): - cp = substrate.tell() - substrate.seek(0, os.SEEK_END) - result = substrate.tell() == cp - substrate.seek(cp, os.SEEK_SET) - yield result - - else: - received = substrate.read(1) - if received is None: - yield - - if received: - substrate.seek(-1, os.SEEK_CUR) - - yield not received - - -def peekIntoStream(substrate, size=-1): - """Peek into stream. - - Parameters - ---------- - substrate: :py:class:`IOBase` - Stream to read from. - - size: :py:class:`int` - How many bytes to peek (-1 = all available) - - Returns - ------- - : :py:class:`bytes` or :py:class:`str` - The return type depends on Python major version - """ - if hasattr(substrate, "peek"): - received = substrate.peek(size) - if received is None: - yield - - while len(received) < size: - yield - - yield received - - else: - current_position = substrate.tell() - try: - for chunk in readFromStream(substrate, size): - yield chunk - - finally: - substrate.seek(current_position) - - -def readFromStream(substrate, size=-1, context=None): - """Read from the stream. - - Parameters - ---------- - substrate: :py:class:`IOBase` - Stream to read from. - - Keyword parameters - ------------------ - size: :py:class:`int` - How many bytes to read (-1 = all available) - - context: :py:class:`dict` - Opaque caller context will be attached to exception objects created - by this function. - - Yields - ------ - : :py:class:`bytes` or :py:class:`str` or :py:class:`SubstrateUnderrunError` - Read data or :py:class:`~pyasn1.error.SubstrateUnderrunError` - object if no `size` bytes is readily available in the stream. The - data type depends on Python major version - - Raises - ------ - : :py:class:`~pyasn1.error.EndOfStreamError` - Input stream is exhausted - """ - while True: - # this will block unless stream is non-blocking - received = substrate.read(size) - if received is None: # non-blocking stream can do this - yield error.SubstrateUnderrunError(context=context) - - elif not received and size != 0: # end-of-stream - raise error.EndOfStreamError(context=context) - - elif len(received) < size: - substrate.seek(-len(received), os.SEEK_CUR) - - # behave like a non-blocking stream - yield error.SubstrateUnderrunError(context=context) - - else: - break - - yield received diff --git a/.venv/lib/python3.10/site-packages/pyasn1/compat/__init__.py b/.venv/lib/python3.10/site-packages/pyasn1/compat/__init__.py index d3e676ac..8c3066b2 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/compat/__init__.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/compat/__init__.py @@ -1,4 +1 @@ # This file is necessary to make this directory a package. - -# sentinal for missing argument -_MISSING = object() diff --git a/.venv/lib/python3.10/site-packages/pyasn1/compat/integer.py b/.venv/lib/python3.10/site-packages/pyasn1/compat/integer.py index 306210e5..4b31791d 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/compat/integer.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/compat/integer.py @@ -1,13 +1,110 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # -def to_bytes(value, signed=False, length=0): - length = max(value.bit_length(), length) +import sys - if signed and length % 8 == 0: - length += 1 +try: + import platform - return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed) + implementation = platform.python_implementation() + +except (ImportError, AttributeError): + implementation = 'CPython' + +from pyasn1.compat.octets import oct2int, null, ensureString + +if sys.version_info[0:2] < (3, 2) or implementation != 'CPython': + from binascii import a2b_hex, b2a_hex + + if sys.version_info[0] > 2: + long = int + + def from_bytes(octets, signed=False): + if not octets: + return 0 + + value = long(b2a_hex(ensureString(octets)), 16) + + if signed and oct2int(octets[0]) & 0x80: + return value - (1 << len(octets) * 8) + + return value + + def to_bytes(value, signed=False, length=0): + if value < 0: + if signed: + bits = bitLength(value) + + # two's complement form + maxValue = 1 << bits + valueToEncode = (value + maxValue) % maxValue + + else: + raise OverflowError('can\'t convert negative int to unsigned') + elif value == 0 and length == 0: + return null + else: + bits = 0 + valueToEncode = value + + hexValue = hex(valueToEncode)[2:] + if hexValue.endswith('L'): + hexValue = hexValue[:-1] + + if len(hexValue) & 1: + hexValue = '0' + hexValue + + # padding may be needed for two's complement encoding + if value != valueToEncode or length: + hexLength = len(hexValue) * 4 + + padLength = max(length, bits) + + if padLength > hexLength: + hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue + elif length and hexLength - length > 7: + raise OverflowError('int too big to convert') + + firstOctet = int(hexValue[:2], 16) + + if signed: + if firstOctet & 0x80: + if value >= 0: + hexValue = '00' + hexValue + elif value < 0: + hexValue = 'ff' + hexValue + + octets_value = a2b_hex(hexValue) + + return octets_value + + def bitLength(number): + # bits in unsigned number + hexValue = hex(abs(number)) + bits = len(hexValue) - 2 + if hexValue.endswith('L'): + bits -= 1 + if bits & 1: + bits += 1 + bits *= 4 + # TODO: strip lhs zeros + return bits + +else: + + def from_bytes(octets, signed=False): + return int.from_bytes(bytes(octets), 'big', signed=signed) + + def to_bytes(value, signed=False, length=0): + length = max(value.bit_length(), length) + + if signed and length % 8 == 0: + length += 1 + + return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed) + + def bitLength(number): + return int(number).bit_length() diff --git a/.venv/lib/python3.10/site-packages/pyasn1/debug.py b/.venv/lib/python3.10/site-packages/pyasn1/debug.py index 07194235..8707aa88 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/debug.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/debug.py @@ -1,14 +1,15 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import logging import sys from pyasn1 import __version__ from pyasn1 import error +from pyasn1.compat.octets import octs2ints __all__ = ['Debug', 'setLogger', 'hexdump'] @@ -54,6 +55,16 @@ class Printer(object): return '' +if hasattr(logging, 'NullHandler'): + NullHandler = logging.NullHandler + +else: + # Python 2.6 and older + class NullHandler(logging.Handler): + def emit(self, record): + pass + + class Debug(object): defaultPrinter = Printer() @@ -64,7 +75,7 @@ class Debug(object): # route our logs to parent logger self._printer = Printer( logger=logging.getLogger(options['loggerName']), - handler=logging.NullHandler() + handler=NullHandler() ) elif 'printer' in options: @@ -126,7 +137,7 @@ def registerLoggee(module, name='LOG', flags=DEBUG_NONE): def hexdump(octets): return ' '.join( ['%s%.2X' % (n % 16 == 0 and ('\n%.5d: ' % n) or '', x) - for n, x in zip(range(len(octets)), octets)] + for n, x in zip(range(len(octets)), octs2ints(octets))] ) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/error.py b/.venv/lib/python3.10/site-packages/pyasn1/error.py index 75c9a3f4..4f48db25 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/error.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/error.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # @@ -12,36 +12,7 @@ class PyAsn1Error(Exception): `PyAsn1Error` is the base exception class (based on :class:`Exception`) that represents all possible ASN.1 related errors. - - Parameters - ---------- - args: - Opaque positional parameters - - Keyword Args - ------------ - kwargs: - Opaque keyword parameters - """ - def __init__(self, *args, **kwargs): - self._args = args - self._kwargs = kwargs - - @property - def context(self): - """Return exception context - - When exception object is created, the caller can supply some opaque - context for the upper layers to better understand the cause of the - exception. - - Returns - ------- - : :py:class:`dict` - Dict holding context specific data - """ - return self._kwargs.get('context', {}) class ValueConstraintError(PyAsn1Error): @@ -63,18 +34,6 @@ class SubstrateUnderrunError(PyAsn1Error): """ -class EndOfStreamError(SubstrateUnderrunError): - """ASN.1 data structure deserialization error - - The `EndOfStreamError` exception indicates the condition of the input - stream has been closed. - """ - - -class UnsupportedSubstrateError(PyAsn1Error): - """Unsupported substrate type to parse as ASN.1 data.""" - - class PyAsn1UnicodeError(PyAsn1Error, UnicodeError): """Unicode text processing error diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/base.py b/.venv/lib/python3.10/site-packages/pyasn1/type/base.py index aa86e520..994f1c99 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/base.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/base.py @@ -1,12 +1,13 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import sys from pyasn1 import error +from pyasn1.compat import calling from pyasn1.type import constraint from pyasn1.type import tag from pyasn1.type import tagmap @@ -140,7 +141,7 @@ class Asn1Type(Asn1Item): return True def prettyPrint(self, scope=0): - raise NotImplementedError + raise NotImplementedError() # backward compatibility @@ -178,31 +179,31 @@ class NoValue(object): Any operation attempted on the *noValue* object will raise the *PyAsn1Error* exception. """ - skipMethods = { - '__slots__', - # attributes - '__getattribute__', - '__getattr__', - '__setattr__', - '__delattr__', - # class instance - '__class__', - '__init__', - '__del__', - '__new__', - '__repr__', - '__qualname__', - '__objclass__', - 'im_class', - '__sizeof__', - # pickle protocol - '__reduce__', - '__reduce_ex__', - '__getnewargs__', - '__getinitargs__', - '__getstate__', - '__setstate__', - } + skipMethods = set( + ('__slots__', + # attributes + '__getattribute__', + '__getattr__', + '__setattr__', + '__delattr__', + # class instance + '__class__', + '__init__', + '__del__', + '__new__', + '__repr__', + '__qualname__', + '__objclass__', + 'im_class', + '__sizeof__', + # pickle protocol + '__reduce__', + '__reduce_ex__', + '__getnewargs__', + '__getinitargs__', + '__getstate__', + '__setstate__') + ) _instance = None @@ -219,7 +220,7 @@ class NoValue(object): if (name not in cls.skipMethods and name.startswith('__') and name.endswith('__') and - callable(getattr(typ, name)))] + calling.callable(getattr(typ, name)))] for name in set(op_names): setattr(cls, name, getPlug(name)) @@ -267,8 +268,9 @@ class SimpleAsn1Type(Asn1Type): try: self.subtypeSpec(value) - except error.PyAsn1Error as exValue: - raise type(exValue)('%s at %s' % (exValue, self.__class__.__name__)) + except error.PyAsn1Error: + exType, exValue, exTb = sys.exc_info() + raise exType('%s at %s' % (exValue, self.__class__.__name__)) self._value = value @@ -289,9 +291,7 @@ class SimpleAsn1Type(Asn1Type): return '<%s>' % representation def __eq__(self, other): - if self is other: - return True - return self._value == other + return self is other and True or self._value == other def __ne__(self, other): return self._value != other @@ -308,8 +308,12 @@ class SimpleAsn1Type(Asn1Type): def __ge__(self, other): return self._value >= other - def __bool__(self): - return bool(self._value) + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self._value and True or False + else: + def __bool__(self): + return self._value and True or False def __hash__(self): return hash(self._value) @@ -560,8 +564,12 @@ class ConstructedAsn1Type(Asn1Type): def __ge__(self, other): return self.components >= other - def __bool__(self): - return bool(self.components) + if sys.version_info[0] <= 2: + def __nonzero__(self): + return bool(self.components) + else: + def __bool__(self): + return bool(self.components) @property def components(self): diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/char.py b/.venv/lib/python3.10/site-packages/pyasn1/type/char.py index ec65f006..06074da0 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/char.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/char.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import sys @@ -22,14 +22,15 @@ class AbstractCharacterString(univ.OctetString): """Creates |ASN.1| schema or value object. |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, - its objects are immutable and duck-type :class:`bytes`. - When used in octet-stream context, |ASN.1| type assumes + its objects are immutable and duck-type Python 2 :class:`str` or Python 3 + :class:`bytes`. When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding. Keyword Args ------------ - value: :class:`str`, :class:`bytes` or |ASN.1| object - :class:`str`, alternatively :class:`bytes` + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + :class:`unicode` object (Python 2) or :class:`str` (Python 3), + alternatively :class:`str` (Python 2) or :class:`bytes` (Python 3) representing octet-stream of serialised unicode string (note `encoding` parameter) or |ASN.1| class instance. If `value` is not given, schema object will be created. @@ -43,8 +44,8 @@ class AbstractCharacterString(univ.OctetString): instantiation. encoding: :py:class:`str` - Unicode codec ID to encode/decode - :class:`str` the payload when |ASN.1| object is used + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used in octet-stream context. Raises @@ -53,42 +54,88 @@ class AbstractCharacterString(univ.OctetString): On constraint violation or bad initializer. """ - def __str__(self): - return str(self._value) + if sys.version_info[0] <= 2: + def __str__(self): + try: + # `str` is Py2 text representation + return self._value.encode(self.encoding) - def __bytes__(self): - try: - return self._value.encode(self.encoding) - except UnicodeEncodeError as exc: - raise error.PyAsn1UnicodeEncodeError( - "Can't encode string '%s' with codec " - "%s" % (self._value, self.encoding), exc - ) + except UnicodeEncodeError: + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeEncodeError( + "Can't encode string '%s' with codec " + "%s" % (self._value, self.encoding), exc + ) - def prettyIn(self, value): - try: - if isinstance(value, str): - return value - elif isinstance(value, bytes): - return value.decode(self.encoding) - elif isinstance(value, (tuple, list)): - return self.prettyIn(bytes(value)) - elif isinstance(value, univ.OctetString): - return value.asOctets().decode(self.encoding) - else: - return str(value) + def __unicode__(self): + return unicode(self._value) - except (UnicodeDecodeError, LookupError) as exc: - raise error.PyAsn1UnicodeDecodeError( - "Can't decode string '%s' with codec " - "%s" % (value, self.encoding), exc - ) + def prettyIn(self, value): + try: + if isinstance(value, unicode): + return value + elif isinstance(value, str): + return value.decode(self.encoding) + elif isinstance(value, (tuple, list)): + return self.prettyIn(''.join([chr(x) for x in value])) + elif isinstance(value, univ.OctetString): + return value.asOctets().decode(self.encoding) + else: + return unicode(value) - def asOctets(self, padding=True): - return bytes(self) + except (UnicodeDecodeError, LookupError): + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeDecodeError( + "Can't decode string '%s' with codec " + "%s" % (value, self.encoding), exc + ) - def asNumbers(self, padding=True): - return tuple(bytes(self)) + def 
asOctets(self, padding=True): + return str(self) + + def asNumbers(self, padding=True): + return tuple([ord(x) for x in str(self)]) + + else: + def __str__(self): + # `unicode` is Py3 text representation + return str(self._value) + + def __bytes__(self): + try: + return self._value.encode(self.encoding) + except UnicodeEncodeError: + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeEncodeError( + "Can't encode string '%s' with codec " + "%s" % (self._value, self.encoding), exc + ) + + def prettyIn(self, value): + try: + if isinstance(value, str): + return value + elif isinstance(value, bytes): + return value.decode(self.encoding) + elif isinstance(value, (tuple, list)): + return self.prettyIn(bytes(value)) + elif isinstance(value, univ.OctetString): + return value.asOctets().decode(self.encoding) + else: + return str(value) + + except (UnicodeDecodeError, LookupError): + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeDecodeError( + "Can't decode string '%s' with codec " + "%s" % (value, self.encoding), exc + ) + + def asOctets(self, padding=True): + return bytes(self) + + def asNumbers(self, padding=True): + return tuple(bytes(self)) # # See OctetString.prettyPrint() for the explanation diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/constraint.py b/.venv/lib/python3.10/site-packages/pyasn1/type/constraint.py index 02368d0a..8f152e9e 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/constraint.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/constraint.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # # Original concept and code by Mike C. Fletcher. # @@ -31,9 +31,9 @@ class AbstractConstraint(object): try: self._testValue(value, idx) - except error.ValueConstraintError as exc: + except error.ValueConstraintError: raise error.ValueConstraintError( - '%s failed at: %r' % (self, exc) + '%s failed at: %r' % (self, sys.exc_info()[1]) ) def __repr__(self): @@ -46,9 +46,7 @@ class AbstractConstraint(object): return '<%s>' % representation def __eq__(self, other): - if self is other: - return True - return self._values == other + return self is other and True or self._values == other def __ne__(self, other): return self._values != other @@ -65,8 +63,12 @@ class AbstractConstraint(object): def __ge__(self, other): return self._values >= other - def __bool__(self): - return bool(self._values) + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self._values and True or False + else: + def __bool__(self): + return self._values and True or False def __hash__(self): return self.__hash @@ -148,6 +150,9 @@ class SingleValueConstraint(AbstractConstraint): def __iter__(self): return iter(self._set) + def __sub__(self, constraint): + return self.__class__(*(self._set.difference(constraint))) + def __add__(self, constraint): return self.__class__(*(self._set.union(constraint))) diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/error.py b/.venv/lib/python3.10/site-packages/pyasn1/type/error.py index 0ff082ab..80fcf3bd 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/error.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/error.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # from pyasn1.error import PyAsn1Error diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/namedtype.py b/.venv/lib/python3.10/site-packages/pyasn1/type/namedtype.py index 5f6c4ca3..cbc14293 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/namedtype.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/namedtype.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import sys @@ -13,6 +13,13 @@ from pyasn1.type import tagmap __all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType', 'NamedTypes'] +try: + any + +except NameError: + any = lambda x: bool(filter(bool, x)) + + class NamedType(object): """Create named field object for a constructed ASN.1 type. @@ -204,8 +211,12 @@ class NamedTypes(object): def __iter__(self): return (x[0] for x in self.__namedTypes) - def __bool__(self): - return self.__namedTypesLen > 0 + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self.__namedTypesLen > 0 + else: + def __bool__(self): + return self.__namedTypesLen > 0 def __len__(self): return self.__namedTypesLen diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/namedval.py b/.venv/lib/python3.10/site-packages/pyasn1/type/namedval.py index 46a6496d..42475978 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/namedval.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/namedval.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # # ASN.1 named integers # diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/opentype.py b/.venv/lib/python3.10/site-packages/pyasn1/type/opentype.py index 5a15f896..29645f0f 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/opentype.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/opentype.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # __all__ = ['OpenType'] diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/tag.py b/.venv/lib/python3.10/site-packages/pyasn1/type/tag.py index ccb8b00c..b88a7341 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/tag.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/tag.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. 
# -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -98,7 +98,7 @@ class Tag(object): elif idx == 2: return self.__tagId else: - raise IndexError + raise IndexError() def __iter__(self): yield self.__tagClass diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/tagmap.py b/.venv/lib/python3.10/site-packages/pyasn1/type/tagmap.py index 7f8a955a..6f5163b4 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/tagmap.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/tagmap.py @@ -1,8 +1,8 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # from pyasn1 import error @@ -46,7 +46,7 @@ class TagMap(object): return self.__presentTypes[tagSet] except KeyError: if self.__defaultType is None: - raise + raise KeyError() elif tagSet in self.__skipTypes: raise error.PyAsn1Error('Key in negative map') else: diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/univ.py b/.venv/lib/python3.10/site-packages/pyasn1/type/univ.py index 9aff5e69..aa688b22 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/univ.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/univ.py @@ -1,15 +1,17 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import math import sys from pyasn1 import error from pyasn1.codec.ber import eoo +from pyasn1.compat import binary from pyasn1.compat import integer +from pyasn1.compat import octets from pyasn1.type import base from pyasn1.type import constraint from pyasn1.type import namedtype @@ -164,23 +166,40 @@ class Integer(base.SimpleAsn1Type): def __rfloordiv__(self, value): return self.clone(value // self._value) - def __truediv__(self, value): - return Real(self._value / value) + if sys.version_info[0] <= 2: + def __div__(self, value): + if isinstance(value, float): + return Real(self._value / value) + else: + return self.clone(self._value / value) - def __rtruediv__(self, value): - return Real(value / self._value) + def __rdiv__(self, value): + if isinstance(value, float): + return Real(value / self._value) + else: + return self.clone(value / self._value) + else: + def __truediv__(self, value): + return Real(self._value / value) - def __divmod__(self, value): - return self.clone(divmod(self._value, value)) + def __rtruediv__(self, value): + return Real(value / self._value) - def __rdivmod__(self, value): - return self.clone(divmod(value, self._value)) + def __divmod__(self, value): + return self.clone(divmod(self._value, value)) - __hash__ = base.SimpleAsn1Type.__hash__ + def __rdivmod__(self, value): + return self.clone(divmod(value, self._value)) + + __hash__ = base.SimpleAsn1Type.__hash__ def __int__(self): return int(self._value) + if sys.version_info[0] <= 2: + def __long__(self): + return long(self._value) + def __float__(self): return float(self._value) @@ -212,8 +231,9 @@ class Integer(base.SimpleAsn1Type): def __ceil__(self): return math.ceil(self._value) - def __trunc__(self): - return self.clone(math.trunc(self._value)) + if sys.version_info[0:2] > (2, 5): + def 
__trunc__(self): + return self.clone(math.trunc(self._value)) def __lt__(self, value): return self._value < value @@ -241,9 +261,9 @@ class Integer(base.SimpleAsn1Type): try: return self.namedValues[value] - except KeyError as exc: + except KeyError: raise error.PyAsn1Error( - 'Can\'t coerce %r into integer: %s' % (value, exc) + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) ) def prettyOut(self, value): @@ -322,18 +342,23 @@ class Boolean(Integer): # Optimization for faster codec lookup typeId = Integer.getTypeId() +if sys.version_info[0] < 3: + SizedIntegerBase = long +else: + SizedIntegerBase = int -class SizedInteger(int): + +class SizedInteger(SizedIntegerBase): bitLength = leadingZeroBits = None def setBitLength(self, bitLength): self.bitLength = bitLength - self.leadingZeroBits = max(bitLength - self.bit_length(), 0) + self.leadingZeroBits = max(bitLength - integer.bitLength(self), 0) return self def __len__(self): if self.bitLength is None: - self.setBitLength(self.bit_length()) + self.setBitLength(integer.bitLength(self)) return self.bitLength @@ -528,18 +553,22 @@ class BitString(base.SimpleAsn1Type): return self.clone(SizedInteger(self._value >> count).setBitLength(max(0, len(self._value) - count))) def __int__(self): - return int(self._value) + return self._value def __float__(self): return float(self._value) + if sys.version_info[0] < 3: + def __long__(self): + return self._value + def asNumbers(self): """Get |ASN.1| value as a sequence of 8-bit integers. If |ASN.1| object length is not a multiple of 8, result will be left-padded with zeros. """ - return tuple(self.asOctets()) + return tuple(octets.octs2ints(self.asOctets())) def asOctets(self): """Get |ASN.1| value as a sequence of octets. @@ -557,7 +586,7 @@ class BitString(base.SimpleAsn1Type): def asBinary(self): """Get |ASN.1| value as a text string of bits. 
""" - binString = bin(self._value)[2:] + binString = binary.bin(self._value)[2:] return '0' * (len(self._value) - len(binString)) + binString @classmethod @@ -572,8 +601,8 @@ class BitString(base.SimpleAsn1Type): try: value = SizedInteger(value, 16).setBitLength(len(value) * 4) - except ValueError as exc: - raise error.PyAsn1Error('%s.fromHexString() error: %s' % (cls.__name__, exc)) + except ValueError: + raise error.PyAsn1Error('%s.fromHexString() error: %s' % (cls.__name__, sys.exc_info()[1])) if prepend is not None: value = SizedInteger( @@ -597,8 +626,8 @@ class BitString(base.SimpleAsn1Type): try: value = SizedInteger(value or '0', 2).setBitLength(len(value)) - except ValueError as exc: - raise error.PyAsn1Error('%s.fromBinaryString() error: %s' % (cls.__name__, exc)) + except ValueError: + raise error.PyAsn1Error('%s.fromBinaryString() error: %s' % (cls.__name__, sys.exc_info()[1])) if prepend is not None: value = SizedInteger( @@ -616,10 +645,10 @@ class BitString(base.SimpleAsn1Type): Parameters ---------- - value: :class:`bytes` - Text string like b'\\\\x01\\\\xff' + value: :class:`str` (Py2) or :class:`bytes` (Py3) + Text string like '\\\\x01\\\\xff' (Py2) or b'\\\\x01\\\\xff' (Py3) """ - value = SizedInteger(int.from_bytes(bytes(value), 'big') >> padding).setBitLength(len(value) * 8 - padding) + value = SizedInteger(integer.from_bytes(value) >> padding).setBitLength(len(value) * 8 - padding) if prepend is not None: value = SizedInteger( @@ -634,7 +663,7 @@ class BitString(base.SimpleAsn1Type): def prettyIn(self, value): if isinstance(value, SizedInteger): return value - elif isinstance(value, str): + elif octets.isStringType(value): if not value: return SizedInteger(0).setBitLength(0) @@ -681,7 +710,7 @@ class BitString(base.SimpleAsn1Type): elif isinstance(value, BitString): return SizedInteger(value).setBitLength(len(value)) - elif isinstance(value, int): + elif isinstance(value, intTypes): return SizedInteger(value) else: @@ -690,18 +719,32 @@ class BitString(base.SimpleAsn1Type): ) +try: + # noinspection PyStatementEffect + all + +except NameError: # Python 2.4 + # noinspection PyShadowingBuiltins + def all(iterable): + for element in iterable: + if not element: + return False + return True + + class OctetString(base.SimpleAsn1Type): """Create |ASN.1| schema or value object. |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its - objects are immutable and duck-type :class:`bytes`. - When used in Unicode context, |ASN.1| type + objects are immutable and duck-type Python 2 :class:`str` or + Python 3 :class:`bytes`. When used in Unicode context, |ASN.1| type assumes "|encoding|" serialisation. Keyword Args ------------ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object - :class:`bytes`, alternatively :class:`str` + class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively + class:`unicode` object (Python 2) or :class:`str` (Python 3) representing character string to be serialised into octets (note `encoding` parameter) or |ASN.1| object. If `value` is not given, schema object will be created. @@ -715,8 +758,8 @@ class OctetString(base.SimpleAsn1Type): instantiation. encoding: :py:class:`str` - Unicode codec ID to encode/decode - :class:`str` the payload when |ASN.1| object is used + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used in text string context. 
binValue: :py:class:`str` @@ -793,50 +836,101 @@ class OctetString(base.SimpleAsn1Type): base.SimpleAsn1Type.__init__(self, value, **kwargs) - def prettyIn(self, value): - if isinstance(value, bytes): - return value + if sys.version_info[0] <= 2: + def prettyIn(self, value): + if isinstance(value, str): + return value - elif isinstance(value, str): + elif isinstance(value, unicode): + try: + return value.encode(self.encoding) + + except (LookupError, UnicodeEncodeError): + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeEncodeError( + "Can't encode string '%s' with codec " + "%s" % (value, self.encoding), exc + ) + + elif isinstance(value, (tuple, list)): + try: + return ''.join([chr(x) for x in value]) + + except ValueError: + raise error.PyAsn1Error( + "Bad %s initializer '%s'" % (self.__class__.__name__, value) + ) + + else: + return str(value) + + def __str__(self): + return str(self._value) + + def __unicode__(self): try: - return value.encode(self.encoding) + return self._value.decode(self.encoding) - except UnicodeEncodeError as exc: - raise error.PyAsn1UnicodeEncodeError( - "Can't encode string '%s' with '%s' " - "codec" % (value, self.encoding), exc + except UnicodeDecodeError: + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeDecodeError( + "Can't decode string '%s' with codec " + "%s" % (self._value, self.encoding), exc ) - elif isinstance(value, OctetString): # a shortcut, bytes() would work the same way - return value.asOctets() - elif isinstance(value, base.SimpleAsn1Type): # this mostly targets Integer objects - return self.prettyIn(str(value)) + def asOctets(self): + return str(self._value) - elif isinstance(value, (tuple, list)): - return self.prettyIn(bytes(value)) + def asNumbers(self): + return tuple([ord(x) for x in self._value]) - else: - return bytes(value) + else: + def prettyIn(self, value): + if isinstance(value, bytes): + return value - def __str__(self): - try: - return self._value.decode(self.encoding) + elif isinstance(value, str): + try: + return value.encode(self.encoding) - except UnicodeDecodeError as exc: - raise error.PyAsn1UnicodeDecodeError( - "Can't decode string '%s' with '%s' codec at " - "'%s'" % (self._value, self.encoding, - self.__class__.__name__), exc - ) + except UnicodeEncodeError: + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeEncodeError( + "Can't encode string '%s' with '%s' " + "codec" % (value, self.encoding), exc + ) + elif isinstance(value, OctetString): # a shortcut, bytes() would work the same way + return value.asOctets() - def __bytes__(self): - return bytes(self._value) + elif isinstance(value, base.SimpleAsn1Type): # this mostly targets Integer objects + return self.prettyIn(str(value)) - def asOctets(self): - return bytes(self._value) + elif isinstance(value, (tuple, list)): + return self.prettyIn(bytes(value)) - def asNumbers(self): - return tuple(self._value) + else: + return bytes(value) + + def __str__(self): + try: + return self._value.decode(self.encoding) + + except UnicodeDecodeError: + exc = sys.exc_info()[1] + raise error.PyAsn1UnicodeDecodeError( + "Can't decode string '%s' with '%s' codec at " + "'%s'" % (self._value, self.encoding, + self.__class__.__name__), exc + ) + + def __bytes__(self): + return bytes(self._value) + + def asOctets(self): + return bytes(self._value) + + def asNumbers(self): + return tuple(self._value) # # Normally, `.prettyPrint()` is called from `__str__()`. 
Historically, @@ -905,7 +999,7 @@ class OctetString(base.SimpleAsn1Type): r.append(byte) - return bytes(r) + return octets.ints2octs(r) @staticmethod def fromHexString(value): @@ -927,7 +1021,7 @@ class OctetString(base.SimpleAsn1Type): if p: r.append(int(p + '0', 16)) - return bytes(r) + return octets.ints2octs(r) # Immutable sequence object protocol @@ -1008,7 +1102,7 @@ class Null(OctetString): tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05) ) - subtypeSpec = OctetString.subtypeSpec + constraint.SingleValueConstraint(b'') + subtypeSpec = OctetString.subtypeSpec + constraint.SingleValueConstraint(octets.str2octs('')) # Optimization for faster codec lookup typeId = OctetString.getTypeId() @@ -1017,7 +1111,14 @@ class Null(OctetString): if value: return value - return b'' + return octets.str2octs('') + +if sys.version_info[0] <= 2: + intTypes = (int, long) +else: + intTypes = (int,) + +numericTypes = intTypes + (float,) class ObjectIdentifier(base.SimpleAsn1Type): @@ -1129,25 +1230,24 @@ class ObjectIdentifier(base.SimpleAsn1Type): def prettyIn(self, value): if isinstance(value, ObjectIdentifier): return tuple(value) - elif isinstance(value, str): + elif octets.isStringType(value): if '-' in value: raise error.PyAsn1Error( - # sys.exc_info in case prettyIn was called while handling an exception 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) ) try: return tuple([int(subOid) for subOid in value.split('.') if subOid]) - except ValueError as exc: + except ValueError: raise error.PyAsn1Error( - 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, exc) + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) ) try: tupleOfInts = tuple([int(subOid) for subOid in value if subOid >= 0]) - except (ValueError, TypeError) as exc: + except (ValueError, TypeError): raise error.PyAsn1Error( - 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, exc) + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) ) if len(tupleOfInts) == len(value): @@ -1159,135 +1259,6 @@ class ObjectIdentifier(base.SimpleAsn1Type): return '.'.join([str(x) for x in value]) -class RelativeOID(base.SimpleAsn1Type): - """Create |ASN.1| schema or value object. - |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its - objects are immutable and duck-type Python :class:`tuple` objects - (tuple of non-negative integers). - Keyword Args - ------------ - value: :class:`tuple`, :class:`str` or |ASN.1| object - Python sequence of :class:`int` or :class:`str` literal or |ASN.1| object. - If `value` is not given, schema object will be created. - tagSet: :py:class:`~pyasn1.type.tag.TagSet` - Object representing non-default ASN.1 tag(s) - subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` - Object representing non-default ASN.1 subtype constraint(s). Constraints - verification for |ASN.1| type occurs automatically on object - instantiation. - Raises - ------ - ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error - On constraint violation or bad initializer. - Examples - -------- - .. 
code-block:: python - class RelOID(RelativeOID): - ''' - ASN.1 specification: - id-pad-null RELATIVE-OID ::= { 0 } - id-pad-once RELATIVE-OID ::= { 5 6 } - id-pad-twice RELATIVE-OID ::= { 5 6 7 } - ''' - id_pad_null = RelOID('0') - id_pad_once = RelOID('5.6') - id_pad_twice = id_pad_once + (7,) - """ - #: Set (on class, not on instance) or return a - #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) - #: associated with |ASN.1| type. - tagSet = tag.initTagSet( - tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0d) - ) - - #: Set (on class, not on instance) or return a - #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object - #: imposing constraints on |ASN.1| type initialization values. - subtypeSpec = constraint.ConstraintsIntersection() - - # Optimization for faster codec lookup - typeId = base.SimpleAsn1Type.getTypeId() - - def __add__(self, other): - return self.clone(self._value + other) - - def __radd__(self, other): - return self.clone(other + self._value) - - def asTuple(self): - return self._value - - # Sequence object protocol - - def __len__(self): - return len(self._value) - - def __getitem__(self, i): - if i.__class__ is slice: - return self.clone(self._value[i]) - else: - return self._value[i] - - def __iter__(self): - return iter(self._value) - - def __contains__(self, value): - return value in self._value - - def index(self, suboid): - return self._value.index(suboid) - - def isPrefixOf(self, other): - """Indicate if this |ASN.1| object is a prefix of other |ASN.1| object. - Parameters - ---------- - other: |ASN.1| object - |ASN.1| object - Returns - ------- - : :class:`bool` - :obj:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object - or :obj:`False` otherwise. - """ - l = len(self) - if l <= len(other): - if self._value[:l] == other[:l]: - return True - return False - - def prettyIn(self, value): - if isinstance(value, RelativeOID): - return tuple(value) - elif isinstance(value, str): - if '-' in value: - raise error.PyAsn1Error( - # sys.exc_info in case prettyIn was called while handling an exception - 'Malformed RELATIVE-OID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) - ) - try: - return tuple([int(subOid) for subOid in value.split('.') if subOid]) - except ValueError as exc: - raise error.PyAsn1Error( - 'Malformed RELATIVE-OID %s at %s: %s' % (value, self.__class__.__name__, exc) - ) - - try: - tupleOfInts = tuple([int(subOid) for subOid in value if subOid >= 0]) - - except (ValueError, TypeError) as exc: - raise error.PyAsn1Error( - 'Malformed RELATIVE-OID %s at %s: %s' % (value, self.__class__.__name__, exc) - ) - - if len(tupleOfInts) == len(value): - return tupleOfInts - - raise error.PyAsn1Error('Malformed RELATIVE-OID %s at %s' % (value, self.__class__.__name__)) - - def prettyOut(self, value): - return '.'.join([str(x) for x in value]) - - class Real(base.SimpleAsn1Type): """Create |ASN.1| schema or value object. 
@@ -1368,9 +1339,9 @@ class Real(base.SimpleAsn1Type): def prettyIn(self, value): if isinstance(value, tuple) and len(value) == 3: - if (not isinstance(value[0], (int, float)) or - not isinstance(value[1], int) or - not isinstance(value[2], int)): + if (not isinstance(value[0], numericTypes) or + not isinstance(value[1], intTypes) or + not isinstance(value[2], intTypes)): raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,)) if (isinstance(value[0], float) and self._inf and value[0] in self._inf): @@ -1382,10 +1353,10 @@ class Real(base.SimpleAsn1Type): if value[1] == 10: value = self.__normalizeBase10(value) return value - elif isinstance(value, int): + elif isinstance(value, intTypes): return self.__normalizeBase10((value, 10, 0)) - elif isinstance(value, float) or isinstance(value, str): - if isinstance(value, str): + elif isinstance(value, float) or octets.isStringType(value): + if octets.isStringType(value): try: value = float(value) except ValueError: @@ -1472,21 +1443,32 @@ class Real(base.SimpleAsn1Type): def __rpow__(self, value): return self.clone(pow(value, float(self))) - def __truediv__(self, value): - return self.clone(float(self) / value) + if sys.version_info[0] <= 2: + def __div__(self, value): + return self.clone(float(self) / value) - def __rtruediv__(self, value): - return self.clone(value / float(self)) + def __rdiv__(self, value): + return self.clone(value / float(self)) + else: + def __truediv__(self, value): + return self.clone(float(self) / value) - def __divmod__(self, value): - return self.clone(float(self) // value) + def __rtruediv__(self, value): + return self.clone(value / float(self)) - def __rdivmod__(self, value): - return self.clone(value // float(self)) + def __divmod__(self, value): + return self.clone(float(self) // value) + + def __rdivmod__(self, value): + return self.clone(value // float(self)) def __int__(self): return int(float(self)) + if sys.version_info[0] <= 2: + def __long__(self): + return long(float(self)) + def __float__(self): if self._value in self._inf: return self._value @@ -1517,8 +1499,9 @@ class Real(base.SimpleAsn1Type): def __ceil__(self): return self.clone(math.ceil(float(self))) - def __trunc__(self): - return self.clone(math.trunc(float(self))) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): + return self.clone(math.trunc(float(self))) def __lt__(self, value): return float(self) < value @@ -1538,10 +1521,14 @@ class Real(base.SimpleAsn1Type): def __ge__(self, value): return float(self) >= value - def __bool__(self): - return bool(float(self)) + if sys.version_info[0] <= 2: + def __nonzero__(self): + return bool(float(self)) + else: + def __bool__(self): + return bool(float(self)) - __hash__ = base.SimpleAsn1Type.__hash__ + __hash__ = base.SimpleAsn1Type.__hash__ def __getitem__(self, idx): if self._value in self._inf: @@ -1686,15 +1673,15 @@ class SequenceOfAndSetOfBase(base.ConstructedAsn1Type): try: return self.getComponentByPosition(idx) - except error.PyAsn1Error as exc: - raise IndexError(exc) + except error.PyAsn1Error: + raise IndexError(sys.exc_info()[1]) def __setitem__(self, idx, value): try: self.setComponentByPosition(idx, value) - except error.PyAsn1Error as exc: - raise IndexError(exc) + except error.PyAsn1Error: + raise IndexError(sys.exc_info()[1]) def append(self, value): if self._componentValues is noValue: @@ -1727,8 +1714,8 @@ class SequenceOfAndSetOfBase(base.ConstructedAsn1Type): try: return indices[values.index(value, start, stop)] - except error.PyAsn1Error as exc: - raise 
ValueError(exc) + except error.PyAsn1Error: + raise ValueError(sys.exc_info()[1]) def reverse(self): self._componentValues.reverse() @@ -2085,7 +2072,8 @@ class SequenceOfAndSetOfBase(base.ConstructedAsn1Type): # Represent SequenceOf/SetOf as a bare dict to constraints chain self.subtypeSpec(mapping) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: + exc = sys.exc_info()[1] return exc return False @@ -2236,38 +2224,38 @@ class SequenceAndSetBase(base.ConstructedAsn1Type): self._dynamicNames = self._componentTypeLen or self.DynamicNames() def __getitem__(self, idx): - if isinstance(idx, str): + if octets.isStringType(idx): try: return self.getComponentByName(idx) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: # duck-typing dict - raise KeyError(exc) + raise KeyError(sys.exc_info()[1]) else: try: return self.getComponentByPosition(idx) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: # duck-typing list - raise IndexError(exc) + raise IndexError(sys.exc_info()[1]) def __setitem__(self, idx, value): - if isinstance(idx, str): + if octets.isStringType(idx): try: self.setComponentByName(idx, value) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: # duck-typing dict - raise KeyError(exc) + raise KeyError(sys.exc_info()[1]) else: try: self.setComponentByPosition(idx, value) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: # duck-typing list - raise IndexError(exc) + raise IndexError(sys.exc_info()[1]) def __contains__(self, key): if self._componentTypeLen: @@ -2716,7 +2704,8 @@ class SequenceAndSetBase(base.ConstructedAsn1Type): # Represent Sequence/Set as a bare dict to constraints chain self.subtypeSpec(mapping) - except error.PyAsn1Error as exc: + except error.PyAsn1Error: + exc = sys.exc_info()[1] return exc return False @@ -3025,8 +3014,12 @@ class Choice(Set): return self._componentValues[self._currentIdx] >= other return NotImplemented - def __bool__(self): - return bool(self._componentValues) + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self._componentValues and True or False + else: + def __bool__(self): + return self._componentValues and True or False def __len__(self): return self._currentIdx is not None and 1 or 0 @@ -3231,14 +3224,15 @@ class Any(OctetString): """Create |ASN.1| schema or value object. |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, - its objects are immutable and duck-type :class:`bytes`. - When used in Unicode context, |ASN.1| type assumes + its objects are immutable and duck-type Python 2 :class:`str` or Python 3 + :class:`bytes`. When used in Unicode context, |ASN.1| type assumes "|encoding|" serialisation. Keyword Args ------------ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object - :class:`bytes`, alternatively :class:`str` + :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively + :class:`unicode` object (Python 2) or :class:`str` (Python 3) representing character string to be serialised into octets (note `encoding` parameter) or |ASN.1| object. If `value` is not given, schema object will be created. @@ -3252,8 +3246,8 @@ class Any(OctetString): instantiation. encoding: :py:class:`str` - Unicode codec ID to encode/decode - :class:`str` the payload when |ASN.1| object is used + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used in text string context. 
binValue: :py:class:`str` diff --git a/.venv/lib/python3.10/site-packages/pyasn1/type/useful.py b/.venv/lib/python3.10/site-packages/pyasn1/type/useful.py index a8ae8740..7536b95c 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1/type/useful.py +++ b/.venv/lib/python3.10/site-packages/pyasn1/type/useful.py @@ -1,12 +1,14 @@ # # This file is part of pyasn1 software. # -# Copyright (c) 2005-2020, Ilya Etingof -# License: https://pyasn1.readthedocs.io/en/latest/license.html +# Copyright (c) 2005-2019, Ilya Etingof +# License: http://snmplabs.com/pyasn1/license.html # import datetime from pyasn1 import error +from pyasn1.compat import dateandtime +from pyasn1.compat import string from pyasn1.type import char from pyasn1.type import tag from pyasn1.type import univ @@ -72,9 +74,9 @@ class TimeMixIn(object): elif '-' in text or '+' in text: if '+' in text: - text, plusminus, tz = text.partition('+') + text, plusminus, tz = string.partition(text, '+') else: - text, plusminus, tz = text.partition('-') + text, plusminus, tz = string.partition(text, '-') if self._shortTZ and len(tz) == 2: tz += '00' @@ -97,9 +99,9 @@ class TimeMixIn(object): if '.' in text or ',' in text: if '.' in text: - text, _, ms = text.partition('.') + text, _, ms = string.partition(text, '.') else: - text, _, ms = text.partition(',') + text, _, ms = string.partition(text, ',') try: ms = int(ms) * 1000 @@ -116,7 +118,7 @@ class TimeMixIn(object): text += '00' try: - dt = datetime.datetime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S') + dt = dateandtime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S') except ValueError: raise error.PyAsn1Error('malformed datetime format %s' % self) diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/INSTALLER b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/METADATA b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/METADATA deleted file mode 100644 index 55fc9058..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/METADATA +++ /dev/null @@ -1,73 +0,0 @@ -Metadata-Version: 2.4 -Name: pyasn1_modules -Version: 0.4.2 -Summary: A collection of ASN.1-based protocols modules -Home-page: https://github.com/pyasn1/pyasn1-modules -Author: Ilya Etingof -Author-email: etingof@gmail.com -Maintainer: pyasn1 maintenance organization -Maintainer-email: Christian Heimes -License: BSD -Project-URL: Source, https://github.com/pyasn1/pyasn1-modules -Project-URL: Issues, https://github.com/pyasn1/pyasn1-modules/issues -Project-URL: Changelog, https://github.com/pyasn1/pyasn1-modules/blob/master/CHANGES.txt -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Education -Classifier: Intended Audience :: Information Technology -Classifier: Intended Audience :: System Administrators -Classifier: Intended Audience :: Telecommunications Industry -Classifier: License :: OSI Approved :: BSD License -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 
-Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Communications -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -License-File: LICENSE.txt -Requires-Dist: pyasn1<0.7.0,>=0.6.1 -Dynamic: license-file - - -ASN.1 modules for Python ------------------------- -[![PyPI](https://img.shields.io/pypi/v/pyasn1-modules.svg?maxAge=2592000)](https://pypi.org/project/pyasn1-modules) -[![Python Versions](https://img.shields.io/pypi/pyversions/pyasn1-modules.svg)](https://pypi.org/project/pyasn1-modules/) -[![Build status](https://github.com/pyasn1/pyasn1-modules/actions/workflows/main.yml/badge.svg)](https://github.com/pyasn1/pyasn1-modules/actions/workflows/main.yml) -[![Coverage Status](https://img.shields.io/codecov/c/github/pyasn1/pyasn1-modules.svg)](https://codecov.io/github/pyasn1/pyasn1-modules) -[![GitHub license](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/pyasn1/pyasn1-modules/master/LICENSE.txt) - -The `pyasn1-modules` package contains a collection of -[ASN.1](https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.208-198811-W!!PDF-E&type=items) -data structures expressed as Python classes based on [pyasn1](https://github.com/pyasn1/pyasn1) -data model. - -If ASN.1 module you need is not present in this collection, try using -[Asn1ate](https://github.com/kimgr/asn1ate) tool that compiles ASN.1 documents -into pyasn1 code. - -**NOTE:** The package is now maintained by *Christian Heimes* and -*Simon Pichugin* in project https://github.com/pyasn1/pyasn1-modules. - -Feedback --------- - -If something does not work as expected, -[open an issue](https://github.com/pyasn1/pyasn1-modules/issues) at GitHub -or post your question [on Stack Overflow](https://stackoverflow.com/questions/ask) - -New modules contributions are welcome via GitHub pull requests. - -Copyright (c) 2005-2020, [Ilya Etingof](mailto:etingof@gmail.com). -All rights reserved. 
diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/RECORD b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/RECORD deleted file mode 100644 index 5e0ac740..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/RECORD +++ /dev/null @@ -1,271 +0,0 @@ -pyasn1_modules-0.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pyasn1_modules-0.4.2.dist-info/METADATA,sha256=FiZm11KX383QEWy-qxNNePq4CLima_pcHXSDLZt1R3E,3484 -pyasn1_modules-0.4.2.dist-info/RECORD,, -pyasn1_modules-0.4.2.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 -pyasn1_modules-0.4.2.dist-info/licenses/LICENSE.txt,sha256=Kq1fwA9wXEoa3bg-7RCmp10oajd58M-FGdh-YrxHNf0,1334 -pyasn1_modules-0.4.2.dist-info/top_level.txt,sha256=e_AojfE1DNY4M8P9LAS7qh8Fx3eOmovobqkr7NEjlg4,15 -pyasn1_modules-0.4.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -pyasn1_modules/__init__.py,sha256=JKAoFkSDJbt4rg_rDDaTin-JVwKNKIwMg3OSPhlNAiM,65 -pyasn1_modules/__pycache__/__init__.cpython-310.pyc,, -pyasn1_modules/__pycache__/pem.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc1155.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc1157.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc1901.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc1902.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc1905.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2251.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2314.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2315.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2437.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2459.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2511.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2560.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2631.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2634.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2876.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2985.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc2986.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3058.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3114.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3125.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3161.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3274.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3279.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3280.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3281.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3370.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3412.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3414.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3447.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3537.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3560.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3565.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3657.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3709.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3739.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3770.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3779.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3820.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc3852.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4010.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4043.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4055.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4073.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4108.cpython-310.pyc,, 
-pyasn1_modules/__pycache__/rfc4210.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4211.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4334.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4357.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4387.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4476.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4490.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4491.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4683.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc4985.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5035.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5083.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5084.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5126.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5208.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5275.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5280.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5480.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5636.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5639.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5649.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5652.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5697.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5751.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5752.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5753.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5755.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5913.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5914.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5915.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5916.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5917.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5924.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5934.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5940.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5958.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc5990.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6010.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6019.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6031.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6032.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6120.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6170.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6187.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6210.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6211.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6402.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6482.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6486.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6487.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6664.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6955.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc6960.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7030.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7191.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7229.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7292.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7296.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7508.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7585.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7633.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7773.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7894.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7906.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc7914.cpython-310.pyc,, 
-pyasn1_modules/__pycache__/rfc8017.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8018.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8103.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8209.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8226.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8358.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8360.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8398.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8410.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8418.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8419.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8479.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8494.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8520.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8619.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8649.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8692.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8696.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8702.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8708.cpython-310.pyc,, -pyasn1_modules/__pycache__/rfc8769.cpython-310.pyc,, -pyasn1_modules/pem.py,sha256=J9x8rEx_jhdXBKeM4X3JS5ucRNsdQyaNJrs4Dfog0MM,1821 -pyasn1_modules/rfc1155.py,sha256=yG3A0ClwC3diCdMxEywvLvdc8eBtL_fh9yIdImYij2E,2683 -pyasn1_modules/rfc1157.py,sha256=JBkbJD7LcyGH4vzxYIXgAw58X15Wx6RrL59NMBLqYRU,3554 -pyasn1_modules/rfc1901.py,sha256=qpv7P8cC0kcy33rZ5_Et0AXgJlOB6Rsv6gpMpWnZBGk,646 -pyasn1_modules/rfc1902.py,sha256=VfB6zThzmIzBPcAkosPVfNQH0D-MtKYarL7BvgMJS0U,3705 -pyasn1_modules/rfc1905.py,sha256=q---r52j2AyQ4HL3hRbWBcNGNLTahDsAGAJiCYzsW0Q,4831 -pyasn1_modules/rfc2251.py,sha256=jHH9A0FTDwNp4lJLCr24_tFSofi_5r2rq8BLEB-EfcI,26931 -pyasn1_modules/rfc2314.py,sha256=h7G-AdL89X9-3qxtjcQYAikVtvK-l4xmxD_KZySDH50,1313 -pyasn1_modules/rfc2315.py,sha256=_0tMHafjOM4lUYOg-Cc_-_Fd_BGlPT32erpYswntRLM,9666 -pyasn1_modules/rfc2437.py,sha256=1lie1MIMdOFBnlAETF2ThpLs0rz6aV8AJaSlwWMsRUY,2623 -pyasn1_modules/rfc2459.py,sha256=ZHKavZuowKbEbzTxpjtSNi3nBUS3gsigSZjwMoPx2E0,50002 -pyasn1_modules/rfc2511.py,sha256=IuJekkyDgOD5mYN9a2zHf4hpKSuhPBgnwbPx5EUOH0k,10350 -pyasn1_modules/rfc2560.py,sha256=ztdWLRPg99biGGOTmkfTIJGZrCEwXNGgUPS-9bOzaBQ,8406 -pyasn1_modules/rfc2631.py,sha256=Het4nHPVFj6oElpEANYkKQuincUa0ms5SOt94Ph8jhs,1219 -pyasn1_modules/rfc2634.py,sha256=7sTu3YysbHImknLk7CbdQIjJjt6cC849-XqkuEDgFPk,9425 -pyasn1_modules/rfc2876.py,sha256=yWx-0S_Lm-qGaEmJ4e3DZy7GKpvxFTf0rQVfnz_ke-8,1438 -pyasn1_modules/rfc2985.py,sha256=8GL8jkWGpN1t7sVaEtyhVgfCM80XhlYOUEi9jhcAX0E,14359 -pyasn1_modules/rfc2986.py,sha256=fLxMpQEBGC2e68ASrQWCUvRzpndfy1uOiSvf1pa0V-M,1896 -pyasn1_modules/rfc3058.py,sha256=dKAjM1SUIk-Hoj5ps5MDzbDv8fHQma2J6B8SoZqlrb8,992 -pyasn1_modules/rfc3114.py,sha256=02eDCK2blUNybTaGX85vxGfCTnzHXXa9BP9IaVVocK8,1961 -pyasn1_modules/rfc3125.py,sha256=bd80G-Frzu3A_rfql04WLJcbKigQjsDY_6kFBinMFQk,16707 -pyasn1_modules/rfc3161.py,sha256=9kz_TvQ5_OpBPuHQDAh2WyqKeOThgxPq8E5iBB-sNp8,4260 -pyasn1_modules/rfc3274.py,sha256=ZULbMN3wksvv_fWvT_C1vskxuh_IzRCAD9QD1hdk-lo,1670 -pyasn1_modules/rfc3279.py,sha256=uRaWfvIw4WXBoJN9gcAhsW8MTDymGoa-FrrC2k033TI,6807 -pyasn1_modules/rfc3280.py,sha256=JxIDKgqrhEkRmr54yNmoNsFCRIK8Lz8X85yLHiMTXcg,46620 -pyasn1_modules/rfc3281.py,sha256=IBiEVBhKoWlb0KJwVwqAPW91PPW-v8Dla8p52n2GgXY,9866 -pyasn1_modules/rfc3370.py,sha256=q3dToJOoYvaxFO89Zqa_nYi9l3rQvGwCfLF7qFfhbfs,2811 -pyasn1_modules/rfc3412.py,sha256=MqrXwY35FPEZqXNdey4LZ44wBO6nrITqD5-thE_tLd8,1956 -pyasn1_modules/rfc3414.py,sha256=NmGZ3dWZY3e_Ovv0PyHDdK8hOqdbaar2AuUSi7YKkDc,1167 
-pyasn1_modules/rfc3447.py,sha256=IBxHcG0Xg8kXomMB6hFv-CMa_Y9XQQa-b1bs7L0bhsM,1605 -pyasn1_modules/rfc3537.py,sha256=I0LVSnsyyMOTNJyLda-8T98JShYtDk13yPd_bmrZ3OQ,796 -pyasn1_modules/rfc3560.py,sha256=3Ud7sY7OAV_4KGNn_hg5xZblEkxE_ILH1kP2TI-KbZw,1818 -pyasn1_modules/rfc3565.py,sha256=nRephcXY7ioG5I4iaT6mSQYGwaouRQXoMnp2kFQQOE0,1438 -pyasn1_modules/rfc3657.py,sha256=H484bVE7VJ05rVbk6BdlbIbAWCd7CFqt5e5BptNs3wY,1746 -pyasn1_modules/rfc3709.py,sha256=KAaG7SKTT9Ef-Kza5Zn_qXkZppul8Wt8MPSkzS4qs5o,6469 -pyasn1_modules/rfc3739.py,sha256=p-D897qRYt6QR0fWmgoPVmB6VXnz_1mMY1HRJnH9eeE,5122 -pyasn1_modules/rfc3770.py,sha256=ue0Qaiys8J86M-8EtLNrcfuXm87Mr2GQ4f30lSs0vXE,1743 -pyasn1_modules/rfc3779.py,sha256=x8HYKGCaGO3BohCREHQUEa1oYGArWIC2J0PftxiPrjI,3260 -pyasn1_modules/rfc3820.py,sha256=hV70UpibmcSzH4_TncHPPHsw4OrtFjMbepS4EvieQLk,1478 -pyasn1_modules/rfc3852.py,sha256=VcWcbTAAYtZ0FDYcuofDxVVSMdKgithdTBixQ898Szs,20101 -pyasn1_modules/rfc4010.py,sha256=J2FptJQv03o8N-mo7vJ9q5_A9vrwEk0crpvAkXmVH74,1228 -pyasn1_modules/rfc4043.py,sha256=OWPgVzfK3Hs5sNQJSqUBkInhgikv-x15-xLSg30xwNE,1067 -pyasn1_modules/rfc4055.py,sha256=f2rlyaBeNhl287b_qLLsNpjgwxYRVzBgbOH28UnJZwQ,10392 -pyasn1_modules/rfc4073.py,sha256=bHVssQE3yXwetes1TPWAT30UhOEinHj8vEBaYjWC24g,1636 -pyasn1_modules/rfc4108.py,sha256=-I63Z0crn_Elvr85nSa9BqAlRx7cIJfEb9ItPDkq8JY,10598 -pyasn1_modules/rfc4210.py,sha256=3ndhsJ5yFx3ZUvUP8EDeJhUilSFAdeL8baT5tKPyIi0,28469 -pyasn1_modules/rfc4211.py,sha256=THqr9n4PGg0jVMmj8k9bhKzb5j7IapwtBinhWQAnL9k,12110 -pyasn1_modules/rfc4334.py,sha256=Q-fcYksrunAo1t07HE2jm5WlQgFAf5o39utpel0ZjcI,1586 -pyasn1_modules/rfc4357.py,sha256=tpZZ-6xDt_u8aYr4qbwe5UJxT-JVGOihuxPaemB1AXM,15036 -pyasn1_modules/rfc4387.py,sha256=CylvEQRpV_U9vVivzTJ4PVDjZEBAKS-6TrVVoepRk2E,441 -pyasn1_modules/rfc4476.py,sha256=klWmMFZg_aMqmVGuYEjXQa3v3iKlopkhMumBoLdmYT4,1960 -pyasn1_modules/rfc4490.py,sha256=Z9nkntSSc9npWHBjV9c86QGW_itvGYQ4l5jJO0a2GCA,3401 -pyasn1_modules/rfc4491.py,sha256=Lpej17T5MfF25FnfWo1CFqmBYWQTJS4WQSGki5hCgsM,1054 -pyasn1_modules/rfc4683.py,sha256=X0P6ln34ZsYkCepgGJH8AxUhoG85TuLof55jIIDMNeI,1839 -pyasn1_modules/rfc4985.py,sha256=oWCBG3tknFLUJOeG4aKF7JrkA4qMjPyJFGTnf7xmPd8,961 -pyasn1_modules/rfc5035.py,sha256=xgw9ztAM_bJKlIUCzni2zcE_z3ErEuXpWRPJpXI1KEw,4523 -pyasn1_modules/rfc5083.py,sha256=ENXIEL0CYrTqvf_iwpvAkBBJpi2pOFNBDFEYc37yqF8,1888 -pyasn1_modules/rfc5084.py,sha256=i9sFdUklbdTQodTya4BNFnpeFxGIB2uS1aNkfFdZpu4,2855 -pyasn1_modules/rfc5126.py,sha256=ape8y-hcslojU0MRD6-JCoQlJxsaS0h_0sWs2FlUGqI,15780 -pyasn1_modules/rfc5208.py,sha256=adwiwa639VdhaNMRTKXfEPl-AkbB5grrLJMJN6FZVsg,1432 -pyasn1_modules/rfc5275.py,sha256=mgirSEvl3OJn1C40dHkNTH04Sm4aEvT8c682FE_cOQ8,11605 -pyasn1_modules/rfc5280.py,sha256=GFwSclsvpTUc2rjEF2fwAK48wHI3PkXxHIbVMkfD4sQ,51236 -pyasn1_modules/rfc5480.py,sha256=GzBTgKQ68V-L-Qy0SBrCQMgqR5mGF7U73uXlBzfV2Jk,4834 -pyasn1_modules/rfc5636.py,sha256=2z0NoxI2uMlTHHGA10Q0W_329PsmZOjSFnciL2CiywE,2324 -pyasn1_modules/rfc5639.py,sha256=28YlbU49j4lr_Blwfjn2-WJyzKQhpJPPZMNW0lWXlSk,1025 -pyasn1_modules/rfc5649.py,sha256=3A--LQL7iw8DGXSDyiSUeh6wwFPKQQGyVY94mNzY0Ek,830 -pyasn1_modules/rfc5652.py,sha256=65A_djYGSuEhuFj08mn7y2kvVrbr4ArxcW0bW5JVzrE,21451 -pyasn1_modules/rfc5697.py,sha256=aWqi-QBZrEr6I5bIppRet4dREqFF8E1tO2BvaZBGOOE,1702 -pyasn1_modules/rfc5751.py,sha256=M8kTLARhdqh3UqmlZv_FWJfuJb-ph7P6MVGxSP7Q4wQ,3198 -pyasn1_modules/rfc5752.py,sha256=Sjijqbi5C_deVnJJMBtOHz7wBWFM7PCTwDWZ2lomWT0,1431 -pyasn1_modules/rfc5753.py,sha256=2Nwr8GsV2TgjTDLtE5QiIunPOA617F3CXB_tPYe7BHc,4534 
-pyasn1_modules/rfc5755.py,sha256=RZ28NeCnEAGr2pLRSNFw0BRb_b_eulmxag-lRTmUeTo,12081 -pyasn1_modules/rfc5913.py,sha256=OayMmpi29ZlQI1EszIxXaU8Mhwi41BrH5esoyS80efQ,1161 -pyasn1_modules/rfc5914.py,sha256=nXOb4SvESbEFYI8h0nEYkRArNZ9w5Zqxva_4uAdMXNY,3714 -pyasn1_modules/rfc5915.py,sha256=VqMRd_Ksm0LFvE5XX4_MO6BdFG7Ch7NdQcwT_DMWAK4,1056 -pyasn1_modules/rfc5916.py,sha256=gHrFO9lX21h6Wa3JnEqyjuqXQlcTE0loUIu913Sit0E,800 -pyasn1_modules/rfc5917.py,sha256=nM08rGm9D3O8uqSbmshvp7_fHl2dYaTdhUGVJQHe0xc,1511 -pyasn1_modules/rfc5924.py,sha256=_8TqEJ9Q7cFSd2u3Za6rzlNPqGLl7IA4oHtYVpoJhdA,425 -pyasn1_modules/rfc5934.py,sha256=77z96SeP4iM2R6Rl5-Vx7OaENA8ZQvzrfhDVZRy9lqk,23798 -pyasn1_modules/rfc5940.py,sha256=66rMmgyKBhay-RZsWaKz7PUGwp0bqEAVULPb4Edk1vk,1613 -pyasn1_modules/rfc5958.py,sha256=NZPx-7FvjzgErz2lTURiRq8m3XCZ7D9QbGDhtIF-zCE,2650 -pyasn1_modules/rfc5990.py,sha256=-b0St64ba3LVRGSeNmbGoMIbkU8c8FDpo4zFWF0PCFM,5505 -pyasn1_modules/rfc6010.py,sha256=F43AYVFUwu-2_xjJE2Wmw1Wdt0K7l3vg0_fCa_QHqBU,2347 -pyasn1_modules/rfc6019.py,sha256=vzj5tfG4694-ucpErpAtE1DVOE4-v0dkN894Zr9xm4o,1086 -pyasn1_modules/rfc6031.py,sha256=X2cjNyVnrX3G2zG7kD4Rq__kF6-ftmmnqHlCQJDCuMU,12137 -pyasn1_modules/rfc6032.py,sha256=uNAu5zLHg0b583xxzFNUZxCnJaCzMw1iobzREuejMoM,1950 -pyasn1_modules/rfc6120.py,sha256=JehGZD8Y0Bdhr_ojpMSjHgnRHEdUXauZxqLxRwns6Cc,818 -pyasn1_modules/rfc6170.py,sha256=sL2yPZzO--MI4ToeAwlFEP-x6I0-etuJxT2mgAPjEO4,409 -pyasn1_modules/rfc6187.py,sha256=jOMiIhw4HAUn7hj37gKImNU_hK8TamAfd0V0Jrwh_YU,489 -pyasn1_modules/rfc6210.py,sha256=wLifK_EShv1a4TOhGJ-k9zA1kVVYVDNjS-Rh0ohmCh0,1052 -pyasn1_modules/rfc6211.py,sha256=XotTBQVseK7y0nJB4Fx-npdhRHeH53IM84kGupWIprk,2257 -pyasn1_modules/rfc6402.py,sha256=ksg6YsacS9vJAzObqFOPRCoe8mpyvRDb43Z0v-QKhnM,17148 -pyasn1_modules/rfc6482.py,sha256=10_Xyb2TaPFx72IUCZtu81aH5rmYihhdL0P-PVby1ys,2085 -pyasn1_modules/rfc6486.py,sha256=a3_5OJvkz2G7xWOC0dqbNqJQDsHQAOU62AWin107c4k,1916 -pyasn1_modules/rfc6487.py,sha256=gTUVkFYJyUcr1E4uoeN2cXPNaXyjYbixupbBKFQA4jQ,472 -pyasn1_modules/rfc6664.py,sha256=nq8F5wDeO49FoBGVQDx8ivvg_GsubdWa1bpZM_40Tms,4270 -pyasn1_modules/rfc6955.py,sha256=FBVb8LpHKMZjR3wOJtm-BPbi5EMiRoGuUWh41r1soCU,2814 -pyasn1_modules/rfc6960.py,sha256=BhEDCLLrae4RaCpMuKJc0kw1bGs56V0_F-NxiO9ctuw,7913 -pyasn1_modules/rfc7030.py,sha256=t-s2BDyX3Zk2sy_jMQl-P2I2NXFOn7huu0wFcM-2sqs,1441 -pyasn1_modules/rfc7191.py,sha256=uMsBzJ9167wxsiPYDQUnZQFVFNfgUxnCwRNeKnXxNGM,7062 -pyasn1_modules/rfc7229.py,sha256=GSiUz4QkYODfnIvLRXKiabyno9Gmd6CX0zWR7HoIpCk,743 -pyasn1_modules/rfc7292.py,sha256=wORjDGD_aqHoujB2wu6nNrEjYTw3VO_xDp-Qx0VWLbc,8478 -pyasn1_modules/rfc7296.py,sha256=eAZpZ2dgUhxbJrLLGtDff4UspauG7Tr5dj8WELYHnUM,885 -pyasn1_modules/rfc7508.py,sha256=ZmJFbQO934Fs8wxcpO0gg5fU0d8yEFlkkFD3KMUQbAE,2182 -pyasn1_modules/rfc7585.py,sha256=T0-sdzPJoop1jbB2RJ-wzUnf6t6CeD2eMMXpcz55JEg,1076 -pyasn1_modules/rfc7633.py,sha256=8P_fBWkoGk3rsk7SEAm6QZcPjoRGTRGQuasWMLOrLKY,841 -pyasn1_modules/rfc7773.py,sha256=6UGPWyVYuicKe6snZCnD1wuAu1MOVgzPoSALL2uvTrI,1315 -pyasn1_modules/rfc7894.py,sha256=HLaSBoOUB-_cSE5935TXAnuFBVpZBv6jBnLOPp_-LNk,2769 -pyasn1_modules/rfc7906.py,sha256=mDf1pWwVNlCcEQfswUhtQDStAnwS-5xbZtjMlfnWLdI,18921 -pyasn1_modules/rfc7914.py,sha256=JxWGnXV-V13xzOn7c7-_3vxDNpkPtdZIYU4KF2kFXR4,1493 -pyasn1_modules/rfc8017.py,sha256=pwPRSchvMtXuatcCLULHuvSL8kAPEqkC4aIJjd5vEAo,4178 -pyasn1_modules/rfc8018.py,sha256=8_49xA3vEOdlGUhasw2xTUv4TpHBvjRuoonMT_k1TTk,6166 -pyasn1_modules/rfc8103.py,sha256=pNYAFfKCNrg9ZmRKsNNwr2ooptEABF3gMaPbqCroRnQ,1017 
-pyasn1_modules/rfc8209.py,sha256=9EQ077rjD9uoTZWIOGmeOaHLDDq0IRXh3Rt0eYB-Ysc,393 -pyasn1_modules/rfc8226.py,sha256=mudlVgrsJ6XeHnFmxBNW_NgcYcFsHUvK04_MTr3UkRM,4291 -pyasn1_modules/rfc8358.py,sha256=aiHaXQAaaP-q5c90x_uZHSpQRTB-yekwhe6V9-EtrFg,1136 -pyasn1_modules/rfc8360.py,sha256=T4sY6o2VLVPnZ9s4yJ8PzfVA8Y60ne-1KcVNtw5yt-s,1075 -pyasn1_modules/rfc8398.py,sha256=i3lwgf__9oJzOaaHJKWmDAx3d_deKNCCuvIDWqQWiJ4,1192 -pyasn1_modules/rfc8410.py,sha256=nteKyTKcIwVlgh1qUl-8kE63kKG-KgWtLrfF92TWyyQ,971 -pyasn1_modules/rfc8418.py,sha256=eTCPTOm6t-RyHd6PlowLogDzUO72lRddESYLiSiOpC0,1109 -pyasn1_modules/rfc8419.py,sha256=qcvBlXxqvsCvG_F6AKKjqBderqbWwBy8zjZOjAPdYU4,1704 -pyasn1_modules/rfc8479.py,sha256=rDKzrp-MmEF0t3E7lqKXhgwcggvx8NoWVbtJHGLxDYM,1142 -pyasn1_modules/rfc8494.py,sha256=GMht1RdAbjHLtSqHdJ2cLO8HXRz6SLIPE254T4oy0S4,2363 -pyasn1_modules/rfc8520.py,sha256=_o00lv2MYciOqo0UKjlZBQNY_MzzgQt1SV9VXCI0T9A,1496 -pyasn1_modules/rfc8619.py,sha256=qSYiBefLSFukLg6VIgR6dnhX-uBwJMItxqHjNXnBgM0,1136 -pyasn1_modules/rfc8649.py,sha256=oHCQK7g4vKs1B0IO9GgiidTyPOk4pz5bYkXSRmBOAHo,982 -pyasn1_modules/rfc8692.py,sha256=eLLbpx7CdcCSL3MgogE17WvPoV3VW-K1bo6dq4mRoeg,2098 -pyasn1_modules/rfc8696.py,sha256=iqGh7uhZ9wO2UU6Tk2tIBTw_uwOutogkksnDdg_-Qjg,3479 -pyasn1_modules/rfc8702.py,sha256=t7J9gvKZ3n02UCUjTHeJZWdcC8moBe2QmA_P6K8LVww,2739 -pyasn1_modules/rfc8708.py,sha256=iHNsHkk1G419l7KOQAZwq25yL9m6wyACmM0lS4f5XGw,978 -pyasn1_modules/rfc8769.py,sha256=vwRJCfdmar4FdQ-IhCHJRyyn4fVm_82WsVMuqBeDy5o,441 diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/WHEEL b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/WHEEL deleted file mode 100644 index 1eb3c49d..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (78.1.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/licenses/LICENSE.txt b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 598b8430..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2005-2020, Ilya Etingof -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/top_level.txt b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/top_level.txt deleted file mode 100644 index 9dad8496..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pyasn1_modules diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/zip-safe b/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/zip-safe deleted file mode 100644 index 8b137891..00000000 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules-0.4.2.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/.venv/lib/python3.10/site-packages/pyasn1_modules/__init__.py b/.venv/lib/python3.10/site-packages/pyasn1_modules/__init__.py index 5b90010d..ae0ff01d 100644 --- a/.venv/lib/python3.10/site-packages/pyasn1_modules/__init__.py +++ b/.venv/lib/python3.10/site-packages/pyasn1_modules/__init__.py @@ -1,2 +1,2 @@ # http://www.python.org/dev/peps/pep-0396/ -__version__ = '0.4.2' +__version__ = '0.4.1' diff --git a/Makefile b/Makefile index 1858efc3..213a563d 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: migrate init revision upgrade downgrade run +.PHONY: migrate init revision upgrade downgrade run seed-admin seed-client seed-agents seed-mcp-servers seed-tools seed-contacts seed-all # Comandos do Alembic init: @@ -34,4 +34,26 @@ alembic-reset: # Comando para forçar upgrade com CASCADE alembic-upgrade-cascade: - psql -U postgres -d a2a_saas -c "DROP TABLE IF EXISTS events CASCADE; DROP TABLE IF EXISTS sessions CASCADE; DROP TABLE IF EXISTS user_states CASCADE; DROP TABLE IF EXISTS app_states CASCADE;" && alembic upgrade head \ No newline at end of file + psql -U postgres -d a2a_saas -c "DROP TABLE IF EXISTS events CASCADE; DROP TABLE IF EXISTS sessions CASCADE; DROP TABLE IF EXISTS user_states CASCADE; DROP TABLE IF EXISTS app_states CASCADE;" && alembic upgrade head + +# Comandos para executar seeders +seed-admin: + python -m scripts.seeders.admin_seeder + +seed-client: + python -m scripts.seeders.client_seeder + +seed-agents: + python -m scripts.seeders.agent_seeder + +seed-mcp-servers: + python -m scripts.seeders.mcp_server_seeder + +seed-tools: + python -m scripts.seeders.tool_seeder + +seed-contacts: + python -m scripts.seeders.contact_seeder + +seed-all: + python -m scripts.run_seeders \ No newline at end of file diff --git a/README.md b/README.md index 2ca97d54..8438057d 100644 --- a/README.md +++ b/README.md @@ -1,296 +1,1115 @@ -# Evo AI - Plataforma de Agentes de IA +# Evo AI - AI Agents Platform -Evo AI é uma plataforma open-source para criação e gerenciamento de agentes de IA, permitindo a integração com diferentes modelos e serviços de IA. +Evo AI is an open-source platform for creating and managing AI agents, enabling integration with different AI models and services. 
-## 🚀 Visão Geral +## 🚀 Overview -O Evo AI é uma plataforma que permite: -- Criação e gerenciamento de agentes de IA -- Integração com diferentes modelos de linguagem -- Gerenciamento de clientes e contatos -- Configuração de servidores MCP -- Gerenciamento de ferramentas personalizadas -- Autenticação via API Key +The Evo AI platform allows: +- Creation and management of AI agents +- Integration with different language models +- Client and contact management +- MCP server configuration +- Custom tools management +- JWT authentication with email verification -## 🛠️ Tecnologias +## 🛠️ Technologies -- **FastAPI**: Framework web para construção da API -- **SQLAlchemy**: ORM para interação com o banco de dados -- **PostgreSQL**: Banco de dados principal -- **Alembic**: Sistema de migrações -- **Pydantic**: Validação e serialização de dados -- **Uvicorn**: Servidor ASGI -- **Redis**: Cache e gerenciamento de sessões +- **FastAPI**: Web framework for building the API +- **SQLAlchemy**: ORM for database interaction +- **PostgreSQL**: Main database +- **Alembic**: Migration system +- **Pydantic**: Data validation and serialization +- **Uvicorn**: ASGI server +- **Redis**: Cache and session management +- **JWT**: Secure token authentication +- **SendGrid**: Email service for verification -## 📁 Estrutura do Projeto +## 📁 Project Structure ``` src/ -├── api/ # Endpoints da API -├── core/ # Lógica central do negócio -├── models/ # Modelos de dados -├── schemas/ # Schemas Pydantic para validação -├── utils/ # Utilitários -├── config/ # Configurações -└── services/ # Serviços de negócio +├── api/ # API endpoints +├── core/ # Core business logic +├── models/ # Data models +├── schemas/ # Pydantic schemas for validation +├── utils/ # Utilities +├── config/ # Configurations +└── services/ # Business services ``` -## 📋 Requisitos +## 📋 Requirements - Python 3.8+ - PostgreSQL - Redis -- OpenAI API Key (ou outro provedor de IA) +- OpenAI API Key (or other AI provider) +- SendGrid Account (for email sending) -## 🔧 Instalação +## 🔧 Installation -1. Clone o repositório: +1. Clone the repository: ```bash -git clone https://github.com/seu-usuario/evo-ai.git +git clone https://github.com/your-username/evo-ai.git cd evo-ai ``` -2. Crie um ambiente virtual: +2. Create a virtual environment: ```bash python -m venv .venv source .venv/bin/activate # Linux/Mac -# ou +# or .venv\Scripts\activate # Windows ``` -3. Instale as dependências: +3. Install dependencies: ```bash pip install -r requirements.txt ``` -4. Configure as variáveis de ambiente: +4. Set up environment variables: ```bash cp .env.example .env -# Edite o arquivo .env com suas configurações +# Edit the .env file with your settings ``` -5. Execute as migrações: +5. Run migrations: ```bash make upgrade ``` -## 🔐 Autenticação +## 🔐 Authentication -A API utiliza autenticação via API Key. Para acessar os endpoints, você precisa: +The API uses JWT (JSON Web Token) authentication. To access the endpoints, you need to: -1. Incluir a API Key no header `X-API-Key` de todas as requisições -2. A API Key é gerada automaticamente quando o servidor é iniciado pela primeira vez -3. Você pode encontrar a API Key no arquivo `.env` ou nos logs do servidor +1. Register a user or log in to obtain a JWT token +2. Include the JWT token in the `Authorization` header of all requests in the format `Bearer ` +3. 
Tokens expire after a configured period (default: 30 minutes) -Exemplo de uso com curl: -```bash -curl -X GET "http://localhost:8000/api/clients/" \ - -H "X-API-Key: sua-api-key-aqui" +### Authentication Flow + +1. **User Registration**: +```http +POST /api/v1/auth/register ``` -## 🚀 Executando o Projeto +2. **Email Verification**: +An email will be sent containing a verification link. + +3. **Login**: +```http +POST /api/v1/auth/login +``` +Returns a JWT token to be used in requests. + +4. **Password Recovery (if needed)**: +```http +POST /api/v1/auth/forgot-password +POST /api/v1/auth/reset-password +``` + +### Example Usage with curl: +```bash +# Login +curl -X POST "http://localhost:8000/api/v1/auth/login" \ + -H "Content-Type: application/json" \ + -d '{"email": "your-email@example.com", "password": "your-password"}' + +# Use received token +curl -X GET "http://localhost:8000/api/v1/clients/" \ + -H "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9..." +``` + +### Access Control +- Regular users (associated with a client) only have access to their client's resources +- Admin users have access to all resources +- Certain operations (such as creating MCP servers) are restricted to administrators only + +## 🚀 Running the Project ```bash make run ``` -A API estará disponível em `http://localhost:8000` +The API will be available at `http://localhost:8000` -## 📚 Documentação da API +## 📚 API Documentation -### Clientes +### Authentication -#### Criar Cliente +#### Register User ```http -POST /clients/ +POST /api/v1/auth/register ``` -Cria um novo cliente. -#### Listar Clientes +**Request Body:** +```json +{ + "email": "user@example.com", + "password": "securePassword123", + "name": "Company Name" +} +``` + +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "email": "user@example.com", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "is_active": false, + "email_verified": false, + "is_admin": false, + "created_at": "2023-07-10T15:00:00.000Z" +} +``` + +Registers a new user and sends a verification email. The user will remain inactive until the email is verified. + +#### Login ```http -GET /clients/ +POST /api/v1/auth/login ``` -Lista todos os clientes com paginação. -#### Buscar Cliente +**Request Body:** +```json +{ + "email": "user@example.com", + "password": "securePassword123" +} +``` + +**Response (200 OK):** +```json +{ + "access_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...", + "token_type": "bearer" +} +``` + +Authenticates the user and returns a valid JWT token for use in subsequent requests. + +#### Verify Email ```http -GET /clients/{client_id} +GET /api/v1/auth/verify-email/{token} ``` -Busca um cliente específico. -#### Atualizar Cliente +**Response (200 OK):** +```json +{ + "message": "Email successfully verified. Your account is now active." +} +``` + +Verifies the user's email using the token sent by email. Activates the user's account. + +#### Resend Verification ```http -PUT /clients/{client_id} +POST /api/v1/auth/resend-verification ``` -Atualiza os dados de um cliente. -#### Remover Cliente +**Request Body:** +```json +{ + "email": "user@example.com" +} +``` + +**Response (200 OK):** +```json +{ + "message": "Verification email resent. Please check your inbox." +} +``` + +Resends the verification email for users with unverified email. + +#### Forgot Password ```http -DELETE /clients/{client_id} +POST /api/v1/auth/forgot-password ``` -Remove um cliente. 
-### Contatos +**Request Body:** +```json +{ + "email": "user@example.com" +} +``` -#### Criar Contato +**Response (200 OK):** +```json +{ + "message": "If the email exists in our database, a password reset link will be sent." +} +``` + +Sends an email with password recovery instructions if the email is registered. + +#### Reset Password ```http -POST /contacts/ +POST /api/v1/auth/reset-password ``` -Cria um novo contato. -#### Listar Contatos +**Request Body:** +```json +{ + "token": "password-reset-token-received-by-email", + "new_password": "newSecurePassword456" +} +``` + +**Response (200 OK):** +```json +{ + "message": "Password successfully reset." +} +``` + +Resets the user's password using the token received by email. + +### Clients + +#### Create Client ```http -GET /contacts/{client_id} +POST /api/v1/clients/ ``` -Lista contatos de um cliente. -#### Buscar Contato +**Request Body:** +```json +{ + "name": "Company Name" +} +``` + +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "Company Name", + "created_at": "2023-07-10T15:00:00.000Z" +} +``` + +Creates a new client. Requires administrator permissions. + +#### List Clients +```http +GET /api/v1/clients/ +``` + +**Query Parameters:** +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "Company Name", + "created_at": "2023-07-10T15:00:00.000Z" + } +] +``` + +Lists all clients with pagination. For administrator users, returns all clients. For regular users, returns only the client they are associated with. + +#### Get Client +```http +GET /api/v1/clients/{client_id} +``` + +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "Company Name", + "created_at": "2023-07-10T15:00:00.000Z" +} +``` + +Gets a specific client. The user must have permission to access this client. + +#### Update Client +```http +PUT /api/v1/clients/{client_id} +``` + +**Request Body:** +```json +{ + "name": "New Company Name" +} +``` + +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "New Company Name", + "created_at": "2023-07-10T15:00:00.000Z" +} +``` + +Updates client data. The user must have permission to access this client. + +#### Delete Client +```http +DELETE /api/v1/clients/{client_id} +``` + +**Response (204 No Content)** + +Deletes a client. Requires administrator permissions. + +### Contacts + +#### Create Contact +```http +POST /api/v1/contacts/ +``` + +**Request Body:** +```json +{ + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "ext_id": "optional-external-id", + "name": "Contact Name", + "meta": { + "phone": "+15551234567", + "category": "customer", + "notes": "Additional information" + } +} +``` + +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "ext_id": "optional-external-id", + "name": "Contact Name", + "meta": { + "phone": "+15551234567", + "category": "customer", + "notes": "Additional information" + } +} +``` + +Creates a new contact. The user must have permission to access the specified client. 
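+
+For a quick smoke test, the sketch below (not part of the official docs) creates a contact through this endpoint. It assumes the API is running locally at `http://localhost:8000`, that the `requests` package is installed, and that `TOKEN` holds a JWT obtained from `POST /api/v1/auth/login`; the `client_id` is a placeholder UUID.
+
+```python
+import requests
+
+BASE_URL = "http://localhost:8000"   # assumed local server
+TOKEN = "your-jwt-token-here"        # placeholder: obtain via POST /api/v1/auth/login
+
+payload = {
+    "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",  # placeholder client UUID
+    "name": "Contact Name",
+    "meta": {"phone": "+15551234567", "category": "customer"},
+}
+
+# Authenticated request; a successful call returns 201 with the created contact
+response = requests.post(
+    f"{BASE_URL}/api/v1/contacts/",
+    json=payload,
+    headers={"Authorization": f"Bearer {TOKEN}"},
+)
+response.raise_for_status()
+print(response.json()["id"])
+```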
+ +#### List Contacts +```http +GET /api/v1/contacts/{client_id} +``` + +**Query Parameters:** +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "ext_id": "optional-external-id", + "name": "Contact Name", + "meta": { + "phone": "+15551234567", + "category": "customer" + } + } +] +``` + +Lists contacts of a client. The user must have permission to access this client. + +#### Search Contact ```http GET /contact/{contact_id} ``` -Busca um contato específico. -#### Atualizar Contato +#### Update Contact ```http PUT /contact/{contact_id} ``` -Atualiza os dados de um contato. +Updates contact data. -#### Remover Contato +#### Remove Contact ```http DELETE /contact/{contact_id} ``` -Remove um contato. +Removes a contact. -### Agentes +### Agents -#### Criar Agente +#### Create Agent ```http -POST /agents/ +POST /api/v1/agents/ ``` -Cria um novo agente. -#### Listar Agentes +**Request Body:** +```json +{ + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "customer-service-agent", + "description": "Agent for customer service", + "type": "llm", + "model": "claude-3-opus-20240229", + "api_key": "your-api-key-here", + "instruction": "You are a customer service assistant for company X. Always be polite and try to solve customer problems efficiently.", + "config": { + "temperature": 0.7, + "max_tokens": 1024, + "tools": ["web_search", "knowledge_base"] + } +} +``` + +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "customer-service-agent", + "description": "Agent for customer service", + "type": "llm", + "model": "claude-3-opus-20240229", + "api_key": "your-api-key-here", + "instruction": "You are a customer service assistant for company X. Always be polite and try to solve customer problems efficiently.", + "config": { + "temperature": 0.7, + "max_tokens": 1024, + "tools": ["web_search", "knowledge_base"] + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" +} +``` + +Creates a new agent. The user must have permission to access the specified client. + +**Notes about agent types:** +- For `llm` type agents, the `model` and `api_key` fields are required +- For `sequential`, `parallel`, or `loop` type agents, the configuration must include a list of `sub_agents` + +#### List Agents ```http -GET /agents/{client_id} +GET /api/v1/agents/{client_id} ``` -Lista agentes de um cliente. -#### Buscar Agente +**Query Parameters:** +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "customer-service-agent", + "description": "Agent for customer service", + "type": "llm", + "model": "claude-3-opus-20240229", + "instruction": "You are a customer service assistant...", + "config": { + "temperature": 0.7, + "max_tokens": 1024, + "tools": ["web_search", "knowledge_base"] + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" + } +] +``` + +Lists all agents of a client. The user must have permission to access the specified client. 
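+
+As an illustration only (assuming a local server at `http://localhost:8000`, the `requests` package installed, and a valid JWT in `TOKEN`), the documented `skip`/`limit` query parameters can be used to page through the results:
+
+```python
+import requests
+
+TOKEN = "your-jwt-token-here"                       # placeholder JWT
+CLIENT_ID = "3fa85f64-5717-4562-b3fc-2c963f66afa6"  # placeholder client UUID
+PAGE_SIZE = 20
+
+# Fetch agents page by page until an empty page is returned
+skip = 0
+while True:
+    resp = requests.get(
+        f"http://localhost:8000/api/v1/agents/{CLIENT_ID}",
+        params={"skip": skip, "limit": PAGE_SIZE},
+        headers={"Authorization": f"Bearer {TOKEN}"},
+    )
+    resp.raise_for_status()
+    page = resp.json()
+    if not page:
+        break
+    for agent in page:
+        print(agent["name"], agent["type"])
+    skip += len(page)
+```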
+ +#### Get Agent ```http -GET /agent/{agent_id} +GET /api/v1/agents/{agent_id} ``` -Busca um agente específico. -#### Atualizar Agente +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "customer-service-agent", + "description": "Agent for customer service", + "type": "llm", + "model": "claude-3-opus-20240229", + "instruction": "You are a customer service assistant...", + "config": { + "temperature": 0.7, + "max_tokens": 1024, + "tools": ["web_search", "knowledge_base"] + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" +} +``` + +Gets a specific agent. The user must have permission to access this agent. + +#### Update Agent ```http -PUT /agent/{agent_id} +PUT /api/v1/agents/{agent_id} ``` -Atualiza os dados de um agente. -#### Remover Agente +**Request Body:** +```json +{ + "name": "new-customer-service-agent", + "description": "Updated agent for customer service", + "type": "llm", + "model": "claude-3-sonnet-20240229", + "instruction": "You are a customer service assistant for company X...", + "config": { + "temperature": 0.5, + "max_tokens": 2048, + "tools": ["web_search", "knowledge_base", "calculator"] + } +} +``` + +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "client_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "new-customer-service-agent", + "description": "Updated agent for customer service", + "type": "llm", + "model": "claude-3-sonnet-20240229", + "instruction": "You are a customer service assistant for company X...", + "config": { + "temperature": 0.5, + "max_tokens": 2048, + "tools": ["web_search", "knowledge_base", "calculator"] + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:05:00.000Z" +} +``` + +Updates agent data. The user must have permission to access this agent. + +#### Delete Agent ```http -DELETE /agent/{agent_id} +DELETE /api/v1/agents/{agent_id} ``` -Remove um agente. -### Servidores MCP +**Response (204 No Content)** -#### Criar Servidor MCP +Deletes an agent. The user must have permission to access this agent. + +### MCP Servers + +#### Create MCP Server ```http -POST /mcp-servers/ +POST /api/v1/mcp-servers/ ``` -Cria um novo servidor MCP. -#### Listar Servidores MCP +**Request Body:** +```json +{ + "name": "openai-server", + "description": "MCP server for OpenAI API access", + "config_json": { + "base_url": "https://api.openai.com/v1", + "timeout": 30 + }, + "environments": { + "OPENAI_API_KEY": "${OPENAI_API_KEY}" + }, + "tools": ["web_search", "knowledge_base"], + "type": "official" +} +``` + +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "openai-server", + "description": "MCP server for OpenAI API access", + "config_json": { + "base_url": "https://api.openai.com/v1", + "timeout": 30 + }, + "environments": { + "OPENAI_API_KEY": "${OPENAI_API_KEY}" + }, + "tools": ["web_search", "knowledge_base"], + "type": "official", + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" +} +``` + +Creates a new MCP server. Requires administrator permissions. + +#### List MCP Servers ```http -GET /mcp-servers/ +GET /api/v1/mcp-servers/ ``` -Lista todos os servidores MCP. 
-#### Buscar Servidor MCP +**Query Parameters:** +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "openai-server", + "description": "MCP server for OpenAI API access", + "config_json": { + "base_url": "https://api.openai.com/v1", + "timeout": 30 + }, + "environments": { + "OPENAI_API_KEY": "${OPENAI_API_KEY}" + }, + "tools": ["web_search", "knowledge_base"], + "type": "official", + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" + } +] +``` + +Lists all available MCP servers. + +#### Get MCP Server ```http -GET /mcp-servers/{server_id} +GET /api/v1/mcp-servers/{server_id} ``` -Busca um servidor MCP específico. -#### Atualizar Servidor MCP +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "openai-server", + "description": "MCP server for OpenAI API access", + "config_json": { + "base_url": "https://api.openai.com/v1", + "timeout": 30 + }, + "environments": { + "OPENAI_API_KEY": "${OPENAI_API_KEY}" + }, + "tools": ["web_search", "knowledge_base"], + "type": "official", + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" +} +``` + +Gets a specific MCP server. + +#### Update MCP Server ```http -PUT /mcp-servers/{server_id} +PUT /api/v1/mcp-servers/{server_id} ``` -Atualiza os dados de um servidor MCP. -#### Remover Servidor MCP +**Request Body:** +```json +{ + "name": "updated-openai-server", + "description": "Updated MCP server for OpenAI API access", + "config_json": { + "base_url": "https://api.openai.com/v1", + "timeout": 60 + }, + "environments": { + "OPENAI_API_KEY": "${OPENAI_API_KEY}", + "OPENAI_ORG_ID": "${OPENAI_ORG_ID}" + }, + "tools": ["web_search", "knowledge_base", "image_generation"], + "type": "official" +} +``` + +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "updated-openai-server", + "description": "Updated MCP server for OpenAI API access", + "config_json": { + "base_url": "https://api.openai.com/v1", + "timeout": 60 + }, + "environments": { + "OPENAI_API_KEY": "${OPENAI_API_KEY}", + "OPENAI_ORG_ID": "${OPENAI_ORG_ID}" + }, + "tools": ["web_search", "knowledge_base", "image_generation"], + "type": "official", + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:05:00.000Z" +} +``` + +Updates an MCP server. Requires administrator permissions. + +#### Delete MCP Server ```http -DELETE /mcp-servers/{server_id} +DELETE /api/v1/mcp-servers/{server_id} ``` -Remove um servidor MCP. -### Ferramentas +**Response (204 No Content)** -#### Criar Ferramenta +Deletes an MCP server. Requires administrator permissions. + +### Tools + +#### Create Tool ```http -POST /tools/ +POST /api/v1/tools/ ``` -Cria uma nova ferramenta. -#### Listar Ferramentas -```http -GET /tools/ +**Request Body:** +```json +{ + "name": "web_search", + "description": "Real-time web search", + "config_json": { + "api_url": "https://api.search.com", + "max_results": 5 + }, + "environments": { + "SEARCH_API_KEY": "${SEARCH_API_KEY}" + } +} ``` -Lista todas as ferramentas. 
-#### Buscar Ferramenta -```http -GET /tools/{tool_id} +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "web_search", + "description": "Real-time web search", + "config_json": { + "api_url": "https://api.search.com", + "max_results": 5 + }, + "environments": { + "SEARCH_API_KEY": "${SEARCH_API_KEY}" + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" +} ``` -Busca uma ferramenta específica. -#### Atualizar Ferramenta -```http -PUT /tools/{tool_id} -``` -Atualiza os dados de uma ferramenta. +Creates a new tool. Requires administrator permissions. -#### Remover Ferramenta +#### List Tools ```http -DELETE /tools/{tool_id} +GET /api/v1/tools/ ``` -Remove uma ferramenta. + +**Query Parameters:** +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "web_search", + "description": "Real-time web search", + "config_json": { + "api_url": "https://api.search.com", + "max_results": 5 + }, + "environments": { + "SEARCH_API_KEY": "${SEARCH_API_KEY}" + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" + } +] +``` + +Lists all available tools. + +#### Get Tool +```http +GET /api/v1/tools/{tool_id} +``` + +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "web_search", + "description": "Real-time web search", + "config_json": { + "api_url": "https://api.search.com", + "max_results": 5 + }, + "environments": { + "SEARCH_API_KEY": "${SEARCH_API_KEY}" + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:00:00.000Z" +} +``` + +Gets a specific tool. + +#### Update Tool +```http +PUT /api/v1/tools/{tool_id} +``` + +**Request Body:** +```json +{ + "name": "web_search_pro", + "description": "Real-time web search with advanced filters", + "config_json": { + "api_url": "https://api.search.com/v2", + "max_results": 10, + "filters": { + "safe_search": true, + "time_range": "week" + } + }, + "environments": { + "SEARCH_API_KEY": "${SEARCH_API_KEY}" + } +} +``` + +**Response (200 OK):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "name": "web_search_pro", + "description": "Real-time web search with advanced filters", + "config_json": { + "api_url": "https://api.search.com/v2", + "max_results": 10, + "filters": { + "safe_search": true, + "time_range": "week" + } + }, + "environments": { + "SEARCH_API_KEY": "${SEARCH_API_KEY}" + }, + "created_at": "2023-07-10T15:00:00.000Z", + "updated_at": "2023-07-10T15:05:00.000Z" +} +``` + +Updates a tool. Requires administrator permissions. + +#### Delete Tool +```http +DELETE /api/v1/tools/{tool_id} +``` + +**Response (204 No Content)** + +Deletes a tool. Requires administrator permissions. ### Chat -#### Enviar Mensagem +#### Send Message ```http -POST /chat +POST /api/v1/chat ``` -Envia uma mensagem para um agente. -## 📝 Documentação Interativa +**Request Body:** +```json +{ + "agent_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "contact_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "message": "Hello, I need help with my order." +} +``` -A documentação interativa da API está disponível em: +**Response (200 OK):** +```json +{ + "response": "Hello! Of course, I'm here to help with your order. 
Could you please provide your order number or more details about the issue you're experiencing?", + "status": "success", + "timestamp": "2023-07-10T15:00:00.000Z" +} +``` + +Sends a message to an agent and returns the generated response. The user must have permission to access the specified agent and contact. + +#### Conversation History +```http +GET /api/v1/chat/history/{contact_id} +``` + +**Query Parameters:** +- `agent_id` (optional): Filter by a specific agent +- `start_date` (optional): Start date in ISO 8601 format +- `end_date` (optional): End date in ISO 8601 format +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "agent_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "contact_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "message": "Hello, I need help with my order.", + "response": "Hello! Of course, I'm here to help with your order. Could you please provide your order number or more details about the issue you're experiencing?", + "timestamp": "2023-07-10T15:00:00.000Z" + } +] +``` + +Retrieves the conversation history of a specific contact. The user must have permission to access the specified contact. + +### Administration + +#### Audit Logs +```http +GET /api/v1/admin/audit-logs +``` + +**Query Parameters:** +- `user_id` (optional): Filter by user +- `action` (optional): Filter by action type +- `start_date` (optional): Start date in ISO 8601 format +- `end_date` (optional): End date in ISO 8601 format +- `resource_type` (optional): Type of affected resource +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "user_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "action": "CREATE", + "resource_type": "AGENT", + "resource_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "timestamp": "2023-07-10T15:00:00.000Z", + "ip_address": "192.168.1.1", + "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", + "details": { + "before": null, + "after": { + "name": "customer-service-agent", + "type": "llm" + } + } + } +] +``` + +Retrieves audit logs. Requires administrator permissions. + +#### List Administrators +```http +GET /api/v1/admin/users +``` + +**Query Parameters:** +- `skip` (optional): Number of records to skip (default: 0) +- `limit` (optional): Maximum number of records to return (default: 100) + +**Response (200 OK):** +```json +[ + { + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "email": "admin@example.com", + "name": "Administrator", + "is_active": true, + "email_verified": true, + "is_admin": true, + "created_at": "2023-07-10T15:00:00.000Z" + } +] +``` + +Lists all administrator users. Requires administrator permissions. + +#### Create Administrator +```http +POST /api/v1/admin/users +``` + +**Request Body:** +```json +{ + "email": "new_admin@example.com", + "password": "securePassword123", + "name": "New Administrator" +} +``` + +**Response (201 Created):** +```json +{ + "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "email": "new_admin@example.com", + "name": "New Administrator", + "is_active": true, + "email_verified": true, + "is_admin": true, + "created_at": "2023-07-10T15:00:00.000Z" +} +``` + +Creates a new administrator user. 
Requires administrator permissions. + +#### Deactivate Administrator +```http +DELETE /api/v1/admin/users/{user_id} +``` + +**Response (204 No Content)** + +Deactivates an administrator user. Requires administrator permissions. The user is not removed from the database, just marked as inactive. + +## 📝 Interactive Documentation + +The interactive API documentation is available at: - Swagger UI: `http://localhost:8000/docs` - ReDoc: `http://localhost:8000/redoc` -## 📊 Logs +## 📊 Logs and Audit -Os logs são armazenados no diretório `logs/` com o seguinte formato: -- `{nome_do_logger}_{data}.log` +- Logs are stored in the `logs/` directory with the following format: + - `{logger_name}_{date}.log` +- The system maintains audit logs for important administrative actions +- Each action is recorded with information such as user, IP, date/time, and details -## 🤝 Contribuindo +## 🤝 Contributing -1. Faça um fork do projeto -2. Crie uma branch para sua feature (`git checkout -b feature/AmazingFeature`) -3. Commit suas mudanças (`git commit -m 'Add some AmazingFeature'`) -4. Push para a branch (`git push origin feature/AmazingFeature`) -5. Abra um Pull Request +1. Fork the project +2. Create a feature branch (`git checkout -b feature/AmazingFeature`) +3. Commit your changes (`git commit -m 'Add some AmazingFeature'`) +4. Push to the branch (`git push origin feature/AmazingFeature`) +5. Open a Pull Request -## 📄 Licença +## 📄 License -Este projeto está licenciado sob a licença MIT - veja o arquivo [LICENSE](LICENSE) para detalhes. +This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. -## 🙏 Agradecimentos +## 🙏 Acknowledgments - [FastAPI](https://fastapi.tiangolo.com/) - [SQLAlchemy](https://www.sqlalchemy.org/) -- [Google ADK](https://github.com/google/adk) \ No newline at end of file +- [Google ADK](https://github.com/google/adk) \ No newline at end of file diff --git a/migrations/env.py b/migrations/env.py index 5ffff663..58e850ce 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -31,6 +31,17 @@ target_metadata = [Base.metadata] # my_important_option = config.get_main_option("my_important_option") # ... etc. +# Lista de tabelas a serem ignoradas na geração automática de migrações +exclude_tables = ['sessions', 'events', 'app_states', 'user_states'] + +def include_object(object, name, type_, reflected, compare_to): + """ + Função de filtro para excluir determinadas tabelas da geração automática de migrações + """ + if type_ == "table" and name in exclude_tables: + return False + return True + def run_migrations_offline() -> None: """Run migrations in 'offline' mode. 
@@ -50,6 +61,7 @@ def run_migrations_offline() -> None: target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, + include_object=include_object, ) with context.begin_transaction(): @@ -87,6 +99,7 @@ def do_run_migrations(connection): connection=connection, target_metadata=target_metadata, compare_type=True, + include_object=include_object, ) with context.begin_transaction(): context.run_migrations() diff --git a/migrations/versions/98780d4fb293_add_audit_table.py b/migrations/versions/98780d4fb293_add_audit_table.py new file mode 100644 index 00000000..dbd60b9a --- /dev/null +++ b/migrations/versions/98780d4fb293_add_audit_table.py @@ -0,0 +1,44 @@ +"""add_audit_table + +Revision ID: 98780d4fb293 +Revises: f11fb4060739 +Create Date: 2025-04-28 15:17:10.491183 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '98780d4fb293' +down_revision: Union[str, None] = 'f11fb4060739' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('audit_logs', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('user_id', sa.UUID(), nullable=True), + sa.Column('action', sa.String(), nullable=False), + sa.Column('resource_type', sa.String(), nullable=False), + sa.Column('resource_id', sa.String(), nullable=True), + sa.Column('details', sa.JSON(), nullable=True), + sa.Column('ip_address', sa.String(), nullable=True), + sa.Column('user_agent', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('audit_logs') + # ### end Alembic commands ### diff --git a/migrations/versions/f11fb4060739_add_user_table.py b/migrations/versions/f11fb4060739_add_user_table.py new file mode 100644 index 00000000..7070612e --- /dev/null +++ b/migrations/versions/f11fb4060739_add_user_table.py @@ -0,0 +1,50 @@ +"""add_user_table + +Revision ID: f11fb4060739 +Revises: 2d612b95d0ea +Create Date: 2025-04-28 15:01:34.432588 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'f11fb4060739' +down_revision: Union[str, None] = '2d612b95d0ea' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('users', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('email', sa.String(), nullable=False), + sa.Column('password_hash', sa.String(), nullable=False), + sa.Column('client_id', sa.UUID(), nullable=True), + sa.Column('is_active', sa.Boolean(), nullable=True), + sa.Column('is_admin', sa.Boolean(), nullable=True), + sa.Column('email_verified', sa.Boolean(), nullable=True), + sa.Column('verification_token', sa.String(), nullable=True), + sa.Column('verification_token_expiry', sa.DateTime(timezone=True), nullable=True), + sa.Column('password_reset_token', sa.String(), nullable=True), + sa.Column('password_reset_expiry', sa.DateTime(timezone=True), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_users_email'), table_name='users') + op.drop_table('users') + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt index 20ea4d23..8bfa5cf4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,4 +10,9 @@ google-adk litellm python-multipart alembic -asyncpg \ No newline at end of file +asyncpg +# Novas dependências para autenticação +python-jose[cryptography] +passlib[bcrypt] +sendgrid +pydantic[email] \ No newline at end of file diff --git a/scripts/run_seeders.py b/scripts/run_seeders.py new file mode 100644 index 00000000..7ce2b0b9 --- /dev/null +++ b/scripts/run_seeders.py @@ -0,0 +1,113 @@ +""" +Script principal para executar todos os seeders em sequência. +Verifica as dependências entre os seeders e executa na ordem correta. 
+""" + +import os +import sys +import logging +import argparse +from dotenv import load_dotenv + +# Configurar logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +# Importar seeders +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +from scripts.seeders.admin_seeder import create_admin_user +from scripts.seeders.client_seeder import create_demo_client_and_user +from scripts.seeders.agent_seeder import create_demo_agents +from scripts.seeders.mcp_server_seeder import create_mcp_servers +from scripts.seeders.tool_seeder import create_tools +from scripts.seeders.contact_seeder import create_demo_contacts + +def setup_environment(): + """Configura o ambiente para os seeders""" + load_dotenv() + + # Verificar se as variáveis de ambiente essenciais estão definidas + required_vars = ["POSTGRES_CONNECTION_STRING"] + missing_vars = [var for var in required_vars if not os.getenv(var)] + + if missing_vars: + logger.error(f"Variáveis de ambiente necessárias não definidas: {', '.join(missing_vars)}") + return False + + return True + +def run_seeders(seeders): + """ + Executa os seeders especificados + + Args: + seeders (list): Lista de seeders para executar + + Returns: + bool: True se todos os seeders foram executados com sucesso, False caso contrário + """ + all_seeders = { + "admin": create_admin_user, + "client": create_demo_client_and_user, + "agents": create_demo_agents, + "mcp_servers": create_mcp_servers, + "tools": create_tools, + "contacts": create_demo_contacts + } + + # Define a ordem correta de execução (dependências) + seeder_order = ["admin", "client", "mcp_servers", "tools", "agents", "contacts"] + + # Se nenhum seeder for especificado, executar todos + if not seeders: + seeders = seeder_order + else: + # Verificar se todos os seeders especificados existem + invalid_seeders = [s for s in seeders if s not in all_seeders] + if invalid_seeders: + logger.error(f"Seeders inválidos: {', '.join(invalid_seeders)}") + logger.info(f"Seeders disponíveis: {', '.join(all_seeders.keys())}") + return False + + # Garantir que seeders sejam executados na ordem correta + seeders = [s for s in seeder_order if s in seeders] + + # Executar seeders + success = True + for seeder_name in seeders: + logger.info(f"Executando seeder: {seeder_name}") + + try: + seeder_func = all_seeders[seeder_name] + if not seeder_func(): + logger.error(f"Falha ao executar seeder: {seeder_name}") + success = False + except Exception as e: + logger.error(f"Erro ao executar seeder {seeder_name}: {str(e)}") + success = False + + return success + +def main(): + """Função principal""" + parser = argparse.ArgumentParser(description='Executa seeders para popular o banco de dados') + parser.add_argument('--seeders', nargs='+', help='Seeders para executar (admin, client, agents, mcp_servers, tools, contacts)') + args = parser.parse_args() + + # Configurar ambiente + if not setup_environment(): + sys.exit(1) + + # Executar seeders + success = run_seeders(args.seeders) + + # Saída + if success: + logger.info("Todos os seeders foram executados com sucesso") + sys.exit(0) + else: + logger.error("Houve erros ao executar os seeders") + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/seeders/admin_seeder.py b/scripts/seeders/admin_seeder.py new file mode 100644 index 00000000..85ff023c --- /dev/null +++ b/scripts/seeders/admin_seeder.py @@ -0,0 +1,79 @@ +""" +Script para criar 
um usuário administrador inicial: +- Email: admin@evoai.com +- Senha: definida nas variáveis de ambiente ADMIN_INITIAL_PASSWORD +- is_admin: True +- is_active: True +- email_verified: True +""" + +import os +import sys +import logging +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from dotenv import load_dotenv +from src.models.models import User +from src.utils.security import get_password_hash + +# Configurar logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +def create_admin_user(): + """Cria um usuário administrador inicial no sistema""" + try: + # Carregar variáveis de ambiente + load_dotenv() + + # Obter configurações do banco de dados + db_url = os.getenv("POSTGRES_CONNECTION_STRING") + if not db_url: + logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") + return False + + # Obter senha do administrador + admin_password = os.getenv("ADMIN_INITIAL_PASSWORD") + if not admin_password: + logger.error("Variável de ambiente ADMIN_INITIAL_PASSWORD não definida") + return False + + # Configuração do email do admin + admin_email = os.getenv("ADMIN_EMAIL", "admin@evoai.com") + + # Conectar ao banco de dados + engine = create_engine(db_url) + Session = sessionmaker(bind=engine) + session = Session() + + # Verificar se o administrador já existe + existing_admin = session.query(User).filter(User.email == admin_email).first() + if existing_admin: + logger.info(f"Administrador com email {admin_email} já existe") + return True + + # Criar administrador + admin_user = User( + email=admin_email, + password_hash=get_password_hash(admin_password), + is_admin=True, + is_active=True, + email_verified=True + ) + + # Adicionar e comitar + session.add(admin_user) + session.commit() + + logger.info(f"Administrador criado com sucesso: {admin_email}") + return True + + except Exception as e: + logger.error(f"Erro ao criar administrador: {str(e)}") + return False + finally: + session.close() + +if __name__ == "__main__": + success = create_admin_user() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/scripts/seeders/agent_seeder.py b/scripts/seeders/agent_seeder.py new file mode 100644 index 00000000..a4eb3591 --- /dev/null +++ b/scripts/seeders/agent_seeder.py @@ -0,0 +1,158 @@ +""" +Script para criar agentes de exemplo para o cliente demo: +- Agente Atendimento: configurado para responder perguntas gerais +- Agente Vendas: configurado para responder sobre produtos +- Agente FAQ: configurado para responder perguntas frequentes +Cada agente com instruções e configurações pré-definidas +""" + +import os +import sys +import logging +import uuid +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.exc import SQLAlchemyError +from dotenv import load_dotenv +from src.models.models import Agent, Client, User + +# Configurar logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +def create_demo_agents(): + """Cria agentes de exemplo para o cliente demo""" + try: + # Carregar variáveis de ambiente + load_dotenv() + + # Obter configurações do banco de dados + db_url = os.getenv("POSTGRES_CONNECTION_STRING") + if not db_url: + logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") + return False + + # Conectar ao banco de dados + engine = create_engine(db_url) + Session = sessionmaker(bind=engine) + 
session = Session() + + try: + # Obter o cliente demo pelo email do usuário + demo_email = os.getenv("DEMO_EMAIL", "demo@exemplo.com") + demo_user = session.query(User).filter(User.email == demo_email).first() + + if not demo_user or not demo_user.client_id: + logger.error(f"Usuário demo não encontrado ou não associado a um cliente: {demo_email}") + return False + + client_id = demo_user.client_id + + # Verificar se já existem agentes para este cliente + existing_agents = session.query(Agent).filter(Agent.client_id == client_id).all() + if existing_agents: + logger.info(f"Já existem {len(existing_agents)} agentes para o cliente {client_id}") + return True + + # Definições dos agentes de exemplo + agents = [ + { + "name": "Atendimento_Geral", + "description": "Agente para atendimento geral e dúvidas básicas", + "type": "llm", + "model": "gpt-3.5-turbo", + "api_key": "${OPENAI_API_KEY}", # Será substituído pela variável de ambiente + "instruction": """ + Você é um assistente de atendimento ao cliente da empresa. + Seja cordial, objetivo e eficiente. Responda às dúvidas dos clientes + de forma clara e sucinta. Se não souber a resposta, informe que irá + consultar um especialista e retornará em breve. + """, + "config": { + "temperature": 0.7, + "max_tokens": 500, + "tools": [] + } + }, + { + "name": "Vendas_Produtos", + "description": "Agente especializado em vendas e informações sobre produtos", + "type": "llm", + "model": "claude-3-sonnet-20240229", + "api_key": "${ANTHROPIC_API_KEY}", # Será substituído pela variável de ambiente + "instruction": """ + Você é um especialista em vendas da empresa. + Seu objetivo é fornecer informações detalhadas sobre produtos, + comparar diferentes opções, destacar benefícios e vantagens competitivas. + Use uma linguagem persuasiva mas honesta, e sempre busque entender + as necessidades do cliente antes de recomendar um produto. + """, + "config": { + "temperature": 0.8, + "max_tokens": 800, + "tools": ["web_search"] + } + }, + { + "name": "FAQ_Bot", + "description": "Agente para responder perguntas frequentes", + "type": "llm", + "model": "gemini-pro", + "api_key": "${GOOGLE_API_KEY}", # Será substituído pela variável de ambiente + "instruction": """ + Você é um assistente especializado em responder perguntas frequentes. + Suas respostas devem ser diretas, objetivas e baseadas nas informações + da empresa. Utilize uma linguagem simples e acessível. Se a pergunta + não estiver relacionada às FAQs disponíveis, direcione o cliente para + o canal de atendimento adequado. 
+ """, + "config": { + "temperature": 0.5, + "max_tokens": 400, + "tools": [] + } + } + ] + + # Criar os agentes + for agent_data in agents: + # Substituir placeholders de API Keys por variáveis de ambiente quando disponíveis + if "${OPENAI_API_KEY}" in agent_data["api_key"]: + agent_data["api_key"] = os.getenv("OPENAI_API_KEY", "") + elif "${ANTHROPIC_API_KEY}" in agent_data["api_key"]: + agent_data["api_key"] = os.getenv("ANTHROPIC_API_KEY", "") + elif "${GOOGLE_API_KEY}" in agent_data["api_key"]: + agent_data["api_key"] = os.getenv("GOOGLE_API_KEY", "") + + agent = Agent( + client_id=client_id, + name=agent_data["name"], + description=agent_data["description"], + type=agent_data["type"], + model=agent_data["model"], + api_key=agent_data["api_key"], + instruction=agent_data["instruction"].strip(), + config=agent_data["config"] + ) + + session.add(agent) + logger.info(f"Agente '{agent_data['name']}' criado para o cliente {client_id}") + + session.commit() + logger.info(f"Todos os agentes de exemplo foram criados com sucesso para o cliente {client_id}") + return True + + except SQLAlchemyError as e: + session.rollback() + logger.error(f"Erro de banco de dados ao criar agentes de exemplo: {str(e)}") + return False + + except Exception as e: + logger.error(f"Erro ao criar agentes de exemplo: {str(e)}") + return False + finally: + session.close() + +if __name__ == "__main__": + success = create_demo_agents() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/scripts/seeders/client_seeder.py b/scripts/seeders/client_seeder.py new file mode 100644 index 00000000..0419180a --- /dev/null +++ b/scripts/seeders/client_seeder.py @@ -0,0 +1,93 @@ +""" +Script para criar um cliente de exemplo: +- Nome: Cliente Demo +- Com usuário associado: + - Email: demo@exemplo.com + - Senha: demo123 (ou definida em variável de ambiente) + - is_admin: False + - is_active: True + - email_verified: True +""" + +import os +import sys +import logging +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.exc import SQLAlchemyError +from dotenv import load_dotenv +from src.models.models import User, Client +from src.utils.security import get_password_hash + +# Configurar logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +def create_demo_client_and_user(): + """Cria um cliente e usuário de demonstração no sistema""" + try: + # Carregar variáveis de ambiente + load_dotenv() + + # Obter configurações do banco de dados + db_url = os.getenv("POSTGRES_CONNECTION_STRING") + if not db_url: + logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") + return False + + # Obter senha do usuário demo (ou usar padrão) + demo_password = os.getenv("DEMO_PASSWORD", "demo123") + + # Configurações do cliente e usuário demo + demo_client_name = os.getenv("DEMO_CLIENT_NAME", "Cliente Demo") + demo_email = os.getenv("DEMO_EMAIL", "demo@exemplo.com") + + # Conectar ao banco de dados + engine = create_engine(db_url) + Session = sessionmaker(bind=engine) + session = Session() + + try: + # Verificar se o usuário já existe + existing_user = session.query(User).filter(User.email == demo_email).first() + if existing_user: + logger.info(f"Usuário demo com email {demo_email} já existe") + return True + + # Criar cliente demo + demo_client = Client(name=demo_client_name) + session.add(demo_client) + session.flush() # Obter o ID do cliente + + # Criar usuário demo associado ao 
cliente + demo_user = User( + email=demo_email, + password_hash=get_password_hash(demo_password), + client_id=demo_client.id, + is_admin=False, + is_active=True, + email_verified=True + ) + + # Adicionar e comitar + session.add(demo_user) + session.commit() + + logger.info(f"Cliente demo '{demo_client_name}' criado com sucesso") + logger.info(f"Usuário demo criado com sucesso: {demo_email}") + return True + + except SQLAlchemyError as e: + session.rollback() + logger.error(f"Erro de banco de dados ao criar cliente/usuário demo: {str(e)}") + return False + + except Exception as e: + logger.error(f"Erro ao criar cliente/usuário demo: {str(e)}") + return False + finally: + session.close() + +if __name__ == "__main__": + success = create_demo_client_and_user() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/scripts/seeders/contact_seeder.py b/scripts/seeders/contact_seeder.py new file mode 100644 index 00000000..4f034615 --- /dev/null +++ b/scripts/seeders/contact_seeder.py @@ -0,0 +1,184 @@ +""" +Script para criar contatos de exemplo para o cliente demo: +- Contatos com histórico de conversas +- Diferentes perfis de cliente +- Dados fictícios para demonstração +""" + +import os +import sys +import logging +import json +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.exc import SQLAlchemyError +from dotenv import load_dotenv +from src.models.models import Contact, User, Client + +# Configurar logging +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger(__name__) + + +def create_demo_contacts(): + """Cria contatos de exemplo para o cliente demo""" + try: + # Carregar variáveis de ambiente + load_dotenv() + + # Obter configurações do banco de dados + db_url = os.getenv("POSTGRES_CONNECTION_STRING") + if not db_url: + logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") + return False + + # Conectar ao banco de dados + engine = create_engine(db_url) + Session = sessionmaker(bind=engine) + session = Session() + + try: + # Obter o cliente demo pelo email do usuário + demo_email = os.getenv("DEMO_EMAIL", "demo@exemplo.com") + demo_user = session.query(User).filter(User.email == demo_email).first() + + if not demo_user or not demo_user.client_id: + logger.error( + f"Usuário demo não encontrado ou não associado a um cliente: {demo_email}" + ) + return False + + client_id = demo_user.client_id + + # Verificar se já existem contatos para este cliente + existing_contacts = ( + session.query(Contact).filter(Contact.client_id == client_id).all() + ) + if existing_contacts: + logger.info( + f"Já existem {len(existing_contacts)} contatos para o cliente {client_id}" + ) + return True + + # Definições dos contatos de exemplo + contacts = [ + { + "name": "Maria Silva", + "ext_id": "5511999998888", + "meta": { + "source": "whatsapp", + "tags": ["cliente_vip", "suporte_premium"], + "location": "São Paulo, SP", + "last_contact": "2023-08-15T14:30:00Z", + "account_details": { + "customer_since": "2020-03-10", + "plan": "Enterprise", + "payment_status": "active", + }, + }, + }, + { + "name": "João Santos", + "ext_id": "5511988887777", + "meta": { + "source": "whatsapp", + "tags": ["prospecção", "demo_solicitada"], + "location": "Rio de Janeiro, RJ", + "last_contact": "2023-09-20T10:15:00Z", + "interests": ["automação", "marketing", "chatbots"], + }, + }, + { + "name": "Ana Oliveira", + "ext_id": "5511977776666", + "meta": { + "source": "telegram", + 
"tags": ["suporte_técnico", "problema_resolvido"], + "location": "Belo Horizonte, MG", + "last_contact": "2023-09-25T16:45:00Z", + "support_cases": [ + { + "id": "SUP-2023-1234", + "status": "closed", + "priority": "high", + }, + { + "id": "SUP-2023-1567", + "status": "open", + "priority": "medium", + }, + ], + }, + }, + { + "name": "Carlos Pereira", + "ext_id": "5511966665555", + "meta": { + "source": "whatsapp", + "tags": ["cancelamento", "retenção"], + "location": "Porto Alegre, RS", + "last_contact": "2023-09-10T09:30:00Z", + "account_details": { + "customer_since": "2019-05-22", + "plan": "Professional", + "payment_status": "overdue", + "invoice_pending": True, + }, + }, + }, + { + "name": "Fernanda Lima", + "ext_id": "5511955554444", + "meta": { + "source": "telegram", + "tags": ["parceiro", "integrador"], + "location": "Curitiba, PR", + "last_contact": "2023-09-18T14:00:00Z", + "partner_details": { + "company": "TechSolutions Ltda", + "partner_level": "Gold", + "certified": True, + }, + }, + }, + ] + + # Criar os contatos + for contact_data in contacts: + contact = Contact( + client_id=client_id, + name=contact_data["name"], + ext_id=contact_data["ext_id"], + meta=contact_data["meta"], + ) + + session.add(contact) + logger.info( + f"Contato '{contact_data['name']}' criado para o cliente {client_id}" + ) + + session.commit() + logger.info( + f"Todos os contatos de exemplo foram criados com sucesso para o cliente {client_id}" + ) + return True + + except SQLAlchemyError as e: + session.rollback() + logger.error( + f"Erro de banco de dados ao criar contatos de exemplo: {str(e)}" + ) + return False + + except Exception as e: + logger.error(f"Erro ao criar contatos de exemplo: {str(e)}") + return False + finally: + session.close() + + +if __name__ == "__main__": + success = create_demo_contacts() + sys.exit(0 if success else 1) diff --git a/scripts/seeders/mcp_server_seeder.py b/scripts/seeders/mcp_server_seeder.py new file mode 100644 index 00000000..77a9a0fc --- /dev/null +++ b/scripts/seeders/mcp_server_seeder.py @@ -0,0 +1,150 @@ +""" +Script para criar servidores MCP padrão: +- Servidor Anthropic Claude +- Servidor OpenAI GPT +- Servidor Google Gemini +- Servidor Ollama (local) +Cada um com configurações padrão para produção +""" + +import os +import sys +import logging +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.exc import SQLAlchemyError +from dotenv import load_dotenv +from src.models.models import MCPServer + +# Configurar logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +def create_mcp_servers(): + """Cria servidores MCP padrão no sistema""" + try: + # Carregar variáveis de ambiente + load_dotenv() + + # Obter configurações do banco de dados + db_url = os.getenv("POSTGRES_CONNECTION_STRING") + if not db_url: + logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") + return False + + # Conectar ao banco de dados + engine = create_engine(db_url) + Session = sessionmaker(bind=engine) + session = Session() + + try: + # Verificar se já existem servidores MCP + existing_servers = session.query(MCPServer).all() + if existing_servers: + logger.info(f"Já existem {len(existing_servers)} servidores MCP cadastrados") + return True + + # Definições dos servidores MCP + mcp_servers = [ + { + "name": "Anthropic Claude", + "description": "Servidor para modelos Claude da Anthropic", + "config_json": { + "provider": "anthropic", + 
"models": ["claude-3-sonnet-20240229", "claude-3-opus-20240229", "claude-3-haiku-20240307"], + "api_base": "https://api.anthropic.com/v1", + "api_key_env": "ANTHROPIC_API_KEY" + }, + "environments": { + "production": True, + "development": True, + "staging": True + }, + "tools": ["function_calling", "web_search"], + "type": "official" + }, + { + "name": "OpenAI GPT", + "description": "Servidor para modelos GPT da OpenAI", + "config_json": { + "provider": "openai", + "models": ["gpt-4", "gpt-4-turbo", "gpt-3.5-turbo"], + "api_base": "https://api.openai.com/v1", + "api_key_env": "OPENAI_API_KEY" + }, + "environments": { + "production": True, + "development": True, + "staging": True + }, + "tools": ["function_calling", "web_search", "image_generation"], + "type": "official" + }, + { + "name": "Google Gemini", + "description": "Servidor para modelos Gemini do Google", + "config_json": { + "provider": "google", + "models": ["gemini-pro", "gemini-ultra"], + "api_base": "https://generativelanguage.googleapis.com/v1", + "api_key_env": "GOOGLE_API_KEY" + }, + "environments": { + "production": True, + "development": True, + "staging": True + }, + "tools": ["function_calling", "web_search"], + "type": "official" + }, + { + "name": "Ollama Local", + "description": "Servidor para modelos locais via Ollama", + "config_json": { + "provider": "ollama", + "models": ["llama3", "mistral", "mixtral"], + "api_base": "http://localhost:11434", + "api_key_env": None + }, + "environments": { + "production": False, + "development": True, + "staging": False + }, + "tools": [], + "type": "community" + } + ] + + # Criar os servidores MCP + for server_data in mcp_servers: + server = MCPServer( + name=server_data["name"], + description=server_data["description"], + config_json=server_data["config_json"], + environments=server_data["environments"], + tools=server_data["tools"], + type=server_data["type"] + ) + + session.add(server) + logger.info(f"Servidor MCP '{server_data['name']}' criado com sucesso") + + session.commit() + logger.info("Todos os servidores MCP foram criados com sucesso") + return True + + except SQLAlchemyError as e: + session.rollback() + logger.error(f"Erro de banco de dados ao criar servidores MCP: {str(e)}") + return False + + except Exception as e: + logger.error(f"Erro ao criar servidores MCP: {str(e)}") + return False + finally: + session.close() + +if __name__ == "__main__": + success = create_mcp_servers() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/scripts/seeders/tool_seeder.py b/scripts/seeders/tool_seeder.py new file mode 100644 index 00000000..b99a2b44 --- /dev/null +++ b/scripts/seeders/tool_seeder.py @@ -0,0 +1,183 @@ +""" +Script para criar ferramentas padrão: +- Pesquisa Web +- Consulta a Documentos +- Consulta a Base de Conhecimento +- Integração WhatsApp/Telegram +Cada uma com configurações básicas para demonstração +""" + +import os +import sys +import logging +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.exc import SQLAlchemyError +from dotenv import load_dotenv +from src.models.models import Tool + +# Configurar logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + +def create_tools(): + """Cria ferramentas padrão no sistema""" + try: + # Carregar variáveis de ambiente + load_dotenv() + + # Obter configurações do banco de dados + db_url = os.getenv("POSTGRES_CONNECTION_STRING") + if not db_url: + 
logger.error("Variável de ambiente POSTGRES_CONNECTION_STRING não definida") + return False + + # Conectar ao banco de dados + engine = create_engine(db_url) + Session = sessionmaker(bind=engine) + session = Session() + + try: + # Verificar se já existem ferramentas + existing_tools = session.query(Tool).all() + if existing_tools: + logger.info(f"Já existem {len(existing_tools)} ferramentas cadastradas") + return True + + # Definições das ferramentas + tools = [ + { + "name": "web_search", + "description": "Pesquisa na web para obter informações atualizadas", + "config_json": { + "provider": "brave", + "api_base": "https://api.search.brave.com/res/v1/web/search", + "api_key_env": "BRAVE_API_KEY", + "max_results": 5, + "safe_search": "moderate" + }, + "environments": { + "production": True, + "development": True, + "staging": True + } + }, + { + "name": "document_query", + "description": "Consulta documentos internos para obter informações específicas", + "config_json": { + "provider": "internal", + "api_base": "${KNOWLEDGE_API_URL}/documents", + "api_key_env": "KNOWLEDGE_API_KEY", + "embeddings_model": "text-embedding-3-small", + "max_chunks": 10, + "similarity_threshold": 0.75 + }, + "environments": { + "production": True, + "development": True, + "staging": True + } + }, + { + "name": "knowledge_base", + "description": "Consulta base de conhecimento para solução de problemas", + "config_json": { + "provider": "internal", + "api_base": "${KNOWLEDGE_API_URL}/kb", + "api_key_env": "KNOWLEDGE_API_KEY", + "max_results": 3, + "categories": ["support", "faq", "troubleshooting"] + }, + "environments": { + "production": True, + "development": True, + "staging": True + } + }, + { + "name": "whatsapp_integration", + "description": "Integração com WhatsApp para envio e recebimento de mensagens", + "config_json": { + "provider": "meta", + "api_base": "https://graph.facebook.com/v17.0", + "api_key_env": "WHATSAPP_API_KEY", + "phone_number_id": "${WHATSAPP_PHONE_ID}", + "webhook_verify_token": "${WHATSAPP_VERIFY_TOKEN}", + "templates_enabled": True + }, + "environments": { + "production": True, + "development": False, + "staging": True + } + }, + { + "name": "telegram_integration", + "description": "Integração com Telegram para envio e recebimento de mensagens", + "config_json": { + "provider": "telegram", + "api_base": "https://api.telegram.org", + "api_key_env": "TELEGRAM_BOT_TOKEN", + "webhook_url": "${APP_URL}/webhook/telegram", + "allowed_updates": ["message", "callback_query"] + }, + "environments": { + "production": True, + "development": False, + "staging": True + } + } + ] + + # Criar as ferramentas + for tool_data in tools: + # Substituir placeholders por variáveis de ambiente quando disponíveis + if "api_base" in tool_data["config_json"]: + if "${KNOWLEDGE_API_URL}" in tool_data["config_json"]["api_base"]: + tool_data["config_json"]["api_base"] = tool_data["config_json"]["api_base"].replace( + "${KNOWLEDGE_API_URL}", os.getenv("KNOWLEDGE_API_URL", "http://localhost:5540") + ) + + if "webhook_url" in tool_data["config_json"]: + if "${APP_URL}" in tool_data["config_json"]["webhook_url"]: + tool_data["config_json"]["webhook_url"] = tool_data["config_json"]["webhook_url"].replace( + "${APP_URL}", os.getenv("APP_URL", "http://localhost:8000") + ) + + if "phone_number_id" in tool_data["config_json"]: + if "${WHATSAPP_PHONE_ID}" in tool_data["config_json"]["phone_number_id"]: + tool_data["config_json"]["phone_number_id"] = os.getenv("WHATSAPP_PHONE_ID", "") + + if "webhook_verify_token" in 
tool_data["config_json"]: + if "${WHATSAPP_VERIFY_TOKEN}" in tool_data["config_json"]["webhook_verify_token"]: + tool_data["config_json"]["webhook_verify_token"] = os.getenv("WHATSAPP_VERIFY_TOKEN", "") + + tool = Tool( + name=tool_data["name"], + description=tool_data["description"], + config_json=tool_data["config_json"], + environments=tool_data["environments"] + ) + + session.add(tool) + logger.info(f"Ferramenta '{tool_data['name']}' criada com sucesso") + + session.commit() + logger.info("Todas as ferramentas foram criadas com sucesso") + return True + + except SQLAlchemyError as e: + session.rollback() + logger.error(f"Erro de banco de dados ao criar ferramentas: {str(e)}") + return False + + except Exception as e: + logger.error(f"Erro ao criar ferramentas: {str(e)}") + return False + finally: + session.close() + +if __name__ == "__main__": + success = create_tools() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/src/api/admin_routes.py b/src/api/admin_routes.py new file mode 100644 index 00000000..4a84a09d --- /dev/null +++ b/src/api/admin_routes.py @@ -0,0 +1,175 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Request +from sqlalchemy.orm import Session +from typing import List, Optional +from datetime import datetime +import uuid + +from src.config.database import get_db +from src.core.jwt_middleware import get_jwt_token, verify_admin +from src.schemas.audit import AuditLogResponse, AuditLogFilter +from src.services.audit_service import get_audit_logs, create_audit_log +from src.services.user_service import get_admin_users, create_admin_user, deactivate_user +from src.schemas.user import UserResponse, AdminUserCreate + +router = APIRouter( + prefix="/admin", + tags=["administração"], + dependencies=[Depends(verify_admin)], # Todas as rotas requerem permissão de admin + responses={403: {"description": "Permissão negada"}}, +) + +# Rotas para auditoria +@router.get("/audit-logs", response_model=List[AuditLogResponse]) +async def read_audit_logs( + filters: AuditLogFilter = Depends(), + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), +): + """ + Obter logs de auditoria com filtros opcionais + + Args: + filters: Filtros para busca de logs + db: Sessão do banco de dados + payload: Payload do token JWT + + Returns: + List[AuditLogResponse]: Lista de logs de auditoria + """ + return get_audit_logs( + db, + skip=filters.skip, + limit=filters.limit, + user_id=filters.user_id, + action=filters.action, + resource_type=filters.resource_type, + resource_id=filters.resource_id, + start_date=filters.start_date, + end_date=filters.end_date + ) + +# Rotas para administradores +@router.get("/users", response_model=List[UserResponse]) +async def read_admin_users( + skip: int = 0, + limit: int = 100, + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), +): + """ + Listar usuários administradores + + Args: + skip: Número de registros para pular + limit: Número máximo de registros para retornar + db: Sessão do banco de dados + payload: Payload do token JWT + + Returns: + List[UserResponse]: Lista de usuários administradores + """ + return get_admin_users(db, skip, limit) + +@router.post("/users", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def create_new_admin_user( + user_data: AdminUserCreate, + request: Request, + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), +): + """ + Criar um novo usuário administrador + + Args: + user_data: Dados do usuário a ser 
criado + request: Objeto Request do FastAPI + db: Sessão do banco de dados + payload: Payload do token JWT + + Returns: + UserResponse: Dados do usuário criado + + Raises: + HTTPException: Se houver erro na criação + """ + # Obter o ID do usuário atual + user_id = payload.get("user_id") + if not user_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Não foi possível identificar o usuário logado" + ) + + # Criar o usuário administrador + user, message = create_admin_user(db, user_data) + if not user: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + # Registrar ação no log de auditoria + create_audit_log( + db, + user_id=uuid.UUID(user_id), + action="create", + resource_type="admin_user", + resource_id=str(user.id), + details={"email": user.email}, + request=request + ) + + return user + +@router.delete("/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT) +async def deactivate_admin_user( + user_id: uuid.UUID, + request: Request, + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), +): + """ + Desativar um usuário administrador (não exclui, apenas desativa) + + Args: + user_id: ID do usuário a ser desativado + request: Objeto Request do FastAPI + db: Sessão do banco de dados + payload: Payload do token JWT + + Raises: + HTTPException: Se houver erro na desativação + """ + # Obter o ID do usuário atual + current_user_id = payload.get("user_id") + if not current_user_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Não foi possível identificar o usuário logado" + ) + + # Não permitir desativar a si mesmo + if str(user_id) == current_user_id: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Não é possível desativar seu próprio usuário" + ) + + # Desativar o usuário + success, message = deactivate_user(db, user_id) + if not success: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + # Registrar ação no log de auditoria + create_audit_log( + db, + user_id=uuid.UUID(current_user_id), + action="deactivate", + resource_type="admin_user", + resource_id=str(user_id), + details=None, + request=request + ) \ No newline at end of file diff --git a/src/api/auth_routes.py b/src/api/auth_routes.py new file mode 100644 index 00000000..b2b485af --- /dev/null +++ b/src/api/auth_routes.py @@ -0,0 +1,229 @@ +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.orm import Session +from src.config.database import get_db +from src.models.models import User +from src.schemas.user import ( + UserCreate, + UserResponse, + UserLogin, + TokenResponse, + ForgotPassword, + PasswordReset, + MessageResponse +) +from src.services.user_service import ( + create_user, + verify_email, + resend_verification, + forgot_password, + reset_password +) +from src.services.auth_service import ( + authenticate_user, + create_access_token, + get_current_admin_user +) +import logging + +logger = logging.getLogger(__name__) + +router = APIRouter( + prefix="/auth", + tags=["autenticação"], + responses={404: {"description": "Não encontrado"}}, +) + +@router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def register_user(user_data: UserCreate, db: Session = Depends(get_db)): + """ + Registra um novo usuário (cliente) no sistema + + Args: + user_data: Dados do usuário a ser registrado + db: Sessão do banco de dados + + Returns: + UserResponse: Dados do usuário criado + + 
Raises: + HTTPException: Se houver erro no registro + """ + user, message = create_user(db, user_data, is_admin=False) + if not user: + logger.error(f"Erro ao registrar usuário: {message}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + logger.info(f"Usuário registrado com sucesso: {user.email}") + return user + +@router.post("/register-admin", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def register_admin( + user_data: UserCreate, + db: Session = Depends(get_db), + current_admin: User = Depends(get_current_admin_user) +): + """ + Registra um novo administrador no sistema. + Apenas administradores existentes podem criar novos administradores. + + Args: + user_data: Dados do administrador a ser registrado + db: Sessão do banco de dados + current_admin: Administrador atual (autenticado) + + Returns: + UserResponse: Dados do administrador criado + + Raises: + HTTPException: Se houver erro no registro + """ + user, message = create_user(db, user_data, is_admin=True) + if not user: + logger.error(f"Erro ao registrar administrador: {message}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + logger.info(f"Administrador registrado com sucesso: {user.email} (criado por {current_admin.email})") + return user + +@router.get("/verify-email/{token}", response_model=MessageResponse) +async def verify_user_email(token: str, db: Session = Depends(get_db)): + """ + Verifica o email de um usuário usando o token fornecido + + Args: + token: Token de verificação + db: Sessão do banco de dados + + Returns: + MessageResponse: Mensagem de sucesso + + Raises: + HTTPException: Se o token for inválido ou expirado + """ + success, message = verify_email(db, token) + if not success: + logger.warning(f"Falha na verificação de email: {message}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + logger.info(f"Email verificado com sucesso usando token: {token}") + return {"message": message} + +@router.post("/resend-verification", response_model=MessageResponse) +async def resend_verification_email( + email_data: ForgotPassword, + db: Session = Depends(get_db) +): + """ + Reenvia o email de verificação para o usuário + + Args: + email_data: Email do usuário + db: Sessão do banco de dados + + Returns: + MessageResponse: Mensagem de sucesso + + Raises: + HTTPException: Se houver erro no reenvio + """ + success, message = resend_verification(db, email_data.email) + if not success: + logger.warning(f"Falha no reenvio de verificação: {message}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + logger.info(f"Email de verificação reenviado com sucesso para: {email_data.email}") + return {"message": message} + +@router.post("/login", response_model=TokenResponse) +async def login_for_access_token( + form_data: UserLogin, + db: Session = Depends(get_db) +): + """ + Realiza login e retorna um token de acesso JWT + + Args: + form_data: Dados de login (email e senha) + db: Sessão do banco de dados + + Returns: + TokenResponse: Token de acesso e tipo + + Raises: + HTTPException: Se as credenciais forem inválidas + """ + user = authenticate_user(db, form_data.email, form_data.password) + if not user: + logger.warning(f"Tentativa de login com credenciais inválidas: {form_data.email}") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Email ou senha incorretos", + headers={"WWW-Authenticate": "Bearer"}, + ) + + 
access_token = create_access_token(user) + logger.info(f"Login realizado com sucesso para usuário: {user.email}") + return {"access_token": access_token, "token_type": "bearer"} + +@router.post("/forgot-password", response_model=MessageResponse) +async def forgot_user_password( + email_data: ForgotPassword, + db: Session = Depends(get_db) +): + """ + Inicia o processo de recuperação de senha + + Args: + email_data: Email do usuário + db: Sessão do banco de dados + + Returns: + MessageResponse: Mensagem de sucesso + + Raises: + HTTPException: Se houver erro no processo + """ + success, message = forgot_password(db, email_data.email) + # Sempre retornamos a mesma mensagem por segurança + return {"message": "Se o email estiver cadastrado, você receberá instruções para redefinir sua senha."} + +@router.post("/reset-password", response_model=MessageResponse) +async def reset_user_password( + reset_data: PasswordReset, + db: Session = Depends(get_db) +): + """ + Redefine a senha do usuário usando o token fornecido + + Args: + reset_data: Token e nova senha + db: Sessão do banco de dados + + Returns: + MessageResponse: Mensagem de sucesso + + Raises: + HTTPException: Se o token for inválido ou expirado + """ + success, message = reset_password(db, reset_data.token, reset_data.new_password) + if not success: + logger.warning(f"Falha na redefinição de senha: {message}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=message + ) + + logger.info("Senha redefinida com sucesso") + return {"message": message} \ No newline at end of file diff --git a/src/api/routes.py b/src/api/routes.py index 297f8521..7e766bc5 100644 --- a/src/api/routes.py +++ b/src/api/routes.py @@ -1,11 +1,11 @@ -from fastapi import APIRouter, Depends, HTTPException, status, Security +from fastapi import APIRouter, Depends, HTTPException, status from sqlalchemy.orm import Session from typing import List, Dict, Any import uuid from datetime import datetime from src.config.database import get_db -from src.core.middleware import get_api_key +from src.core.jwt_middleware import get_jwt_token, verify_user_client, verify_admin, get_current_user_client_id from src.schemas.schemas import ( Client, ClientCreate, @@ -65,8 +65,19 @@ memory_service = InMemoryMemoryService() async def chat( request: ChatRequest, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o agente pertence ao cliente do usuário + agent = agent_service.get_agent(db, request.agent_id) + if not agent: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Agente não encontrado" + ) + + # Verificar se o usuário tem acesso ao agente (via cliente) + await verify_user_client(payload, db, agent.client_id) + try: final_response_text = await run_agent( request.agent_id, @@ -94,31 +105,60 @@ async def chat( # Rotas para Sessões @router.get("/sessions/client/{client_id}", response_model=List[Adk_Session]) -def get_client_sessions( +async def get_client_sessions( client_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso aos dados deste cliente + await verify_user_client(payload, db, client_id) return get_sessions_by_client(db, client_id) @router.get("/sessions/agent/{agent_id}", response_model=List[Adk_Session]) -def get_agent_sessions( +async def get_agent_sessions( agent_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = 
Security(get_api_key), + payload: dict = Depends(get_jwt_token), skip: int = 0, limit: int = 100, ): + # Verificar se o agente pertence ao cliente do usuário + agent = agent_service.get_agent(db, agent_id) + if not agent: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Agente não encontrado" + ) + + # Verificar se o usuário tem acesso ao agente (via cliente) + await verify_user_client(payload, db, agent.client_id) + return get_sessions_by_agent(db, agent_id, skip, limit) @router.get("/sessions/{session_id}", response_model=Adk_Session) -def get_session( +async def get_session( session_id: str, - api_key: str = Security(get_api_key), + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), ): - return get_session_by_id(session_service, session_id) + # Obter a sessão + session = get_session_by_id(session_service, session_id) + if not session: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sessão não encontrada" + ) + + # Verificar se o agente da sessão pertence ao cliente do usuário + agent_id = uuid.UUID(session.agent_id) if session.agent_id else None + if agent_id: + agent = agent_service.get_agent(db, agent_id) + if agent: + await verify_user_client(payload, db, agent.client_id) + + return session @router.get( @@ -127,8 +167,24 @@ def get_session( ) async def get_agent_messages( session_id: str, - api_key: str = Security(get_api_key), + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), ): + # Obter a sessão + session = get_session_by_id(session_service, session_id) + if not session: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sessão não encontrada" + ) + + # Verificar se o agente da sessão pertence ao cliente do usuário + agent_id = uuid.UUID(session.agent_id) if session.agent_id else None + if agent_id: + agent = agent_service.get_agent(db, agent_id) + if agent: + await verify_user_client(payload, db, agent.client_id) + return get_session_events(session_service, session_id) @@ -136,39 +192,70 @@ async def get_agent_messages( "/sessions/{session_id}", status_code=status.HTTP_204_NO_CONTENT, ) -def remove_session( +async def remove_session( session_id: str, - api_key: str = Security(get_api_key), + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), ): + # Obter a sessão + session = get_session_by_id(session_service, session_id) + if not session: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sessão não encontrada" + ) + + # Verificar se o agente da sessão pertence ao cliente do usuário + agent_id = uuid.UUID(session.agent_id) if session.agent_id else None + if agent_id: + agent = agent_service.get_agent(db, agent_id) + if agent: + await verify_user_client(payload, db, agent.client_id) + return delete_session(session_service, session_id) # Rotas para Clientes @router.post("/clients/", response_model=Client, status_code=status.HTTP_201_CREATED) -def create_client( +async def create_client( client: ClientCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem criar clientes + await verify_admin(payload) return client_service.create_client(db, client) @router.get("/clients/", response_model=List[Client]) -def read_clients( +async def read_clients( skip: int = 0, limit: int = 100, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): - return 
client_service.get_clients(db, skip, limit) + # Se for administrador, pode ver todos os clientes + # Se for usuário comum, só vê o próprio cliente + client_id = get_current_user_client_id(payload) + + if client_id: + # Usuário comum - retorna apenas seu próprio cliente + client = client_service.get_client(db, client_id) + return [client] if client else [] + else: + # Administrador - retorna todos os clientes + return client_service.get_clients(db, skip, limit) @router.get("/clients/{client_id}", response_model=Client) -def read_client( +async def read_client( client_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso aos dados deste cliente + await verify_user_client(payload, db, client_id) + db_client = client_service.get_client(db, client_id) if db_client is None: raise HTTPException( @@ -178,12 +265,15 @@ def read_client( @router.put("/clients/{client_id}", response_model=Client) -def update_client( +async def update_client( client_id: uuid.UUID, client: ClientCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso aos dados deste cliente + await verify_user_client(payload, db, client_id) + db_client = client_service.update_client(db, client_id, client) if db_client is None: raise HTTPException( @@ -193,11 +283,14 @@ def update_client( @router.delete("/clients/{client_id}", status_code=status.HTTP_204_NO_CONTENT) -def delete_client( +async def delete_client( client_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem excluir clientes + await verify_admin(payload) + if not client_service.delete_client(db, client_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Cliente não encontrado" @@ -206,46 +299,71 @@ def delete_client( # Rotas para Contatos @router.post("/contacts/", response_model=Contact, status_code=status.HTTP_201_CREATED) -def create_contact( +async def create_contact( contact: ContactCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso ao cliente do contato + await verify_user_client(payload, db, contact.client_id) + return contact_service.create_contact(db, contact) @router.get("/contacts/{client_id}", response_model=List[Contact]) -def read_contacts( +async def read_contacts( client_id: uuid.UUID, skip: int = 0, limit: int = 100, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso aos dados deste cliente + await verify_user_client(payload, db, client_id) + return contact_service.get_contacts_by_client(db, client_id, skip, limit) @router.get("/contact/{contact_id}", response_model=Contact) -def read_contact( +async def read_contact( contact_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): db_contact = contact_service.get_contact(db, contact_id) if db_contact is None: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" ) + + # Verificar se o usuário tem acesso ao cliente do contato + await verify_user_client(payload, db, db_contact.client_id) + return db_contact @router.put("/contact/{contact_id}", 
response_model=Contact) -def update_contact( +async def update_contact( contact_id: uuid.UUID, contact: ContactCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Buscar o contato atual + db_current_contact = contact_service.get_contact(db, contact_id) + if db_current_contact is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" + ) + + # Verificar se o usuário tem acesso ao cliente do contato + await verify_user_client(payload, db, db_current_contact.client_id) + + # Verificar se está tentando mudar o cliente + if contact.client_id != db_current_contact.client_id: + # Verificar se o usuário tem acesso ao novo cliente também + await verify_user_client(payload, db, contact.client_id) + db_contact = contact_service.update_contact(db, contact_id, contact) if db_contact is None: raise HTTPException( @@ -255,11 +373,21 @@ def update_contact( @router.delete("/contact/{contact_id}", status_code=status.HTTP_204_NO_CONTENT) -def delete_contact( +async def delete_contact( contact_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Buscar o contato + db_contact = contact_service.get_contact(db, contact_id) + if db_contact is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" + ) + + # Verificar se o usuário tem acesso ao cliente do contato + await verify_user_client(payload, db, db_contact.client_id) + if not contact_service.delete_contact(db, contact_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Contato não encontrado" @@ -268,87 +396,129 @@ def delete_contact( # Rotas para Agentes @router.post("/agents/", response_model=Agent, status_code=status.HTTP_201_CREATED) -def create_agent( +async def create_agent( agent: AgentCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso ao cliente do agente + await verify_user_client(payload, db, agent.client_id) + return agent_service.create_agent(db, agent) @router.get("/agents/{client_id}", response_model=List[Agent]) -def read_agents( +async def read_agents( client_id: uuid.UUID, skip: int = 0, limit: int = 100, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Verificar se o usuário tem acesso aos dados deste cliente + await verify_user_client(payload, db, client_id) + return agent_service.get_agents_by_client(db, client_id, skip, limit) @router.get("/agent/{agent_id}", response_model=Agent) -def read_agent( +async def read_agent( agent_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): db_agent = agent_service.get_agent(db, agent_id) if db_agent is None: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) + + # Verificar se o usuário tem acesso ao cliente do agente + await verify_user_client(payload, db, db_agent.client_id) + return db_agent @router.put("/agent/{agent_id}", response_model=Agent) async def update_agent( - agent_id: uuid.UUID, agent_data: Dict[str, Any], db: Session = Depends(get_db) + agent_id: uuid.UUID, + agent_data: Dict[str, Any], + db: Session = Depends(get_db), + payload: dict = Depends(get_jwt_token), ): - """Atualiza um agente existente""" + # Buscar o agente 
atual + db_agent = agent_service.get_agent(db, agent_id) + if db_agent is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" + ) + + # Verificar se o usuário tem acesso ao cliente do agente + await verify_user_client(payload, db, db_agent.client_id) + + # Se estiver tentando mudar o client_id, verificar permissão para o novo cliente também + if 'client_id' in agent_data and agent_data['client_id'] != str(db_agent.client_id): + new_client_id = uuid.UUID(agent_data['client_id']) + await verify_user_client(payload, db, new_client_id) + return await agent_service.update_agent(db, agent_id, agent_data) @router.delete("/agent/{agent_id}", status_code=status.HTTP_204_NO_CONTENT) -def delete_agent( +async def delete_agent( agent_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Buscar o agente + db_agent = agent_service.get_agent(db, agent_id) + if db_agent is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" + ) + + # Verificar se o usuário tem acesso ao cliente do agente + await verify_user_client(payload, db, db_agent.client_id) + if not agent_service.delete_agent(db, agent_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Agente não encontrado" ) -# Rotas para MCPServers +# Rotas para Servidores MCP @router.post( "/mcp-servers/", response_model=MCPServer, status_code=status.HTTP_201_CREATED ) -def create_mcp_server( +async def create_mcp_server( server: MCPServerCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem criar servidores MCP + await verify_admin(payload) + return mcp_server_service.create_mcp_server(db, server) @router.get("/mcp-servers/", response_model=List[MCPServer]) -def read_mcp_servers( +async def read_mcp_servers( skip: int = 0, limit: int = 100, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Todos os usuários autenticados podem listar servidores MCP return mcp_server_service.get_mcp_servers(db, skip, limit) @router.get("/mcp-servers/{server_id}", response_model=MCPServer) -def read_mcp_server( +async def read_mcp_server( server_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Todos os usuários autenticados podem ver detalhes do servidor MCP db_server = mcp_server_service.get_mcp_server(db, server_id) if db_server is None: raise HTTPException( @@ -358,12 +528,15 @@ def read_mcp_server( @router.put("/mcp-servers/{server_id}", response_model=MCPServer) -def update_mcp_server( +async def update_mcp_server( server_id: uuid.UUID, server: MCPServerCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem atualizar servidores MCP + await verify_admin(payload) + db_server = mcp_server_service.update_mcp_server(db, server_id, server) if db_server is None: raise HTTPException( @@ -373,43 +546,51 @@ def update_mcp_server( @router.delete("/mcp-servers/{server_id}", status_code=status.HTTP_204_NO_CONTENT) -def delete_mcp_server( +async def delete_mcp_server( server_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem 
excluir servidores MCP + await verify_admin(payload) + if not mcp_server_service.delete_mcp_server(db, server_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Servidor MCP não encontrado" ) -# Rotas para Tools +# Rotas para Ferramentas @router.post("/tools/", response_model=Tool, status_code=status.HTTP_201_CREATED) -def create_tool( +async def create_tool( tool: ToolCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem criar ferramentas + await verify_admin(payload) + return tool_service.create_tool(db, tool) @router.get("/tools/", response_model=List[Tool]) -def read_tools( +async def read_tools( skip: int = 0, limit: int = 100, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Todos os usuários autenticados podem listar ferramentas return tool_service.get_tools(db, skip, limit) @router.get("/tools/{tool_id}", response_model=Tool) -def read_tool( +async def read_tool( tool_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Todos os usuários autenticados podem ver detalhes da ferramenta db_tool = tool_service.get_tool(db, tool_id) if db_tool is None: raise HTTPException( @@ -419,12 +600,15 @@ def read_tool( @router.put("/tools/{tool_id}", response_model=Tool) -def update_tool( +async def update_tool( tool_id: uuid.UUID, tool: ToolCreate, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem atualizar ferramentas + await verify_admin(payload) + db_tool = tool_service.update_tool(db, tool_id, tool) if db_tool is None: raise HTTPException( @@ -434,11 +618,14 @@ def update_tool( @router.delete("/tools/{tool_id}", status_code=status.HTTP_204_NO_CONTENT) -def delete_tool( +async def delete_tool( tool_id: uuid.UUID, db: Session = Depends(get_db), - api_key: str = Security(get_api_key), + payload: dict = Depends(get_jwt_token), ): + # Apenas administradores podem excluir ferramentas + await verify_admin(payload) + if not tool_service.delete_tool(db, tool_id): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Ferramenta não encontrada" diff --git a/src/config/settings.py b/src/config/settings.py index 9f5e8e57..1dfdf323 100644 --- a/src/config/settings.py +++ b/src/config/settings.py @@ -1,5 +1,5 @@ import os -from typing import Optional +from typing import Optional, List from pydantic_settings import BaseSettings from functools import lru_cache import secrets @@ -36,14 +36,38 @@ class Settings(BaseSettings): # TTL do cache de ferramentas em segundos (1 hora) TOOLS_CACHE_TTL: int = int(os.getenv("TOOLS_CACHE_TTL", 3600)) - # Configurações da API - API_KEY: str = secrets.token_urlsafe(32) # Gera uma API Key aleatória se não for definida - API_KEY_HEADER: str = "X-API-Key" + # Configurações JWT + JWT_SECRET_KEY: str = os.getenv("JWT_SECRET_KEY", secrets.token_urlsafe(32)) + JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256") + JWT_EXPIRATION_TIME: int = int(os.getenv("JWT_EXPIRATION_TIME", 30)) # Em minutos + + # Configurações SendGrid + SENDGRID_API_KEY: str = os.getenv("SENDGRID_API_KEY", "") + EMAIL_FROM: str = os.getenv("EMAIL_FROM", "noreply@yourdomain.com") + APP_URL: str = os.getenv("APP_URL", "http://localhost:8000") # Configurações do Servidor - HOST: str = "0.0.0.0" - PORT: int = 8000 - DEBUG: bool 
= False + HOST: str = os.getenv("HOST", "0.0.0.0") + PORT: int = int(os.getenv("PORT", 8000)) + DEBUG: bool = os.getenv("DEBUG", "false").lower() == "true" + + # Configurações de CORS + CORS_ORIGINS: List[str] = os.getenv("CORS_ORIGINS", "*").split(",") + + # Configurações de Token + TOKEN_EXPIRY_HOURS: int = int(os.getenv("TOKEN_EXPIRY_HOURS", 24)) # Tokens de verificação/reset + + # Configurações de Segurança + PASSWORD_MIN_LENGTH: int = int(os.getenv("PASSWORD_MIN_LENGTH", 8)) + MAX_LOGIN_ATTEMPTS: int = int(os.getenv("MAX_LOGIN_ATTEMPTS", 5)) + LOGIN_LOCKOUT_MINUTES: int = int(os.getenv("LOGIN_LOCKOUT_MINUTES", 30)) + + # Configurações de Seeders + ADMIN_EMAIL: str = os.getenv("ADMIN_EMAIL", "admin@evoai.com") + ADMIN_INITIAL_PASSWORD: str = os.getenv("ADMIN_INITIAL_PASSWORD", "senhaforte123") + DEMO_EMAIL: str = os.getenv("DEMO_EMAIL", "demo@exemplo.com") + DEMO_PASSWORD: str = os.getenv("DEMO_PASSWORD", "demo123") + DEMO_CLIENT_NAME: str = os.getenv("DEMO_CLIENT_NAME", "Cliente Demo") class Config: env_file = ".env" diff --git a/src/core/jwt_middleware.py b/src/core/jwt_middleware.py new file mode 100644 index 00000000..203ec667 --- /dev/null +++ b/src/core/jwt_middleware.py @@ -0,0 +1,145 @@ +from fastapi import HTTPException, Depends, status +from fastapi.security import OAuth2PasswordBearer +from jose import JWTError, jwt +from src.config.settings import settings +from datetime import datetime +from sqlalchemy.orm import Session +from src.config.database import get_db +from src.models.models import User +from src.services.user_service import get_user_by_email +from uuid import UUID +import logging +from typing import Optional + +logger = logging.getLogger(__name__) + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="auth/login") + +async def get_jwt_token(token: str = Depends(oauth2_scheme)) -> dict: + """ + Extrai e valida o token JWT + + Args: + token: Token JWT + + Returns: + dict: Dados do payload do token + + Raises: + HTTPException: Se o token for inválido + """ + credentials_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Credenciais inválidas", + headers={"WWW-Authenticate": "Bearer"}, + ) + + try: + payload = jwt.decode( + token, + settings.JWT_SECRET_KEY, + algorithms=[settings.JWT_ALGORITHM] + ) + + email: str = payload.get("sub") + if email is None: + logger.warning("Token sem email (sub)") + raise credentials_exception + + exp = payload.get("exp") + if exp is None or datetime.fromtimestamp(exp) < datetime.utcnow(): + logger.warning(f"Token expirado para {email}") + raise credentials_exception + + return payload + + except JWTError as e: + logger.error(f"Erro ao decodificar token JWT: {str(e)}") + raise credentials_exception + +async def verify_user_client( + payload: dict = Depends(get_jwt_token), + db: Session = Depends(get_db), + required_client_id: UUID = None +) -> bool: + """ + Verifica se o usuário está associado ao cliente especificado + + Args: + payload: Payload do token JWT + db: Sessão do banco de dados + required_client_id: ID do cliente que deve ser verificado + + Returns: + bool: True se verificado com sucesso + + Raises: + HTTPException: Se o usuário não tiver permissão + """ + # Administradores têm acesso a todos os clientes + if payload.get("is_admin", False): + return True + + # Para não-admins, verificar se o client_id corresponde + user_client_id = payload.get("client_id") + if not user_client_id: + logger.warning(f"Usuário não-admin sem client_id no token: {payload.get('sub')}") + raise HTTPException( + 
status_code=status.HTTP_403_FORBIDDEN, + detail="Usuário não associado a um cliente" + ) + + # Se não foi especificado um client_id para verificar, qualquer cliente é válido + if not required_client_id: + return True + + # Verificar se o client_id do usuário corresponde ao required_client_id + if str(user_client_id) != str(required_client_id): + logger.warning(f"Acesso negado: Usuário {payload.get('sub')} tentou acessar recursos do cliente {required_client_id}") + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Permissão negada para acessar recursos deste cliente" + ) + + return True + +async def verify_admin(payload: dict = Depends(get_jwt_token)) -> bool: + """ + Verifica se o usuário é um administrador + + Args: + payload: Payload do token JWT + + Returns: + bool: True se for administrador + + Raises: + HTTPException: Se o usuário não for administrador + """ + if not payload.get("is_admin", False): + logger.warning(f"Acesso admin negado para usuário: {payload.get('sub')}") + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Permissão negada. Acesso restrito a administradores." + ) + + return True + +def get_current_user_client_id(payload: dict = Depends(get_jwt_token)) -> Optional[UUID]: + """ + Obtém o ID do cliente associado ao usuário atual + + Args: + payload: Payload do token JWT + + Returns: + Optional[UUID]: ID do cliente ou None se for administrador + """ + if payload.get("is_admin", False): + return None + + client_id = payload.get("client_id") + if client_id: + return UUID(client_id) + + return None \ No newline at end of file diff --git a/src/core/middleware.py b/src/core/middleware.py deleted file mode 100644 index 291ddf88..00000000 --- a/src/core/middleware.py +++ /dev/null @@ -1,26 +0,0 @@ -from fastapi import HTTPException, Security, status -from fastapi.security.api_key import APIKeyHeader -from src.config.settings import settings -import logging - -logger = logging.getLogger(__name__) - -api_key_header = APIKeyHeader(name="X-API-Key", auto_error=True) - -async def get_api_key(api_key_header: str = Security(api_key_header)) -> str: - """Verifica se a API Key fornecida é válida""" - try: - if api_key_header == settings.API_KEY: - return api_key_header - else: - logger.warning(f"Tentativa de acesso com API Key inválida: {api_key_header}") - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="API Key inválida" - ) - except Exception as e: - logger.error(f"Erro ao verificar API Key: {str(e)}") - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Erro ao verificar API Key" - ) \ No newline at end of file diff --git a/src/main.py b/src/main.py index 50a3333b..b39023e8 100644 --- a/src/main.py +++ b/src/main.py @@ -7,9 +7,12 @@ root_dir = Path(__file__).parent.parent sys.path.append(str(root_dir)) from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware from typing import Dict, Any from src.config.database import engine, Base from src.api.routes import router +from src.api.auth_routes import router as auth_router +from src.api.admin_routes import router as admin_router from src.config.settings import settings from src.utils.logger import setup_logger @@ -19,10 +22,41 @@ logger = setup_logger(__name__) # Inicialização do FastAPI app = FastAPI( title=settings.API_TITLE, - description=settings.API_DESCRIPTION, + description=settings.API_DESCRIPTION + """ + \n\n + ## Autenticação + Esta API utiliza autenticação JWT (JSON Web Token). 
Para acessar os endpoints protegidos: + + 1. Registre-se em `/api/v1/auth/register` ou faça login em `/api/v1/auth/login` + 2. Use o token recebido no header de autorização: `Authorization: Bearer {token}` + 3. Tokens expiram após o tempo configurado (padrão: 30 minutos) + + Diferente da versão anterior que usava API Key, o sistema JWT: + - Identifica o usuário específico que está fazendo a requisição + - Limita o acesso apenas aos recursos do cliente ao qual o usuário está associado + - Distingue entre usuários comuns e administradores para controle de acesso + + ## Área Administrativa + Funcionalidades exclusivas para administradores estão disponíveis em `/api/v1/admin/*`: + + - Gerenciamento de usuários administradores + - Logs de auditoria para rastreamento de ações + - Controle de acesso privilegiado + + Essas rotas são acessíveis apenas para usuários com flag `is_admin=true`. + """, version=settings.API_VERSION, ) +# Configuração de CORS +app.add_middleware( + CORSMiddleware, + allow_origins=settings.CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + # Configuração do PostgreSQL POSTGRES_CONNECTION_STRING = os.getenv( "POSTGRES_CONNECTION_STRING", @@ -33,8 +67,15 @@ POSTGRES_CONNECTION_STRING = os.getenv( Base.metadata.create_all(bind=engine) # Incluir as rotas +app.include_router(auth_router, prefix="/api/v1") +app.include_router(admin_router, prefix="/api/v1") app.include_router(router, prefix="/api/v1") @app.get("/") def read_root(): - return {"message": "Welcome to A2A SaaS API"} + return { + "message": "Bem-vindo à API Evo AI", + "documentation": "/docs", + "version": settings.API_VERSION, + "auth": "Para acessar a API, use autenticação JWT via '/api/v1/auth/login'" + } diff --git a/src/models/models.py b/src/models/models.py index 6d52387a..1cfc3e35 100644 --- a/src/models/models.py +++ b/src/models/models.py @@ -1,5 +1,6 @@ from sqlalchemy import Column, String, UUID, DateTime, ForeignKey, JSON, Text, BigInteger, CheckConstraint, Boolean from sqlalchemy.sql import func +from sqlalchemy.orm import relationship, backref from src.config.database import Base import uuid @@ -11,6 +12,26 @@ class Client(Base): created_at = Column(DateTime(timezone=True), server_default=func.now()) updated_at = Column(DateTime(timezone=True), onupdate=func.now()) +class User(Base): + __tablename__ = "users" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + email = Column(String, unique=True, index=True, nullable=False) + password_hash = Column(String, nullable=False) + client_id = Column(UUID(as_uuid=True), ForeignKey("clients.id", ondelete="CASCADE"), nullable=True) + is_active = Column(Boolean, default=False) + is_admin = Column(Boolean, default=False) + email_verified = Column(Boolean, default=False) + verification_token = Column(String, nullable=True) + verification_token_expiry = Column(DateTime(timezone=True), nullable=True) + password_reset_token = Column(String, nullable=True) + password_reset_expiry = Column(DateTime(timezone=True), nullable=True) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) + + # Relacionamento com Client (One-to-One, opcional para administradores) + client = relationship("Client", backref=backref("user", uselist=False, cascade="all, delete-orphan")) + class Contact(Base): __tablename__ = "contacts" @@ -109,4 +130,20 @@ class Session(Base): user_id = Column(String) state = Column(JSON) create_time = 
Column(DateTime(timezone=True)) - update_time = Column(DateTime(timezone=True)) \ No newline at end of file + update_time = Column(DateTime(timezone=True)) + +class AuditLog(Base): + __tablename__ = "audit_logs" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True) + action = Column(String, nullable=False) + resource_type = Column(String, nullable=False) + resource_id = Column(String, nullable=True) + details = Column(JSON, nullable=True) + ip_address = Column(String, nullable=True) + user_agent = Column(String, nullable=True) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + + # Relacionamento com User + user = relationship("User", backref="audit_logs") \ No newline at end of file diff --git a/src/schemas/audit.py b/src/schemas/audit.py new file mode 100644 index 00000000..52be0c07 --- /dev/null +++ b/src/schemas/audit.py @@ -0,0 +1,37 @@ +from pydantic import BaseModel, Field +from typing import Optional, Dict, Any +from datetime import datetime +from uuid import UUID + +class AuditLogBase(BaseModel): + """Schema base para log de auditoria""" + action: str + resource_type: str + resource_id: Optional[str] = None + details: Optional[Dict[str, Any]] = None + +class AuditLogCreate(AuditLogBase): + """Schema para criação de log de auditoria""" + pass + +class AuditLogResponse(AuditLogBase): + """Schema para resposta de log de auditoria""" + id: UUID + user_id: Optional[UUID] = None + ip_address: Optional[str] = None + user_agent: Optional[str] = None + created_at: datetime + + class Config: + from_attributes = True + +class AuditLogFilter(BaseModel): + """Schema para filtros de busca de logs de auditoria""" + user_id: Optional[UUID] = None + action: Optional[str] = None + resource_type: Optional[str] = None + resource_id: Optional[str] = None + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + skip: Optional[int] = Field(0, ge=0) + limit: Optional[int] = Field(100, ge=1, le=1000) \ No newline at end of file diff --git a/src/schemas/user.py b/src/schemas/user.py new file mode 100644 index 00000000..98fb41be --- /dev/null +++ b/src/schemas/user.py @@ -0,0 +1,46 @@ +from pydantic import BaseModel, EmailStr, Field +from typing import Optional +from datetime import datetime +from uuid import UUID + +class UserBase(BaseModel): + email: EmailStr + +class UserCreate(UserBase): + password: str + name: str # Para criação do cliente associado + +class UserLogin(BaseModel): + email: EmailStr + password: str + +class UserResponse(UserBase): + id: UUID + client_id: Optional[UUID] = None + is_active: bool + email_verified: bool + is_admin: bool + created_at: datetime + + class Config: + from_attributes = True + +class TokenResponse(BaseModel): + access_token: str + token_type: str + +class TokenData(BaseModel): + sub: str # email do usuário + exp: datetime + is_admin: bool + client_id: Optional[UUID] = None + +class PasswordReset(BaseModel): + token: str + new_password: str + +class ForgotPassword(BaseModel): + email: EmailStr + +class MessageResponse(BaseModel): + message: str \ No newline at end of file diff --git a/src/services/audit_service.py b/src/services/audit_service.py new file mode 100644 index 00000000..f1799f50 --- /dev/null +++ b/src/services/audit_service.py @@ -0,0 +1,136 @@ +from sqlalchemy.orm import Session +from sqlalchemy.exc import SQLAlchemyError +from src.models.models import AuditLog, User +from datetime import 
datetime +from fastapi import Request +from typing import Optional, Dict, Any, List +import uuid +import logging +import json + +logger = logging.getLogger(__name__) + +def create_audit_log( + db: Session, + user_id: Optional[uuid.UUID], + action: str, + resource_type: str, + resource_id: Optional[str] = None, + details: Optional[Dict[str, Any]] = None, + request: Optional[Request] = None +) -> Optional[AuditLog]: + """ + Cria um novo registro de auditoria + + Args: + db: Sessão do banco de dados + user_id: ID do usuário que realizou a ação (ou None se anônimo) + action: Ação realizada (ex: "create", "update", "delete") + resource_type: Tipo de recurso (ex: "client", "agent", "user") + resource_id: ID do recurso (opcional) + details: Detalhes adicionais da ação (opcional) + request: Objeto Request do FastAPI (opcional, para obter IP e User-Agent) + + Returns: + Optional[AuditLog]: Registro de auditoria criado ou None em caso de erro + """ + try: + ip_address = None + user_agent = None + + if request: + ip_address = request.client.host if hasattr(request, 'client') else None + user_agent = request.headers.get("user-agent") + + # Converter details para formato serializável + if details: + # Converter UUIDs para strings + for key, value in details.items(): + if isinstance(value, uuid.UUID): + details[key] = str(value) + + audit_log = AuditLog( + user_id=user_id, + action=action, + resource_type=resource_type, + resource_id=str(resource_id) if resource_id else None, + details=details, + ip_address=ip_address, + user_agent=user_agent + ) + + db.add(audit_log) + db.commit() + db.refresh(audit_log) + + logger.info( + f"Audit log criado: {action} em {resource_type}" + + (f" (ID: {resource_id})" if resource_id else "") + ) + + return audit_log + + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao criar registro de auditoria: {str(e)}") + return None + except Exception as e: + logger.error(f"Erro inesperado ao criar registro de auditoria: {str(e)}") + return None + +def get_audit_logs( + db: Session, + skip: int = 0, + limit: int = 100, + user_id: Optional[uuid.UUID] = None, + action: Optional[str] = None, + resource_type: Optional[str] = None, + resource_id: Optional[str] = None, + start_date: Optional[datetime] = None, + end_date: Optional[datetime] = None +) -> List[AuditLog]: + """ + Obtém registros de auditoria com filtros opcionais + + Args: + db: Sessão do banco de dados + skip: Número de registros para pular + limit: Número máximo de registros para retornar + user_id: Filtrar por ID do usuário + action: Filtrar por ação + resource_type: Filtrar por tipo de recurso + resource_id: Filtrar por ID do recurso + start_date: Data inicial + end_date: Data final + + Returns: + List[AuditLog]: Lista de registros de auditoria + """ + query = db.query(AuditLog) + + # Aplicar filtros, se fornecidos + if user_id: + query = query.filter(AuditLog.user_id == user_id) + + if action: + query = query.filter(AuditLog.action == action) + + if resource_type: + query = query.filter(AuditLog.resource_type == resource_type) + + if resource_id: + query = query.filter(AuditLog.resource_id == resource_id) + + if start_date: + query = query.filter(AuditLog.created_at >= start_date) + + if end_date: + query = query.filter(AuditLog.created_at <= end_date) + + # Ordenar por data de criação (mais recentes primeiro) + query = query.order_by(AuditLog.created_at.desc()) + + # Aplicar paginação + query = query.offset(skip).limit(limit) + + return query.all() \ No newline at end of file diff --git 
a/src/services/auth_service.py b/src/services/auth_service.py new file mode 100644 index 00000000..79cc38fc --- /dev/null +++ b/src/services/auth_service.py @@ -0,0 +1,152 @@ +from sqlalchemy.orm import Session +from src.models.models import User +from src.schemas.user import TokenData +from src.services.user_service import authenticate_user, get_user_by_email +from src.utils.security import create_jwt_token +from fastapi import Depends, HTTPException, status +from fastapi.security import OAuth2PasswordBearer +from jose import JWTError, jwt +from src.config.settings import settings +from datetime import datetime, timedelta +import logging +from typing import Optional + +logger = logging.getLogger(__name__) + +# Definir scheme de autenticação OAuth2 com password flow +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="auth/login") + +async def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)) -> User: + """ + Obtém o usuário atual a partir do token JWT + + Args: + token: Token JWT + db: Sessão do banco de dados + + Returns: + User: Usuário atual + + Raises: + HTTPException: Se o token for inválido ou o usuário não for encontrado + """ + credentials_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Credenciais inválidas", + headers={"WWW-Authenticate": "Bearer"}, + ) + + try: + # Decodificar o token + payload = jwt.decode( + token, + settings.JWT_SECRET_KEY, + algorithms=[settings.JWT_ALGORITHM] + ) + + # Extrair dados do token + email: str = payload.get("sub") + if email is None: + logger.warning("Token sem email (sub)") + raise credentials_exception + + # Verificar se o token expirou + exp = payload.get("exp") + if exp is None or datetime.fromtimestamp(exp) < datetime.utcnow(): + logger.warning(f"Token expirado para {email}") + raise credentials_exception + + # Criar objeto TokenData + token_data = TokenData( + sub=email, + exp=datetime.fromtimestamp(exp), + is_admin=payload.get("is_admin", False), + client_id=payload.get("client_id") + ) + + except JWTError as e: + logger.error(f"Erro ao decodificar token JWT: {str(e)}") + raise credentials_exception + + # Buscar usuário no banco de dados + user = get_user_by_email(db, email=token_data.sub) + if user is None: + logger.warning(f"Usuário não encontrado para o email: {token_data.sub}") + raise credentials_exception + + if not user.is_active: + logger.warning(f"Tentativa de acesso com usuário inativo: {user.email}") + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Usuário inativo" + ) + + return user + +async def get_current_active_user(current_user: User = Depends(get_current_user)) -> User: + """ + Verifica se o usuário atual está ativo + + Args: + current_user: Usuário atual + + Returns: + User: Usuário atual se estiver ativo + + Raises: + HTTPException: Se o usuário não estiver ativo + """ + if not current_user.is_active: + logger.warning(f"Tentativa de acesso com usuário inativo: {current_user.email}") + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Usuário inativo" + ) + return current_user + +async def get_current_admin_user(current_user: User = Depends(get_current_user)) -> User: + """ + Verifica se o usuário atual é um administrador + + Args: + current_user: Usuário atual + + Returns: + User: Usuário atual se for administrador + + Raises: + HTTPException: Se o usuário não for administrador + """ + if not current_user.is_admin: + logger.warning(f"Tentativa de acesso admin por usuário não-admin: {current_user.email}") + 
raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Permissão negada. Acesso restrito a administradores." + ) + return current_user + +def create_access_token(user: User) -> str: + """ + Cria um token de acesso JWT para o usuário + + Args: + user: Usuário para o qual criar o token + + Returns: + str: Token JWT + """ + # Dados a serem incluídos no token + token_data = { + "sub": user.email, + "is_admin": user.is_admin, + } + + # Incluir client_id apenas se não for administrador + if not user.is_admin and user.client_id: + token_data["client_id"] = str(user.client_id) + + # Criar token + return create_jwt_token(token_data) + +# Dependência para obter a sessão do banco de dados +from src.config.database import get_db \ No newline at end of file diff --git a/src/services/email_service.py b/src/services/email_service.py new file mode 100644 index 00000000..cc2ed178 --- /dev/null +++ b/src/services/email_service.py @@ -0,0 +1,159 @@ +import sendgrid +from sendgrid.helpers.mail import Mail, Email, To, Content +from src.config.settings import settings +import logging +from datetime import datetime + +logger = logging.getLogger(__name__) + +def send_verification_email(email: str, token: str) -> bool: + """ + Envia um email de verificação para o usuário + + Args: + email: Email do destinatário + token: Token de verificação de email + + Returns: + bool: True se o email foi enviado com sucesso, False caso contrário + """ + try: + sg = sendgrid.SendGridAPIClient(api_key=settings.SENDGRID_API_KEY) + from_email = Email(settings.EMAIL_FROM) + to_email = To(email) + subject = "Verificação de Email - Evo AI" + + verification_link = f"{settings.APP_URL}/auth/verify-email/{token}" + + content = Content( + "text/html", + f""" + + + + + +
+                <div>
+                    <h1>Evo AI</h1>
+                    <h2>Bem-vindo à Plataforma Evo AI!</h2>
+                    <p>Obrigado por se cadastrar. Para verificar sua conta e começar a usar nossos serviços,
+                    por favor clique no botão abaixo:</p>
+                    <p><a href="{verification_link}">Verificar meu Email</a></p>
+                    <p>Ou copie e cole o link abaixo no seu navegador:</p>
+                    <p>{verification_link}</p>
+                    <p>Este link é válido por 24 horas.</p>
+                    <p>Se você não solicitou este email, por favor ignore-o.</p>
+                </div>
+ + + """ + ) + + mail = Mail(from_email, to_email, subject, content) + response = sg.client.mail.send.post(request_body=mail.get()) + + if response.status_code >= 200 and response.status_code < 300: + logger.info(f"Email de verificação enviado para {email}") + return True + else: + logger.error(f"Falha ao enviar email de verificação para {email}. Status: {response.status_code}") + return False + + except Exception as e: + logger.error(f"Erro ao enviar email de verificação para {email}: {str(e)}") + return False + +def send_password_reset_email(email: str, token: str) -> bool: + """ + Envia um email de redefinição de senha para o usuário + + Args: + email: Email do destinatário + token: Token de redefinição de senha + + Returns: + bool: True se o email foi enviado com sucesso, False caso contrário + """ + try: + sg = sendgrid.SendGridAPIClient(api_key=settings.SENDGRID_API_KEY) + from_email = Email(settings.EMAIL_FROM) + to_email = To(email) + subject = "Redefinição de Senha - Evo AI" + + reset_link = f"{settings.APP_URL}/reset-password?token={token}" + + content = Content( + "text/html", + f""" + + + + + +
+                <div>
+                    <h1>Evo AI</h1>
+                    <h2>Redefinição de Senha</h2>
+                    <p>Recebemos uma solicitação para redefinir sua senha. Clique no botão abaixo
+                    para criar uma nova senha:</p>
+                    <p><a href="{reset_link}">Redefinir minha Senha</a></p>
+                    <p>Ou copie e cole o link abaixo no seu navegador:</p>
+                    <p>{reset_link}</p>
+                    <p>Este link é válido por 1 hora.</p>
+                    <p>Se você não solicitou esta alteração, por favor ignore este email
+                    e entre em contato com o suporte imediatamente.</p>
+                </div>
+ + + """ + ) + + mail = Mail(from_email, to_email, subject, content) + response = sg.client.mail.send.post(request_body=mail.get()) + + if response.status_code >= 200 and response.status_code < 300: + logger.info(f"Email de redefinição de senha enviado para {email}") + return True + else: + logger.error(f"Falha ao enviar email de redefinição de senha para {email}. Status: {response.status_code}") + return False + + except Exception as e: + logger.error(f"Erro ao enviar email de redefinição de senha para {email}: {str(e)}") + return False \ No newline at end of file diff --git a/src/services/user_service.py b/src/services/user_service.py new file mode 100644 index 00000000..4480e54e --- /dev/null +++ b/src/services/user_service.py @@ -0,0 +1,302 @@ +from sqlalchemy.orm import Session +from sqlalchemy.exc import SQLAlchemyError +from src.models.models import User, Client +from src.schemas.user import UserCreate, UserResponse +from src.utils.security import get_password_hash, verify_password, generate_token +from src.services.email_service import send_verification_email, send_password_reset_email +from datetime import datetime, timedelta +import uuid +import logging +from typing import Optional, Tuple + +logger = logging.getLogger(__name__) + +def create_user(db: Session, user_data: UserCreate, is_admin: bool = False) -> Tuple[Optional[User], str]: + """ + Cria um novo usuário no sistema + + Args: + db: Sessão do banco de dados + user_data: Dados do usuário a ser criado + is_admin: Se o usuário é um administrador + + Returns: + Tuple[Optional[User], str]: Tupla com o usuário criado (ou None em caso de erro) e mensagem de status + """ + try: + # Verificar se email já existe + db_user = db.query(User).filter(User.email == user_data.email).first() + if db_user: + logger.warning(f"Tentativa de cadastro com email já existente: {user_data.email}") + return None, "Email já cadastrado" + + # Criar token de verificação + verification_token = generate_token() + token_expiry = datetime.utcnow() + timedelta(hours=24) + + # Iniciar transação + user = None + client_id = None + + try: + # Se não for admin, criar um cliente associado + if not is_admin: + client = Client(name=user_data.name) + db.add(client) + db.flush() # Obter o ID do cliente + client_id = client.id + + # Criar usuário + user = User( + email=user_data.email, + password_hash=get_password_hash(user_data.password), + client_id=client_id, + is_admin=is_admin, + is_active=False, # Inativo até verificar email + email_verified=False, + verification_token=verification_token, + verification_token_expiry=token_expiry + ) + db.add(user) + db.commit() + + # Enviar email de verificação + email_sent = send_verification_email(user.email, verification_token) + if not email_sent: + logger.error(f"Falha ao enviar email de verificação para {user.email}") + # Não fazemos rollback aqui, apenas logamos o erro + + logger.info(f"Usuário criado com sucesso: {user.email}") + return user, "Usuário criado com sucesso. Verifique seu email para ativar sua conta." 
+ + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao criar usuário: {str(e)}") + return None, f"Erro ao criar usuário: {str(e)}" + + except Exception as e: + logger.error(f"Erro inesperado ao criar usuário: {str(e)}") + return None, f"Erro inesperado: {str(e)}" + +def verify_email(db: Session, token: str) -> Tuple[bool, str]: + """ + Verifica o email de um usuário usando o token fornecido + + Args: + db: Sessão do banco de dados + token: Token de verificação + + Returns: + Tuple[bool, str]: Tupla com status da operação e mensagem + """ + try: + # Buscar usuário pelo token + user = db.query(User).filter(User.verification_token == token).first() + + if not user: + logger.warning(f"Tentativa de verificação com token inválido: {token}") + return False, "Token de verificação inválido" + + # Verificar se o token expirou + if user.verification_token_expiry < datetime.utcnow(): + logger.warning(f"Tentativa de verificação com token expirado para usuário: {user.email}") + return False, "Token de verificação expirado" + + # Atualizar usuário + user.email_verified = True + user.is_active = True + user.verification_token = None + user.verification_token_expiry = None + + db.commit() + logger.info(f"Email verificado com sucesso para usuário: {user.email}") + return True, "Email verificado com sucesso. Sua conta está ativa." + + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao verificar email: {str(e)}") + return False, f"Erro ao verificar email: {str(e)}" + + except Exception as e: + logger.error(f"Erro inesperado ao verificar email: {str(e)}") + return False, f"Erro inesperado: {str(e)}" + +def resend_verification(db: Session, email: str) -> Tuple[bool, str]: + """ + Reenvia o email de verificação + + Args: + db: Sessão do banco de dados + email: Email do usuário + + Returns: + Tuple[bool, str]: Tupla com status da operação e mensagem + """ + try: + # Buscar usuário pelo email + user = db.query(User).filter(User.email == email).first() + + if not user: + logger.warning(f"Tentativa de reenvio de verificação para email inexistente: {email}") + return False, "Email não encontrado" + + if user.email_verified: + logger.info(f"Tentativa de reenvio de verificação para email já verificado: {email}") + return False, "Email já foi verificado" + + # Gerar novo token + verification_token = generate_token() + token_expiry = datetime.utcnow() + timedelta(hours=24) + + # Atualizar usuário + user.verification_token = verification_token + user.verification_token_expiry = token_expiry + + db.commit() + + # Enviar email + email_sent = send_verification_email(user.email, verification_token) + if not email_sent: + logger.error(f"Falha ao reenviar email de verificação para {user.email}") + return False, "Falha ao enviar email de verificação" + + logger.info(f"Email de verificação reenviado com sucesso para: {user.email}") + return True, "Email de verificação reenviado. Verifique sua caixa de entrada." 
+ + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao reenviar verificação: {str(e)}") + return False, f"Erro ao reenviar verificação: {str(e)}" + + except Exception as e: + logger.error(f"Erro inesperado ao reenviar verificação: {str(e)}") + return False, f"Erro inesperado: {str(e)}" + +def forgot_password(db: Session, email: str) -> Tuple[bool, str]: + """ + Inicia o processo de recuperação de senha + + Args: + db: Sessão do banco de dados + email: Email do usuário + + Returns: + Tuple[bool, str]: Tupla com status da operação e mensagem + """ + try: + # Buscar usuário pelo email + user = db.query(User).filter(User.email == email).first() + + if not user: + # Por segurança, não informamos se o email existe ou não + logger.info(f"Tentativa de recuperação de senha para email inexistente: {email}") + return True, "Se o email estiver cadastrado, você receberá instruções para redefinir sua senha." + + # Gerar token de reset + reset_token = generate_token() + token_expiry = datetime.utcnow() + timedelta(hours=1) # Token válido por 1 hora + + # Atualizar usuário + user.password_reset_token = reset_token + user.password_reset_expiry = token_expiry + + db.commit() + + # Enviar email + email_sent = send_password_reset_email(user.email, reset_token) + if not email_sent: + logger.error(f"Falha ao enviar email de recuperação de senha para {user.email}") + return False, "Falha ao enviar email de recuperação de senha" + + logger.info(f"Email de recuperação de senha enviado com sucesso para: {user.email}") + return True, "Se o email estiver cadastrado, você receberá instruções para redefinir sua senha." + + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao processar recuperação de senha: {str(e)}") + return False, f"Erro ao processar recuperação de senha: {str(e)}" + + except Exception as e: + logger.error(f"Erro inesperado ao processar recuperação de senha: {str(e)}") + return False, f"Erro inesperado: {str(e)}" + +def reset_password(db: Session, token: str, new_password: str) -> Tuple[bool, str]: + """ + Redefine a senha do usuário usando o token fornecido + + Args: + db: Sessão do banco de dados + token: Token de redefinição de senha + new_password: Nova senha + + Returns: + Tuple[bool, str]: Tupla com status da operação e mensagem + """ + try: + # Buscar usuário pelo token + user = db.query(User).filter(User.password_reset_token == token).first() + + if not user: + logger.warning(f"Tentativa de redefinição de senha com token inválido: {token}") + return False, "Token de redefinição de senha inválido" + + # Verificar se o token expirou + if user.password_reset_expiry < datetime.utcnow(): + logger.warning(f"Tentativa de redefinição de senha com token expirado para usuário: {user.email}") + return False, "Token de redefinição de senha expirado" + + # Atualizar senha + user.password_hash = get_password_hash(new_password) + user.password_reset_token = None + user.password_reset_expiry = None + + db.commit() + logger.info(f"Senha redefinida com sucesso para usuário: {user.email}") + return True, "Senha redefinida com sucesso. Você já pode fazer login com sua nova senha." 
+ + except SQLAlchemyError as e: + db.rollback() + logger.error(f"Erro ao redefinir senha: {str(e)}") + return False, f"Erro ao redefinir senha: {str(e)}" + + except Exception as e: + logger.error(f"Erro inesperado ao redefinir senha: {str(e)}") + return False, f"Erro inesperado: {str(e)}" + +def get_user_by_email(db: Session, email: str) -> Optional[User]: + """ + Busca um usuário pelo email + + Args: + db: Sessão do banco de dados + email: Email do usuário + + Returns: + Optional[User]: Usuário encontrado ou None + """ + try: + return db.query(User).filter(User.email == email).first() + except Exception as e: + logger.error(f"Erro ao buscar usuário por email: {str(e)}") + return None + +def authenticate_user(db: Session, email: str, password: str) -> Optional[User]: + """ + Autentica um usuário com email e senha + + Args: + db: Sessão do banco de dados + email: Email do usuário + password: Senha do usuário + + Returns: + Optional[User]: Usuário autenticado ou None + """ + user = get_user_by_email(db, email) + if not user: + return None + if not verify_password(password, user.password_hash): + return None + if not user.is_active: + return None + return user \ No newline at end of file diff --git a/src/utils/security.py b/src/utils/security.py new file mode 100644 index 00000000..c169c913 --- /dev/null +++ b/src/utils/security.py @@ -0,0 +1,41 @@ +from passlib.context import CryptContext +from datetime import datetime, timedelta +import secrets +import string +from jose import jwt +from src.config.settings import settings +import logging + +logger = logging.getLogger(__name__) + +# Contexto para hash de senhas usando bcrypt +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + +def get_password_hash(password: str) -> str: + """Cria um hash da senha fornecida""" + return pwd_context.hash(password) + +def verify_password(plain_password: str, hashed_password: str) -> bool: + """Verifica se a senha fornecida corresponde ao hash armazenado""" + return pwd_context.verify(plain_password, hashed_password) + +def create_jwt_token(data: dict, expires_delta: timedelta = None) -> str: + """Cria um token JWT""" + to_encode = data.copy() + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta( + minutes=settings.JWT_EXPIRATION_TIME + ) + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode( + to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM + ) + return encoded_jwt + +def generate_token(length: int = 32) -> str: + """Gera um token seguro para verificação de email ou reset de senha""" + alphabet = string.ascii_letters + string.digits + token = ''.join(secrets.choice(alphabet) for _ in range(length)) + return token \ No newline at end of file
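
A minimal client-side sketch of the JWT flow introduced in this change (not part of the diff itself): it logs in against `/api/v1/auth/login` and reuses the bearer token on a protected route. It assumes a local deployment on the default `APP_URL` (`http://localhost:8000`), that the demo seeder user whose defaults appear in `settings.py` (`DEMO_EMAIL` / `DEMO_PASSWORD`) has been created and activated, and it relies on the third-party `requests` package.

```python
import requests

BASE_URL = "http://localhost:8000/api/v1"  # assumed local deployment (settings.APP_URL default + router prefix)

# 1. Log in with the UserLogin schema (email + password); the response follows TokenResponse.
login = requests.post(
    f"{BASE_URL}/auth/login",
    json={"email": "demo@exemplo.com", "password": "demo123"},  # demo seeder defaults from settings.py (assumed seeded)
)
login.raise_for_status()
token = login.json()["access_token"]

# 2. Reuse the token on a protected route; OAuth2PasswordBearer expects "Authorization: Bearer <token>".
clients = requests.get(
    f"{BASE_URL}/clients/",
    headers={"Authorization": f"Bearer {token}"},
)
print(clients.status_code, clients.json())
```

With this flow, a missing or expired token should be rejected with 401 by `get_jwt_token`, and a non-admin token reaching another client's resources should be rejected with 403 by `verify_user_client`.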