From 97c15ca6bd37e2a9e8e23999d44e0c693fa5ef5e Mon Sep 17 00:00:00 2001
From: Davidson Gomes
Date: Thu, 8 May 2025 18:44:50 -0300
Subject: [PATCH 1/6] refactor(auth): temporarily enable auto-verification for user registration

---
 src/api/auth_routes.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/api/auth_routes.py b/src/api/auth_routes.py
index ee044337..86a3548d 100644
--- a/src/api/auth_routes.py
+++ b/src/api/auth_routes.py
@@ -54,7 +54,8 @@ async def register_user(user_data: UserCreate, db: Session = Depends(get_db)):
     Raises:
         HTTPException: If there is an error in registration
     """
-    user, message = create_user(db, user_data, is_admin=False, auto_verify=False)
+    # TODO: temporary auto_verify override for testing; remove later
+    user, message = create_user(db, user_data, is_admin=False, auto_verify=True)
     if not user:
         logger.error(f"Error registering user: {message}")
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)

From 9deeb02df81497d9675872056fa8488bf9d542a6 Mon Sep 17 00:00:00 2001
From: Davidson Gomes
Date: Fri, 9 May 2025 10:36:33 -0300
Subject: [PATCH 2/6] refactor(client_routes): remove leading slash from post route for client creation

---
 src/api/client_routes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/api/client_routes.py b/src/api/client_routes.py
index 6d0f1bda..2429f6b3 100644
--- a/src/api/client_routes.py
+++ b/src/api/client_routes.py
@@ -37,7 +37,7 @@ router = APIRouter(
 )


-@router.post("/", response_model=Client, status_code=status.HTTP_201_CREATED)
+@router.post("", response_model=Client, status_code=status.HTTP_201_CREATED)
 async def create_user(
     registration: ClientRegistration,
     db: Session = Depends(get_db),
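
Note on the path change in PATCH 2: with a prefixed APIRouter, registering the handler at "" makes the bare prefix URL match directly, whereas "/" only matches the trailing-slash form and, with default settings, un-slashed requests are first answered with a 307 redirect. A minimal sketch, assuming a hypothetical "/clients" prefix (the real prefix is set where this router is constructed and is not shown in the hunk):

    from fastapi import APIRouter, FastAPI

    router = APIRouter(prefix="/clients")  # assumed prefix, for illustration only

    @router.post("")  # POST /clients is matched directly, no redirect to /clients/
    async def create_client():
        return {"status": "created"}

    app = FastAPI()
    app.include_router(router)
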
From b32a044cac97f5bbf908bbf8308169b5b24becdb Mon Sep 17 00:00:00 2001
From: Davidson Gomes
Date: Fri, 9 May 2025 20:42:30 -0300
Subject: [PATCH 3/6] refactor(agent_builder, custom_tools): simplify tool configuration handling and ensure parameter defaults

---
 Dockerfile                    |  2 ++
 src/services/agent_builder.py |  3 +-
 src/services/custom_tools.py  | 60 +++++++++++++++++++++++------------
 src/services/mcp_service.py   |  3 ++
 4 files changed, 45 insertions(+), 23 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index ea1c043c..f7dca666 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,6 +21,8 @@ RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \

 RUN curl -fsSL https://get.docker.com | bash

+RUN curl -LsSf https://astral.sh/uv/install.sh | sh
+
 COPY . .

 RUN pip install --no-cache-dir -e .

diff --git a/src/services/agent_builder.py b/src/services/agent_builder.py
index 41fbf2b9..6713b455 100644
--- a/src/services/agent_builder.py
+++ b/src/services/agent_builder.py
@@ -32,8 +32,7 @@ class AgentBuilder:
         """Create an LLM agent from the agent data."""
         # Get custom tools from the configuration
         custom_tools = []
-        if agent.config.get("tools"):
-            custom_tools = self.custom_tool_builder.build_tools(agent.config["tools"])
+        custom_tools = self.custom_tool_builder.build_tools(agent.config)

         # Get MCP tools from the configuration
         mcp_tools = []
diff --git a/src/services/custom_tools.py b/src/services/custom_tools.py
index 9d603395..002a4c7b 100644
--- a/src/services/custom_tools.py
+++ b/src/services/custom_tools.py
@@ -18,10 +18,15 @@ class CustomToolBuilder:
         endpoint = tool_config["endpoint"]
         method = tool_config["method"]
         headers = tool_config.get("headers", {})
-        parameters = tool_config.get("parameters", {})
+        parameters = tool_config.get("parameters", {}) or {}
         values = tool_config.get("values", {})
         error_handling = tool_config.get("error_handling", {})

+        # Ensure all parameter groups are dicts
+        path_params = parameters.get("path_params") or {}
+        query_params = parameters.get("query_params") or {}
+        body_params = parameters.get("body_params") or {}
+
         def http_tool(**kwargs):
             try:
                 # Combines default values with provided values
@@ -35,32 +40,30 @@

                 # Processes path parameters
                 url = endpoint
-                for param, value in parameters.get("path_params", {}).items():
+                for param, value in path_params.items():
                     if param in all_values:
                         url = url.replace(f"{{{param}}}", str(all_values[param]))

                 # Process query parameters
-                query_params = {}
-                for param, value in parameters.get("query_params", {}).items():
+                query_params_dict = {}
+                for param, value in query_params.items():
                     if isinstance(value, list):
                         # If the value is a list, join with comma
-                        query_params[param] = ",".join(value)
+                        query_params_dict[param] = ",".join(value)
                     elif param in all_values:
                         # If the parameter is in the values, use the provided value
-                        query_params[param] = all_values[param]
+                        query_params_dict[param] = all_values[param]
                     else:
                         # Otherwise, use the default value from the configuration
-                        query_params[param] = value
+                        query_params_dict[param] = value

                 # Adds default values to query params if they are not present
                 for param, value in values.items():
-                    if param not in query_params and param not in parameters.get(
-                        "path_params", {}
-                    ):
-                        query_params[param] = value
+                    if param not in query_params_dict and param not in path_params:
+                        query_params_dict[param] = value

                 body_data = {}
-                for param, param_config in parameters.get("body_params", {}).items():
+                for param, param_config in body_params.items():
                     if param in all_values:
                         body_data[param] = all_values[param]

@@ -68,8 +71,8 @@
                 for param, value in values.items():
                     if (
                         param not in body_data
-                        and param not in query_params
-                        and param not in parameters.get("path_params", {})
+                        and param not in query_params_dict
+                        and param not in path_params
                     ):
                         body_data[param] = value

@@ -78,7 +81,7 @@
                     method=method,
                     url=url,
                     headers=processed_headers,
-                    params=query_params,
+                    params=query_params_dict,
                     json=body_data if body_data else None,
                     timeout=error_handling.get("timeout", 30),
                 )
@@ -104,18 +107,18 @@
         param_docs = []
         # Adds path parameters
-        for param, value in parameters.get("path_params", {}).items():
+        for param, value in path_params.items():
             param_docs.append(f"{param}: {value}")

         # Adds query parameters
-        for param, value in parameters.get("query_params", {}).items():
+        for param, value in query_params.items():
             if isinstance(value, list):
                 param_docs.append(f"{param}: List[{', '.join(value)}]")
             else:
                 param_docs.append(f"{param}: {value}")

         # Adds body parameters
-        for param, param_config in parameters.get("body_params", {}).items():
+        for param, param_config in body_params.items():
             required = "Required" if param_config.get("required", False) else "Optional"
             param_docs.append(
                 f"{param} ({param_config['type']}, {required}): {param_config['description']}"
             )
@@ -143,11 +146,26 @@
         return FunctionTool(func=http_tool)

     def build_tools(self, tools_config: Dict[str, Any]) -> List[FunctionTool]:
-        """Builds a list of tools based on the provided configuration."""
+        """Builds a list of tools based on the provided configuration. Accepts both 'tools' and 'custom_tools' (with http_tools)."""
         self.tools = []

-        # Processes HTTP tools
-        for http_tool_config in tools_config.get("http_tools", []):
+        # Accept either 'tools' or 'custom_tools' (with http_tools)
+        http_tools = []
+        if tools_config.get("http_tools"):
+            http_tools = tools_config.get("http_tools", [])
+        elif tools_config.get("custom_tools") and tools_config["custom_tools"].get(
+            "http_tools"
+        ):
+            http_tools = tools_config["custom_tools"].get("http_tools", [])
+        # Also support 'tools' being a dict with http_tools
+        elif (
+            tools_config.get("tools")
+            and isinstance(tools_config["tools"], dict)
+            and tools_config["tools"].get("http_tools")
+        ):
+            http_tools = tools_config["tools"].get("http_tools", [])
+
+        for http_tool_config in http_tools:
             self.tools.append(self._create_http_tool(http_tool_config))

         return self.tools
diff --git a/src/services/mcp_service.py b/src/services/mcp_service.py
index a064e590..43911d19 100644
--- a/src/services/mcp_service.py
+++ b/src/services/mcp_service.py
@@ -79,6 +79,9 @@
         self, tools: List[Any], agent_tools: List[str]
     ) -> List[Any]:
         """Filters tools compatible with the agent."""
+        if not agent_tools or len(agent_tools) == 0:
+            return tools
+
         filtered_tools = []
         for tool in tools:
             logger.info(f"Tool: {tool.name}")

From c93fe6a8db6f3f07ac571bbe2242daf046215200 Mon Sep 17 00:00:00 2001
From: Davidson Gomes
Date: Fri, 9 May 2025 21:19:33 -0300
Subject: [PATCH 4/6] feat(agent_service, agent_builder, schemas): add agent_tools field to agent configuration and implement processing logic

---
 src/schemas/agent_config.py   |  3 +++
 src/services/agent_builder.py | 18 +++++++++++++++++-
 src/services/agent_service.py | 20 ++++++++++++++++++++
 3 files changed, 40 insertions(+), 1 deletion(-)

diff --git a/src/schemas/agent_config.py b/src/schemas/agent_config.py
index a74bfc82..b3ac9040 100644
--- a/src/schemas/agent_config.py
+++ b/src/schemas/agent_config.py
@@ -137,6 +137,9 @@ class LLMConfig(BaseModel):
     custom_mcp_servers: Optional[List[CustomMCPServerConfig]] = Field(
         default=None, description="List of custom MCP servers with URL and headers"
     )
+    agent_tools: Optional[List[UUID]] = Field(
+        default=None, description="List of IDs of agents to be used as tools"
+    )
     sub_agents: Optional[List[UUID]] = Field(
         default=None, description="List of IDs of sub-agents"
     )
diff --git a/src/services/agent_builder.py b/src/services/agent_builder.py
index 6713b455..a8ad3738 100644
--- a/src/services/agent_builder.py
+++ b/src/services/agent_builder.py
@@ -2,6 +2,7 @@ from typing import List, Optional, Tuple
 from google.adk.agents.llm_agent import LlmAgent
 from google.adk.agents import SequentialAgent, ParallelAgent, LoopAgent, BaseAgent
 from google.adk.models.lite_llm import LiteLlm
+from google.adk.tools.agent_tool import AgentTool
 from src.utils.logger import setup_logger
 from src.core.exceptions import AgentNotFoundError
 from src.services.agent_service import get_agent
@@ -26,6 +27,18 @@ class AgentBuilder:
         self.custom_tool_builder = CustomToolBuilder()
         self.mcp_service = MCPService()

+    async def _agent_tools_builder(self, agent) -> List[AgentTool]:
+        """Build the tools for an agent."""
+        agent_tools_ids = agent.config.get("agent_tools")
+        agent_tools = []
+        if agent_tools_ids and isinstance(agent_tools_ids, list):
+            for agent_tool_id in agent_tools_ids:
+                sub_agent = get_agent(self.db, agent_tool_id)
+                llm_agent, _ = await self.build_llm_agent(sub_agent)
+                if llm_agent:
+                    agent_tools.append(AgentTool(agent=llm_agent))
+        return agent_tools
+
     async def _create_llm_agent(
         self, agent
     ) -> Tuple[LlmAgent, Optional[AsyncExitStack]]:
@@ -42,8 +55,11 @@
             agent.config, self.db
         )

+        # Get agent tools
+        agent_tools = await self._agent_tools_builder(agent)
+
         # Combine all tools
-        all_tools = custom_tools + mcp_tools
+        all_tools = custom_tools + mcp_tools + agent_tools

         now = datetime.now()
         current_datetime = now.strftime("%d/%m/%Y %H:%M")
diff --git a/src/services/agent_service.py b/src/services/agent_service.py
index 1d7f4c1d..516ada64 100644
--- a/src/services/agent_service.py
+++ b/src/services/agent_service.py
@@ -224,6 +224,9 @@ async def create_agent(db: Session, agent: AgentCreate) -> Agent:
         if "custom_tools" in config:
             processed_config["custom_tools"] = config["custom_tools"]

+        if "agent_tools" in config:
+            processed_config["agent_tools"] = config["agent_tools"]
+
         if "sub_agents" in config:
             processed_config["sub_agents"] = config["sub_agents"]

@@ -236,6 +239,7 @@
                 "tools",
                 "custom_tools",
                 "sub_agents",
+                "agent_tools",
                 "custom_mcp_servers",
                 "mcp_servers",
             ]:
@@ -303,6 +307,12 @@
                     str(agent_id) for agent_id in config["sub_agents"]
                 ]

+            # Process agent tools
+            if "agent_tools" in config and config["agent_tools"] is not None:
+                processed_config["agent_tools"] = [
+                    str(agent_id) for agent_id in config["agent_tools"]
+                ]
+
             # Process tools
             if "tools" in config and config["tools"] is not None:
                 processed_tools = []
@@ -484,6 +494,9 @@ async def update_agent(
         if "sub_agents" in config:
             processed_config["sub_agents"] = config["sub_agents"]

+        if "agent_tools" in config:
+            processed_config["agent_tools"] = config["agent_tools"]
+
         if "custom_mcp_servers" in config:
             processed_config["custom_mcp_servers"] = config["custom_mcp_servers"]

@@ -493,6 +506,7 @@
                 "tools",
                 "custom_tools",
                 "sub_agents",
+                "agent_tools",
                 "custom_mcp_servers",
                 "mcp_servers",
             ]:
@@ -563,6 +577,12 @@
                     str(agent_id) for agent_id in config["sub_agents"]
                 ]

+            # Process agent tools
+            if "agent_tools" in config and config["agent_tools"] is not None:
+                processed_config["agent_tools"] = [
+                    str(agent_id) for agent_id in config["agent_tools"]
+                ]
+
             # Process tools
             if "tools" in config and config["tools"] is not None:
                 processed_tools = []
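
Note on agent_tools in PATCH 4: the new field carries agent IDs that _agent_tools_builder resolves via get_agent and wraps as AgentTool instances, so these agents are offered to the LLM as callable tools rather than as delegation targets like sub_agents. A minimal sketch of a config fragment, with hypothetical IDs and only keys that appear in this series:

    config = {
        "agent_tools": [
            "3f1c9c2e-5b7a-4f0e-9a1d-0c2f6d8e4b11",  # hypothetical agent ID, wrapped as an AgentTool
        ],
        "sub_agents": [],                             # unchanged: delegation targets
        "custom_tools": {"http_tools": []},
    }
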
From 2b370a2a4d5fb8a4e34dbbd7e45a0e46ee806562 Mon Sep 17 00:00:00 2001
From: Davidson Gomes
Date: Fri, 9 May 2025 21:20:33 -0300
Subject: [PATCH 5/6] refactor(schemas, custom_tools): remove commented-out legacy code and clean up parameter handling

---
 src/schemas/schemas.py       | 12 ------------
 src/services/custom_tools.py |  3 ---
 2 files changed, 15 deletions(-)

diff --git a/src/schemas/schemas.py b/src/schemas/schemas.py
index 95d6aafc..01409b6d 100644
--- a/src/schemas/schemas.py
+++ b/src/schemas/schemas.py
@@ -122,18 +122,6 @@ class AgentBase(BaseModel):
     def validate_api_key_id(cls, v, values):
         return v

-    # Código anterior (comentado temporariamente)
-    # # Se o tipo for llm, api_key_id é obrigatório
-    # if "type" in values and values["type"] == "llm" and not v:
-    #     # Verifica se tem api_key no config (retrocompatibilidade)
-    #     if "config" in values and values["config"] and "api_key" in values["config"]:
-    #         # Tem api_key no config, então aceita
-    #         return v
-    #     raise ValueError(
-    #         "api_key_id é obrigatório para agentes do tipo llm"
-    #     )
-    #     return v
-
     @validator("config")
     def validate_config(cls, v, values):
         if "type" in values and values["type"] == "a2a":
diff --git a/src/services/custom_tools.py b/src/services/custom_tools.py
index 002a4c7b..ad2f8639 100644
--- a/src/services/custom_tools.py
+++ b/src/services/custom_tools.py
@@ -22,7 +22,6 @@ class CustomToolBuilder:
         values = tool_config.get("values", {})
         error_handling = tool_config.get("error_handling", {})

-        # Ensure all parameter groups are dicts
         path_params = parameters.get("path_params") or {}
         query_params = parameters.get("query_params") or {}
         body_params = parameters.get("body_params") or {}
@@ -149,7 +148,6 @@
         """Builds a list of tools based on the provided configuration. Accepts both 'tools' and 'custom_tools' (with http_tools)."""
         self.tools = []

-        # Accept either 'tools' or 'custom_tools' (with http_tools)
         http_tools = []
         if tools_config.get("http_tools"):
             http_tools = tools_config.get("http_tools", [])
@@ -157,7 +155,6 @@
             "http_tools"
         ):
             http_tools = tools_config["custom_tools"].get("http_tools", [])
-        # Also support 'tools' being a dict with http_tools
         elif (
             tools_config.get("tools")
             and isinstance(tools_config["tools"], dict)
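
With the elif chain shown above, build_tools() accepts the HTTP tool list in three equivalent shapes. A minimal sketch, using a hypothetical tool entry limited to the keys that _create_http_tool reads in the hunks shown:

    http_tool = {
        "endpoint": "https://api.example.com/items/{item_id}",
        "method": "GET",
        "headers": {"Authorization": "Bearer <token>"},
        "parameters": {
            "path_params": {"item_id": "ID of the item"},
            "query_params": {},
            "body_params": {},
        },
        "values": {},
        "error_handling": {"timeout": 30},
    }

    builder = CustomToolBuilder()
    builder.build_tools({"http_tools": [http_tool]})
    builder.build_tools({"custom_tools": {"http_tools": [http_tool]}})
    builder.build_tools({"tools": {"http_tools": [http_tool]}})
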
"node_outputs": node_outputs, + "cycle_count": state.get("cycle_count", 0), + "conversation_history": conversation_history, + "session_id": session_id, + } + return { "start-node": start_node_function, "agent-node": agent_node_function, "condition-node": condition_node_function, + "message-node": message_node_function, } def _evaluate_condition(self, condition: Dict[str, Any], state: State) -> bool: @@ -708,18 +745,23 @@ class WorkflowAgent(BaseAgent): graph = await self._create_graph(ctx, self.flow_json) # 4. Prepare the initial state + user_event = Event( + author="user", + content=Content(parts=[Part(text=user_message)]), + ) + + # Se o histórico estiver vazio, adiciona a mensagem do usuário + conversation_history = ctx.session.events or [] + if not conversation_history or (len(conversation_history) == 0): + conversation_history = [user_event] + initial_state = State( - content=[ - Event( - author="user", - content=Content(parts=[Part(text=user_message)]), - ) - ], + content=[user_event], status="started", session_id=session_id, cycle_count=0, node_outputs={}, - conversation_history=ctx.session.events, + conversation_history=conversation_history, ) # 5. Execute the graph