Merge branch 'release/0.0.2'

Davidson Gomes 2025-05-12 17:12:48 -03:00
commit bafbd494ed
8 changed files with 318 additions and 222 deletions

View File

@@ -39,6 +39,10 @@ SENDGRID_API_KEY="your-sendgrid-api-key"
 EMAIL_FROM="noreply@yourdomain.com"
 APP_URL="https://yourdomain.com"
+LANGFUSE_PUBLIC_KEY="your-langfuse-public-key"
+LANGFUSE_SECRET_KEY="your-langfuse-secret-key"
+OTEL_EXPORTER_OTLP_ENDPOINT="https://cloud.langfuse.com/api/public/otel"

 # Server settings
 HOST="0.0.0.0"
 PORT=8000

View File

@@ -49,6 +49,8 @@ dependencies = [
     "jwcrypto==1.5.6",
     "pyjwt[crypto]==2.9.0",
     "langgraph==0.4.1",
+    "opentelemetry-sdk==1.33.0",
+    "opentelemetry-exporter-otlp==1.33.0",
 ]

 [project.optional-dependencies]

View File

@@ -102,9 +102,8 @@ async def websocket_chat(
             memory_service=memory_service,
             db=db,
         ):
-            # Send each chunk as a JSON message
             await websocket.send_json(
-                {"message": chunk, "turn_complete": False}
+                {"message": json.loads(chunk), "turn_complete": False}
             )

         # Send signal of complete turn
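
Since run_agent_stream now yields JSON-encoded event dicts rather than plain text chunks, the route decodes each chunk with json.loads before forwarding it. A minimal client-side sketch of consuming this stream, assuming the third-party websockets package and a placeholder endpoint URL (the real path, and any initial message or auth the route expects, are not shown in this diff):

import asyncio
import json

import websockets  # third-party client library, not part of this repo


async def consume_chat(url: str) -> None:
    # url placeholder, e.g. "ws://localhost:8000/<chat-ws-path>/<agent_id>/<external_id>"
    async with websockets.connect(url) as ws:
        # depending on the route's protocol, an initial message may need to be sent here
        async for raw in ws:
            payload = json.loads(raw)
            if payload.get("turn_complete"):
                break
            print(payload["message"])  # now a full event dict, not a bare text string


# asyncio.run(consume_chat("ws://localhost:8000/..."))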

View File

@@ -84,6 +84,11 @@ class Settings(BaseSettings):
     DEMO_PASSWORD: str = os.getenv("DEMO_PASSWORD", "demo123")
     DEMO_CLIENT_NAME: str = os.getenv("DEMO_CLIENT_NAME", "Demo Client")

+    # Langfuse / OpenTelemetry settings
+    LANGFUSE_PUBLIC_KEY: str = os.getenv("LANGFUSE_PUBLIC_KEY", "")
+    LANGFUSE_SECRET_KEY: str = os.getenv("LANGFUSE_SECRET_KEY", "")
+    OTEL_EXPORTER_OTLP_ENDPOINT: str = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "")
+
     class Config:
         env_file = ".env"
         env_file_encoding = "utf-8"

View File

@@ -7,6 +7,7 @@ from fastapi.staticfiles import StaticFiles
 from src.config.database import engine, Base
 from src.config.settings import settings
 from src.utils.logger import setup_logger
+from src.utils.otel import init_otel

 # Necessary for other modules
 from src.services.service_providers import session_service  # noqa: F401

@@ -85,6 +86,9 @@ app.include_router(session_router, prefix=API_PREFIX)
 app.include_router(agent_router, prefix=API_PREFIX)
 app.include_router(a2a_router, prefix=API_PREFIX)

+# Initialize OpenTelemetry for Langfuse
+init_otel()
+

 @app.get("/")
 def read_root():

View File

@@ -10,6 +10,9 @@ from src.services.agent_builder import AgentBuilder
 from sqlalchemy.orm import Session
 from typing import Optional, AsyncGenerator
 import asyncio
+import json
+from src.utils.otel import get_tracer
+from opentelemetry import trace

 logger = setup_logger(__name__)
@@ -24,6 +27,16 @@ async def run_agent(
     db: Session,
     session_id: Optional[str] = None,
     timeout: float = 60.0,
 ):
+    tracer = get_tracer()
+    with tracer.start_as_current_span(
+        "run_agent",
+        attributes={
+            "agent_id": agent_id,
+            "external_id": external_id,
+            "session_id": session_id or f"{external_id}_{agent_id}",
+            "message": message,
+        },
+    ):
         exit_stack = None
         try:
@@ -109,7 +122,9 @@ async def run_agent(
             if last_response:
                 await response_queue.put(last_response)
             else:
-                await response_queue.put("Finished without specific response")
+                await response_queue.put(
+                    "Finished without specific response"
+                )
             execution_completed.set()
         except Exception as e:
@@ -127,7 +142,9 @@ async def run_agent(
                 p.cancel()

         if not execution_completed.is_set():
-            logger.warning(f"Agent execution timed out after {timeout} seconds")
+            logger.warning(
+                f"Agent execution timed out after {timeout} seconds"
+            )
             await response_queue.put(
                 "The response took too long and was interrupted."
             )
@@ -180,6 +197,17 @@ async def run_agent(
         # Do not raise the exception to not obscure the original error


+def convert_sets(obj):
+    if isinstance(obj, set):
+        return list(obj)
+    elif isinstance(obj, dict):
+        return {k: convert_sets(v) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [convert_sets(i) for i in obj]
+    else:
+        return obj
+
+
 async def run_agent_stream(
     agent_id: str,
     external_id: str,
@@ -190,6 +218,18 @@ async def run_agent_stream(
     db: Session,
     session_id: Optional[str] = None,
 ) -> AsyncGenerator[str, None]:
+    tracer = get_tracer()
+    span = tracer.start_span(
+        "run_agent_stream",
+        attributes={
+            "agent_id": agent_id,
+            "external_id": external_id,
+            "session_id": session_id or f"{external_id}_{agent_id}",
+            "message": message,
+        },
+    )
+    try:
+        with trace.use_span(span, end_on_exit=True):
            try:
                logger.info(
                    f"Starting streaming execution of agent {agent_id} for external_id {external_id}"
@@ -246,11 +286,9 @@ async def run_agent_stream(
                )

                async for event in events_async:
-                    if event.content and event.content.parts:
-                        text = event.content.parts[0].text
-                        if text:
-                            yield text
-                    await asyncio.sleep(0)  # Allow other tasks to run
+                    event_dict = event.dict()
+                    event_dict = convert_sets(event_dict)
+                    yield json.dumps(event_dict)

                completed_session = session_service.get_session(
                    app_name=agent_id,
@@ -276,5 +314,9 @@ async def run_agent_stream(
                logger.error(f"Error processing request: {str(e)}")
                raise e
            except Exception as e:
-                logger.error(f"Internal error processing request: {str(e)}", exc_info=True)
+                logger.error(
+                    f"Internal error processing request: {str(e)}", exc_info=True
+                )
                raise InternalServerError(str(e))
+    finally:
+        span.end()
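
The new convert_sets helper is there presumably because event.dict() can contain Python sets, which json.dumps rejects. A tiny self-contained illustration of the round-trip (the sample dict is invented, and the helper is re-declared only so the snippet runs on its own):

import json


def convert_sets(obj):
    # same logic as the helper added in this diff
    if isinstance(obj, set):
        return list(obj)
    elif isinstance(obj, dict):
        return {k: convert_sets(v) for k, v in obj.items()}
    elif isinstance(obj, list):
        return [convert_sets(i) for i in obj]
    return obj


event_dict = {"author": "agent", "tags": {"tool_call", "final"}}  # made-up sample
# json.dumps(event_dict) raises: TypeError: Object of type set is not JSON serializable
print(json.dumps(convert_sets(event_dict)))  # sets become lists, so this serializes fine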

View File

@@ -304,7 +304,6 @@ class WorkflowAgent(BaseAgent):
                 "session_id": session_id,
             }

-        # Função para message-node
         async def message_node_function(
             state: State, node_id: str, node_data: Dict[str, Any]
         ) -> AsyncGenerator[State, None]:
@@ -318,7 +317,6 @@ class WorkflowAgent(BaseAgent):
             session_id = state.get("session_id", "")
             conversation_history = state.get("conversation_history", [])

-            # Adiciona a mensagem como um novo Event do tipo agent
             new_event = Event(
                 author="agent",
                 content=Content(parts=[Part(text=message_content)]),
@@ -750,7 +748,7 @@ class WorkflowAgent(BaseAgent):
             content=Content(parts=[Part(text=user_message)]),
         )

-        # Se o histórico estiver vazio, adiciona a mensagem do usuário
+        # If the conversation history is empty, add the user message
         conversation_history = ctx.session.events or []
         if not conversation_history or (len(conversation_history) == 0):
             conversation_history = [user_event]
@@ -768,16 +766,17 @@ class WorkflowAgent(BaseAgent):
         print("\n🚀 Starting workflow execution:")
         print(f"Initial content: {user_message[:100]}...")

-        # Execute the graph with a recursion limit to avoid infinite loops
-        result = await graph.ainvoke(initial_state, {"recursion_limit": 20})
-
-        # 6. Process and return the result
-        final_content = result.get("content", [])
-        print(f"\n✅ FINAL RESULT: {final_content[:100]}...")
-
-        for content in final_content:
-            if content.author != "user":
-                yield content
+        sent_events = 0  # Count of events already sent
+
+        async for state in graph.astream(initial_state, {"recursion_limit": 20}):
+            # The state can be a dict with the node name as a key
+            for node_state in state.values():
+                content = node_state.get("content", [])
+                # Only send new events
+                for event in content[sent_events:]:
+                    if event.author != "user":
+                        yield event
+                sent_events = len(content)

         # Execute sub-agents
         for sub_agent in self.sub_agents:
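
The switch from graph.ainvoke to graph.astream makes the workflow stream incrementally: each step yields a partial state keyed by node name, and the sent_events counter ensures only events not yet emitted are yielded. A self-contained sketch of that pattern, using a stand-in generator instead of a real LangGraph graph and plain dicts instead of ADK events:

import asyncio


async def fake_astream():
    # Stand-in for graph.astream(): each step yields {node_name: partial_state},
    # where "content" accumulates every event produced so far.
    history = []
    for step, author in enumerate(["user", "agent", "agent"]):
        history = history + [{"author": author, "text": f"event {step}"}]
        yield {f"node_{step}": {"content": history}}


async def stream_new_events():
    sent_events = 0  # count of events already forwarded downstream
    async for state in fake_astream():
        for node_state in state.values():
            content = node_state.get("content", [])
            for event in content[sent_events:]:  # skip events from earlier steps
                if event["author"] != "user":
                    print("yielded:", event["text"])
            sent_events = len(content)


asyncio.run(stream_new_events())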

src/utils/otel.py (new file, 41 lines)
View File

@@ -0,0 +1,41 @@
+import os
+import base64
+
+from src.config.settings import settings
+
+from opentelemetry import trace
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+
+_otlp_initialized = False
+
+
+def init_otel():
+    global _otlp_initialized
+    if _otlp_initialized:
+        return
+    if not (
+        settings.LANGFUSE_PUBLIC_KEY
+        and settings.LANGFUSE_SECRET_KEY
+        and settings.OTEL_EXPORTER_OTLP_ENDPOINT
+    ):
+        return
+
+    langfuse_auth = base64.b64encode(
+        f"{settings.LANGFUSE_PUBLIC_KEY}:{settings.LANGFUSE_SECRET_KEY}".encode()
+    ).decode()
+
+    os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = settings.OTEL_EXPORTER_OTLP_ENDPOINT
+    os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {langfuse_auth}"
+
+    provider = TracerProvider(
+        resource=Resource.create({"service.name": "evo_ai_agent"})
+    )
+    exporter = OTLPSpanExporter()
+    provider.add_span_processor(BatchSpanProcessor(exporter))
+    trace.set_tracer_provider(provider)
+
+    _otlp_initialized = True
+
+
+def get_tracer(name: str = "evo_ai_agent"):
+    return trace.get_tracer(name)
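
A short usage sketch tying the two helpers together, mirroring how main.py and agent_runner.py use them in this commit (the span name and attribute here are arbitrary):

from src.utils.otel import init_otel, get_tracer

# No-op unless LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY and
# OTEL_EXPORTER_OTLP_ENDPOINT are all set (via .env or the environment).
init_otel()

tracer = get_tracer()
with tracer.start_as_current_span("smoke-test", attributes={"check": "otel-wiring"}):
    pass  # the BatchSpanProcessor batches and exports this span to the configured Langfuse OTLP endpoint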