diff --git a/src/google/adk/agents/callback_context.py b/src/google/adk/agents/callback_context.py
index 9b9f237..93b785c 100644
--- a/src/google/adk/agents/callback_context.py
+++ b/src/google/adk/agents/callback_context.py
@@ -23,7 +23,6 @@ from .readonly_context import ReadonlyContext
 
 if TYPE_CHECKING:
   from google.genai import types
-  from ..events.event import Event
   from ..events.event_actions import EventActions
   from ..sessions.state import State
   from .invocation_context import InvocationContext
diff --git a/src/google/adk/flows/llm_flows/functions.py b/src/google/adk/flows/llm_flows/functions.py
index 805957e..d1b6f84 100644
--- a/src/google/adk/flows/llm_flows/functions.py
+++ b/src/google/adk/flows/llm_flows/functions.py
@@ -310,7 +310,7 @@ async def _process_function_live_helper(
       function_response = {
           'status': f'No active streaming function named {function_name} found'
       }
-  elif hasattr(tool, "func") and inspect.isasyncgenfunction(tool.func):
+  elif inspect.isasyncgenfunction(tool.func):
     print('is async')
 
     # for streaming tool use case
diff --git a/src/google/adk/models/llm_response.py b/src/google/adk/models/llm_response.py
index 895e7a0..d7a613d 100644
--- a/src/google/adk/models/llm_response.py
+++ b/src/google/adk/models/llm_response.py
@@ -14,7 +14,7 @@
 
 from __future__ import annotations
 
-from typing import Optional
+from typing import Any, Optional
 
 from google.genai import types
 from pydantic import BaseModel
@@ -37,6 +37,7 @@ class LlmResponse(BaseModel):
     error_message: Error message if the response is an error.
     interrupted: Flag indicating that LLM was interrupted when generating the
       content. Usually it's due to user interruption during a bidi streaming.
+    custom_metadata: The custom metadata of the LlmResponse.
   """
 
   model_config = ConfigDict(extra='forbid')
@@ -71,6 +72,14 @@ class LlmResponse(BaseModel):
   Usually it's due to user interruption during a bidi streaming.
   """
 
+  custom_metadata: Optional[dict[str, Any]] = None
+  """The custom metadata of the LlmResponse.
+
+  An optional key-value pair to label an LlmResponse.
+
+  NOTE: the entire dict must be JSON serializable.
+  """
+
   @staticmethod
   def create(
       generate_content_response: types.GenerateContentResponse,
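
Two notes on the hunks above, each with a small sketch. These are illustrations only, not part of the patch; every name introduced below (stream_prices, plain_tool, the 'source' and 'latency_ms' keys) is made up for the example.

The functions.py change drops the hasattr(tool, "func") guard, leaving inspect.isasyncgenfunction(tool.func) as the sole test for whether a tool gets the streaming treatment. A minimal sketch of what that stdlib predicate distinguishes:

    import inspect

    async def stream_prices(symbol: str):
        # An async generator function: yields results over time, which is
        # what the live flow treats as a streaming tool.
        yield {'symbol': symbol, 'price': 101.5}

    def plain_tool(symbol: str) -> dict:
        # A regular function: returns once, handled as a normal tool call.
        return {'symbol': symbol}

    assert inspect.isasyncgenfunction(stream_prices)
    assert not inspect.isasyncgenfunction(plain_tool)

One consequence worth flagging: without the hasattr guard, a tool object that reaches this branch but lacks a func attribute will raise AttributeError, so the change assumes every tool arriving here exposes .func.

The llm_response.py change adds the optional custom_metadata field. A usage sketch, assuming only what the diff itself shows (a Pydantic model with extra='forbid', and the docstring's requirement that the dict be JSON serializable):

    import json

    from google.adk.models.llm_response import LlmResponse

    # Attach illustrative labels; any JSON-serializable dict is allowed.
    response = LlmResponse(custom_metadata={'source': 'cache', 'latency_ms': 42})

    # Per the docstring, the entire dict must be JSON serializable,
    # so this call should not raise for valid metadata.
    json.dumps(response.custom_metadata)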