Mirror of https://github.com/EvolutionAPI/adk-python.git (synced 2025-12-20 12:22:19 -06:00)
Copybara import of the project:
-- ad923c2c8c503ba73c62db695e88f1a3ea1aeeea by YU MING HSU <abego452@gmail.com>:
   docs: enhance Contribution process within CONTRIBUTING.md

-- 8022924fb7e975ac278d38fce3b5fd593d874536 by YU MING HSU <abego452@gmail.com>:
   fix: move _maybe_append_user_content from google_llm.py to base_llm.py so subclasses can benefit from it; call _maybe_append_user_content from generate_content_async within lite_llm.py

-- cf891fb1a3bbccaaf9d0055b23f614ce52449977 by YU MING HSU <abego452@gmail.com>:
   fix: modify install dependencies cmd, and use pyink to format codebase

COPYBARA_INTEGRATE_REVIEW=https://github.com/google/adk-python/pull/428 from hsuyuming:fix_litellm_error_issue_427 dbec4949798e6399a0410d1b8ba7cc6a7cad7bdd

PiperOrigin-RevId: 754124679
Committed by: Copybara-Service
Parent: 8f94a0c7b3
Commit: 879064343c
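The functional change in this import is the second commit: `_maybe_append_user_content` moves from `google_llm.py` into the shared `base_llm.py`, and `LiteLlm.generate_content_async` now calls it before building the LiteLLM request. The sketch below shows that inheritance pattern in isolation; the `Content`, `LlmRequest`, and helper bodies are simplified stand-ins, not the actual ADK classes.

```python
from dataclasses import dataclass, field


@dataclass
class Content:
  """Simplified stand-in for a google.genai content: a role plus plain text."""
  role: str
  text: str


@dataclass
class LlmRequest:
  """Simplified stand-in for the ADK LlmRequest."""
  contents: list[Content] = field(default_factory=list)


class BaseLlm:
  """Base class that now owns the shared helper."""

  def _maybe_append_user_content(self, llm_request: LlmRequest) -> None:
    # If the conversation is empty or does not end with a user turn, append a
    # generic user message so the backend has something to respond to.
    # (Illustrative behavior only; the real ADK helper may differ in detail.)
    if not llm_request.contents or llm_request.contents[-1].role != "user":
      llm_request.contents.append(Content(role="user", text="Continue."))


class LiteLlm(BaseLlm):
  """LiteLLM-backed model; inherits the guard instead of reimplementing it."""

  async def generate_content_async(self, llm_request: LlmRequest) -> str:
    self._maybe_append_user_content(llm_request)  # the call added by this PR
    return f"would send {len(llm_request.contents)} message(s) via litellm"
```

Any other backend subclassing `BaseLlm` (such as the Gemini one in `google_llm.py`) inherits the same guard without duplicating it.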
@@ -172,19 +172,19 @@ def _content_to_message_param(
  tool_calls = []
  content_present = False
  for part in content.parts:
    if part.function_call:
      tool_calls.append(
          ChatCompletionMessageToolCall(
              type="function",
              id=part.function_call.id,
              function=Function(
                  name=part.function_call.name,
                  arguments=part.function_call.args,
              ),
          )
      )
    elif part.text or part.inline_data:
      content_present = True

  final_content = message_content if content_present else None
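For context on the hunk above: `_content_to_message_param` turns google.genai-style parts into LiteLLM tool calls, collecting one `ChatCompletionMessageToolCall` per `function_call` part and only flagging `content_present` for text or inline-data parts. Below is a self-contained sketch of that loop, using plain dataclasses and dicts instead of the real genai/litellm types (those stand-ins are assumptions here, not the library API):

```python
from dataclasses import dataclass
from typing import Any


@dataclass
class FunctionCall:
  """Stand-in for the function_call payload of a genai part."""
  id: str
  name: str
  args: dict[str, Any]


@dataclass
class Part:
  """A part carries at most one of: a function call, text, or inline data."""
  function_call: FunctionCall | None = None
  text: str | None = None
  inline_data: bytes | None = None


def parts_to_tool_calls(parts: list[Part]) -> tuple[list[dict[str, Any]], bool]:
  """Mirrors the loop above: collect tool calls, note whether content exists."""
  tool_calls: list[dict[str, Any]] = []
  content_present = False
  for part in parts:
    if part.function_call:
      tool_calls.append({
          "type": "function",
          "id": part.function_call.id,
          "function": {
              "name": part.function_call.name,
              "arguments": part.function_call.args,
          },
      })
    elif part.text or part.inline_data:
      content_present = True
  return tool_calls, content_present


calls, has_content = parts_to_tool_calls(
    [Part(function_call=FunctionCall(id="1", name="get_weather", args={"city": "Taipei"}))]
)
assert len(calls) == 1 and not has_content
```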
@@ -453,9 +453,9 @@ def _get_completion_inputs(
  for content in llm_request.contents or []:
    message_param_or_list = _content_to_message_param(content)
    if isinstance(message_param_or_list, list):
      messages.extend(message_param_or_list)
    elif message_param_or_list:  # Ensure it's not None before appending
      messages.append(message_param_or_list)

  if llm_request.config.system_instruction:
    messages.insert(
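The converter called in this hunk may return a list of messages (for example when one content expands into several chat messages), a single message, or None, so the caller has to extend, append, or skip accordingly. A minimal illustration of that three-way handling, with `merge_messages` as a hypothetical stand-in for the real loop:

```python
def merge_messages(converted: list) -> list:
  """Flatten converter outputs that may be a list, a single item, or None."""
  messages = []
  for item in converted:
    if isinstance(item, list):
      messages.extend(item)   # one content produced several messages
    elif item:                # skip None (nothing worth sending)
      messages.append(item)
  return messages


assert merge_messages([["tool_call", "tool_result"], None, "user_text"]) == [
    "tool_call",
    "tool_result",
    "user_text",
]
```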
@@ -611,6 +611,7 @@ class LiteLlm(BaseLlm):
       LlmResponse: The model response.
     """
 
+    self._maybe_append_user_content(llm_request)
     logger.info(_build_request_log(llm_request))
 
     messages, tools = _get_completion_inputs(llm_request)