Mirror of https://github.com/EvolutionAPI/adk-python.git (synced 2025-07-16 04:02:55 -06:00)

chore: reformat codes using autoformat.sh

PiperOrigin-RevId: 764584699
parent 2b5c89b3a9
commit 0250d9e3ac
@@ -188,10 +188,10 @@ class VertexAiSessionService(BaseSessionService):
   ) -> ListSessionsResponse:
     reasoning_engine_id = _parse_reasoning_engine_id(app_name)
 
-    path = f"reasoningEngines/{reasoning_engine_id}/sessions"
+    path = f'reasoningEngines/{reasoning_engine_id}/sessions'
     if user_id:
-      parsed_user_id = urllib.parse.quote(f'''"{user_id}"''', safe="")
-      path = path + f"?filter=user_id={parsed_user_id}"
+      parsed_user_id = urllib.parse.quote(f'''"{user_id}"''', safe='')
+      path = path + f'?filter=user_id={parsed_user_id}'
 
     api_client = _get_api_client(self.project, self.location)
     api_response = await api_client.async_request(
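For readers following the filter construction above, a minimal sketch (not part of the commit; the engine id and user id are made up) of what the quoting step produces, and why the test regex further down expects %22 around the user id:

import urllib.parse

user_id = 'user-123'          # hypothetical id, for illustration only
reasoning_engine_id = '123'   # hypothetical id, for illustration only

# Wrap the id in literal double quotes, then percent-encode the whole token;
# safe='' exempts nothing, so the quotes become %22.
parsed_user_id = urllib.parse.quote(f'"{user_id}"', safe='')
path = f'reasoningEngines/{reasoning_engine_id}/sessions?filter=user_id={parsed_user_id}'

print(parsed_user_id)  # %22user-123%22
print(path)            # reasoningEngines/123/sessions?filter=user_id=%22user-123%22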
@@ -210,74 +210,76 @@ async def test_generate_content_async_stream(gemini_llm, llm_request):
(formatting-only changes in this hunk; the reformatted test body:)

async def test_generate_content_async_stream_preserves_thinking_and_text_parts(
    gemini_llm, llm_request
):
  with mock.patch.object(gemini_llm, "api_client") as mock_client:

    class MockAsyncIterator:

      def __init__(self, seq):
        self._iter = iter(seq)

      def __aiter__(self):
        return self

      async def __anext__(self):
        try:
          return next(self._iter)
        except StopIteration:
          raise StopAsyncIteration

    response1 = types.GenerateContentResponse(
        candidates=[
            types.Candidate(
                content=Content(
                    role="model",
                    parts=[Part(text="Think1", thought=True)],
                ),
                finish_reason=None,
            )
        ]
    )
    response2 = types.GenerateContentResponse(
        candidates=[
            types.Candidate(
                content=Content(
                    role="model",
                    parts=[Part(text="Think2", thought=True)],
                ),
                finish_reason=None,
            )
        ]
    )
    response3 = types.GenerateContentResponse(
        candidates=[
            types.Candidate(
                content=Content(
                    role="model",
                    parts=[Part.from_text(text="Answer.")],
                ),
                finish_reason=types.FinishReason.STOP,
            )
        ]
    )

    async def mock_coro():
      return MockAsyncIterator([response1, response2, response3])

    mock_client.aio.models.generate_content_stream.return_value = mock_coro()

    responses = [
        resp
        async for resp in gemini_llm.generate_content_async(
            llm_request, stream=True
        )
    ]

    assert len(responses) == 4
    assert responses[0].partial is True
    assert responses[1].partial is True
    assert responses[2].partial is True
    assert responses[3].content.parts[0].text == "Think1Think2"
    assert responses[3].content.parts[0].thought is True
    assert responses[3].content.parts[1].text == "Answer."
    mock_client.aio.models.generate_content_stream.assert_called_once()


@pytest.mark.asyncio
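The MockAsyncIterator helper in the hunk above adapts a plain list to the async-iteration protocol so a streaming client method can be mocked. A self-contained sketch (assumed names, no ADK dependencies) of why an awaitable that resolves to such an iterator can be consumed with await plus async for, mirroring how the test feeds three fake responses to the streaming call:

import asyncio


class MockAsyncIterator:
  """Adapts a plain sequence to the async-iterator protocol."""

  def __init__(self, seq):
    self._iter = iter(seq)

  def __aiter__(self):
    return self

  async def __anext__(self):
    try:
      return next(self._iter)
    except StopIteration:
      raise StopAsyncIteration


async def fake_stream():
  # Mirrors mock_coro() above: an awaitable that resolves to the iterator.
  return MockAsyncIterator(['chunk1', 'chunk2', 'chunk3'])


async def main():
  stream = await fake_stream()          # await first ...
  chunks = [chunk async for chunk in stream]  # ... then iterate
  assert chunks == ['chunk1', 'chunk2', 'chunk3']


asyncio.run(main())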
@@ -111,7 +111,9 @@ MOCK_SESSION = Session(
 
 
 SESSION_REGEX = r'^reasoningEngines/([^/]+)/sessions/([^/]+)$'
-SESSIONS_REGEX = r'^reasoningEngines/([^/]+)/sessions\?filter=user_id=%22([^%]+)%22.*$'  # %22 represents double-quotes in a URL-encoded string
+SESSIONS_REGEX = (  # %22 represents double-quotes in a URL-encoded string
+    r'^reasoningEngines/([^/]+)/sessions\?filter=user_id=%22([^%]+)%22.*$'
+)
 EVENTS_REGEX = r'^reasoningEngines/([^/]+)/sessions/([^/]+)/events$'
 LRO_REGEX = r'^operations/([^/]+)$'
 
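A quick check (not from the commit; the ids are hypothetical) that the re-wrapped SESSIONS_REGEX still matches the path shape built by the session-service hunk above, capturing the user id without its encoded quotes:

import re

SESSIONS_REGEX = (  # %22 represents double-quotes in a URL-encoded string
    r'^reasoningEngines/([^/]+)/sessions\?filter=user_id=%22([^%]+)%22.*$'
)

# Hypothetical path in the shape produced by list_sessions.
path = 'reasoningEngines/123/sessions?filter=user_id=%22user-123%22'

match = re.match(SESSIONS_REGEX, path)
assert match is not None
assert match.group(1) == '123'       # reasoning engine id
assert match.group(2) == 'user-123'  # user id, without the encoded quotes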
@@ -156,7 +158,7 @@ class MockApiClient:
       return {
           'name': path,
           'done': True,
-          'response': self.session_dict['4']  # Return the created session
+          'response': self.session_dict['4'],  # Return the created session
       }
     else:
       raise ValueError(f'Unsupported path: {path}')
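The mock above answers an operation lookup with a long-running-operation style dict ('name', 'done', 'response'). A rough sketch (hypothetical helper, not part of the repository) of how a caller can poll such a dict until it reports done:

def wait_for_operation(get_operation, name, attempts=10):
  """Polls get_operation(name) until the returned dict reports done=True."""
  for _ in range(attempts):
    op = get_operation(name)
    if op.get('done'):
      return op.get('response')
  raise TimeoutError(f'operation {name} did not complete')


# With the mock above, the first poll already reports done=True.
fake_op = {'name': 'operations/42', 'done': True, 'response': {'id': '4'}}
assert wait_for_operation(lambda name: fake_op, 'operations/42') == {'id': '4'}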