feat: Improve parallelization for remote services API calls (#1548)

* Provide the option to make remote service calls concurrent

Signed-off-by: Vinay Damodaran <vrdn@hey.com>

* Use yield from correctly?

Signed-off-by: Vinay Damodaran <vrdn@hey.com>

* not do amateur hour stuff

Signed-off-by: Vinay Damodaran <vrdn@hey.com>

---------

Signed-off-by: Vinay Damodaran <vrdn@hey.com>
Author: Vinay R Damodaran
Date: 2025-05-14 06:47:55 -07:00
Committed by: GitHub
Parent: 9f8b479f17
Commit: 3a04f2a367
3 changed files with 17 additions and 5 deletions
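
For context, the `concurrency` option introduced by this change is set on `PictureDescriptionApiOptions` alongside the existing remote-service settings (url, prompt, timeout, headers, params seen in the diff below). A minimal usage sketch, assuming docling's usual pipeline wiring (`PdfPipelineOptions`, `do_picture_description`, `picture_description_options`) and an example endpoint, not a definitive configuration:

    # Hedged sketch: enable concurrent picture-description API calls.
    # The endpoint URL, prompt, and concurrency value are illustrative.
    from docling.datamodel.base_models import InputFormat
    from docling.datamodel.pipeline_options import (
        PdfPipelineOptions,
        PictureDescriptionApiOptions,
    )
    from docling.document_converter import DocumentConverter, PdfFormatOption

    pipeline_options = PdfPipelineOptions()
    pipeline_options.do_picture_description = True
    pipeline_options.enable_remote_services = True  # required for API-based models
    pipeline_options.picture_description_options = PictureDescriptionApiOptions(
        url="http://localhost:8000/v1/chat/completions",  # example endpoint
        prompt="Describe the image in two sentences.",
        timeout=90,
        concurrency=4,  # new: number of parallel requests
    )

    converter = DocumentConverter(
        format_options={
            InputFormat.PDF: PdfFormatOption(pipeline_options=pipeline_options)
        }
    )
    result = converter.convert("example.pdf")
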


@@ -1,4 +1,5 @@
 from collections.abc import Iterable
+from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import Optional, Type, Union
@@ -37,6 +38,7 @@ class PictureDescriptionApiModel(PictureDescriptionBaseModel):
             accelerator_options=accelerator_options,
         )
         self.options: PictureDescriptionApiOptions
+        self.concurrency = self.options.concurrency

         if self.enabled:
             if not enable_remote_services:
@@ -48,8 +50,8 @@ class PictureDescriptionApiModel(PictureDescriptionBaseModel):
     def _annotate_images(self, images: Iterable[Image.Image]) -> Iterable[str]:
         # Note: technically we could make a batch request here,
         # but not all APIs will allow for it. For example, vllm won't allow more than 1.
-        for image in images:
-            yield api_image_request(
+        def _api_request(image):
+            return api_image_request(
                 image=image,
                 prompt=self.options.prompt,
                 url=self.options.url,
@@ -57,3 +59,6 @@ class PictureDescriptionApiModel(PictureDescriptionBaseModel):
                 headers=self.options.headers,
                 **self.options.params,
             )
+
+        with ThreadPoolExecutor(max_workers=self.concurrency) as executor:
+            yield from executor.map(_api_request, images)
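
The core of the change is the `ThreadPoolExecutor` plus `yield from executor.map(...)` pattern: requests run concurrently across up to `concurrency` worker threads, while `executor.map` still yields results in the order of the input images, so descriptions stay aligned with their pictures. A stripped-down, standalone sketch of the same pattern (the `fetch_description` stub and the sample image list are placeholders, not docling code):

    from collections.abc import Iterable, Iterator
    from concurrent.futures import ThreadPoolExecutor


    def fetch_description(image: str) -> str:
        # Placeholder for a blocking HTTP call such as api_image_request(...).
        return f"description of {image}"


    def annotate_images(images: Iterable[str], concurrency: int = 2) -> Iterator[str]:
        # executor.map fans the blocking calls out over `concurrency` threads,
        # but yields results in input order, so callers can pair each result
        # back with its original image.
        with ThreadPoolExecutor(max_workers=concurrency) as executor:
            yield from executor.map(fetch_description, images)


    if __name__ == "__main__":
        for caption in annotate_images(["img-1.png", "img-2.png", "img-3.png"], concurrency=3):
            print(caption)
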