new mcp servers format

Davidson Gomes
2025-04-28 12:37:58 -03:00
parent 0112573d9b
commit e98744b7a4
7182 changed files with 4839 additions and 4998 deletions


@@ -37,6 +37,7 @@ class SagemakerConfig(BaseConfig):
"""
max_new_tokens: Optional[int] = None
max_completion_tokens: Optional[int] = None
top_p: Optional[float] = None
temperature: Optional[float] = None
return_full_text: Optional[bool] = None
@@ -44,6 +45,7 @@ class SagemakerConfig(BaseConfig):
     def __init__(
         self,
         max_new_tokens: Optional[int] = None,
+        max_completion_tokens: Optional[int] = None,
         top_p: Optional[float] = None,
         temperature: Optional[float] = None,
         return_full_text: Optional[bool] = None,
@@ -65,7 +67,7 @@ class SagemakerConfig(BaseConfig):
         )

     def get_supported_openai_params(self, model: str) -> List:
-        return ["stream", "temperature", "max_tokens", "top_p", "stop", "n"]
+        return ["stream", "temperature", "max_tokens", "max_completion_tokens", "top_p", "stop", "n"]

     def map_openai_params(
         self,
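With max_completion_tokens now advertised in get_supported_openai_params, callers can pass the OpenAI-style parameter through LiteLLM's completion interface for SageMaker models. A minimal usage sketch, assuming a deployed endpoint; the endpoint name is a placeholder, and the sagemaker/ model prefix and litellm.completion call follow LiteLLM's documented conventions:

import litellm

# Placeholder endpoint name; substitute a real SageMaker endpoint.
response = litellm.completion(
    model="sagemaker/my-llm-endpoint",
    messages=[{"role": "user", "content": "Say hello."}],
    max_completion_tokens=128,  # forwarded to SageMaker as max_new_tokens
)
print(response.choices[0].message.content)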
@@ -102,6 +104,8 @@ class SagemakerConfig(BaseConfig):
                 if value == 0:
                     value = 1
                 optional_params["max_new_tokens"] = value
+            if param == "max_completion_tokens":
+                optional_params["max_new_tokens"] = value
         non_default_params.pop("aws_sagemaker_allow_zero_temp", None)
         return optional_params
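For reference, the mapping in the hunk above can be restated as a standalone sketch: both max_tokens and the newly supported max_completion_tokens land in SageMaker's max_new_tokens, and only the max_tokens branch applies the zero-value guard. This is an illustrative restatement of the diff, not the library code itself:

def map_to_sagemaker_params(non_default_params: dict) -> dict:
    # Illustrative sketch of how OpenAI-style params become SageMaker kwargs.
    optional_params: dict = {}
    for param, value in non_default_params.items():
        if param == "max_tokens":
            # TGI-backed endpoints reject max_new_tokens == 0, so bump it to 1.
            if value == 0:
                value = 1
            optional_params["max_new_tokens"] = value
        if param == "max_completion_tokens":
            optional_params["max_new_tokens"] = value
    return optional_params

# Example: the OpenAI-style parameter maps straight through.
assert map_to_sagemaker_params({"max_completion_tokens": 256}) == {"max_new_tokens": 256}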