new mcp servers format
@@ -37,6 +37,7 @@ class SagemakerConfig(BaseConfig):
     """

     max_new_tokens: Optional[int] = None
+    max_completion_tokens: Optional[int] = None
     top_p: Optional[float] = None
     temperature: Optional[float] = None
     return_full_text: Optional[bool] = None
@@ -44,6 +45,7 @@ class SagemakerConfig(BaseConfig):
     def __init__(
         self,
         max_new_tokens: Optional[int] = None,
+        max_completion_tokens: Optional[int] = None,
         top_p: Optional[float] = None,
         temperature: Optional[float] = None,
         return_full_text: Optional[bool] = None,
@@ -65,7 +67,7 @@ class SagemakerConfig(BaseConfig):
         )

     def get_supported_openai_params(self, model: str) -> List:
-        return ["stream", "temperature", "max_tokens", "top_p", "stop", "n"]
+        return ["stream", "temperature", "max_tokens", "max_completion_tokens", "top_p", "stop", "n"]

     def map_openai_params(
         self,
@@ -102,6 +104,8 @@ class SagemakerConfig(BaseConfig):
                 if value == 0:
                     value = 1
                 optional_params["max_new_tokens"] = value
+            if param == "max_completion_tokens":
+                optional_params["max_new_tokens"] = value
         non_default_params.pop("aws_sagemaker_allow_zero_temp", None)
         return optional_params

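Taken together, the hunks above let the SageMaker provider config accept the OpenAI-style max_completion_tokens parameter alongside max_tokens, translating both to SageMaker's max_new_tokens. The following is a minimal standalone sketch of that mapping, not the provider class itself; the helper name map_tokens_param is illustrative, and only the token-related branches from the diff are reproduced:

    # Illustrative copy of the mapping added in map_openai_params above.
    def map_tokens_param(non_default_params: dict, optional_params: dict) -> dict:
        for param, value in non_default_params.items():
            if param == "max_tokens":
                if value == 0:
                    value = 1  # the endpoint rejects max_new_tokens == 0
                optional_params["max_new_tokens"] = value
            if param == "max_completion_tokens":
                optional_params["max_new_tokens"] = value
        # drop the provider-specific flag so it is not forwarded as-is
        non_default_params.pop("aws_sagemaker_allow_zero_temp", None)
        return optional_params

    # Example: both spellings end up as max_new_tokens.
    print(map_tokens_param({"max_completion_tokens": 256}, {}))  # {'max_new_tokens': 256}
    print(map_tokens_param({"max_tokens": 0}, {}))               # {'max_new_tokens': 1}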