structure saas with tools
This commit is contained in:
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,55 @@
|
||||
"""
|
||||
Translates from OpenAI's `/v1/chat/completions` to DeepSeek's `/v1/chat/completions`
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from litellm.litellm_core_utils.prompt_templates.common_utils import (
|
||||
handle_messages_with_content_list_to_str_conversion,
|
||||
)
|
||||
from litellm.secret_managers.main import get_secret_str
|
||||
from litellm.types.llms.openai import AllMessageValues
|
||||
|
||||
from ...openai.chat.gpt_transformation import OpenAIGPTConfig
|
||||
|
||||
|
||||
class DeepSeekChatConfig(OpenAIGPTConfig):
    """
    Provider config for DeepSeek's OpenAI-compatible `/chat/completions` API.

    Inherits the OpenAI GPT transformation logic and overrides only the
    DeepSeek-specific pieces: message normalization (DeepSeek rejects
    list-style `content`), credential/base-URL resolution, and the final
    request URL.
    """

    # Single source of truth for the default endpoint; previously this URL
    # was duplicated in two methods and could silently drift apart.
    DEFAULT_API_BASE = "https://api.deepseek.com/beta"

    def _transform_messages(
        self, messages: List[AllMessageValues], model: str
    ) -> List[AllMessageValues]:
        """
        Flatten message content before the standard OpenAI transformation.

        DeepSeek does not support `content` in list format, so any
        content lists are converted to plain strings first.

        Args:
            messages: Incoming OpenAI-format messages.
            model: Model name (passed through to the parent transform).

        Returns:
            Messages with string-only content, after the parent transform.
        """
        messages = handle_messages_with_content_list_to_str_conversion(messages)
        return super()._transform_messages(messages=messages, model=model)

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        """
        Resolve the API base and key for DeepSeek.

        Precedence for each value: explicit argument, then environment
        (`DEEPSEEK_API_BASE` / `DEEPSEEK_API_KEY`); the base URL also
        falls back to the default DeepSeek endpoint.

        Returns:
            Tuple of (api_base, dynamic_api_key); the key may be None if
            it is not configured anywhere.
        """
        api_base = (
            api_base
            or get_secret_str("DEEPSEEK_API_BASE")
            or DeepSeekChatConfig.DEFAULT_API_BASE
        )  # type: ignore
        dynamic_api_key = api_key or get_secret_str("DEEPSEEK_API_KEY")
        return api_base, dynamic_api_key

    def get_complete_url(
        self,
        api_base: Optional[str],
        api_key: Optional[str],
        model: str,
        optional_params: dict,
        litellm_params: dict,
        stream: Optional[bool] = None,
    ) -> str:
        """
        Build the full `/chat/completions` URL for the request.

        If `api_base` is not provided, the default DeepSeek endpoint is
        used. The path suffix is appended only when not already present.

        Returns:
            The complete chat-completions URL.
        """
        if not api_base:
            api_base = DeepSeekChatConfig.DEFAULT_API_BASE

        # Normalize trailing slashes so "https://host/" does not become
        # "https://host//chat/completions" and ".../chat/completions/"
        # does not get the suffix appended a second time.
        api_base = api_base.rstrip("/")

        if not api_base.endswith("/chat/completions"):
            api_base = f"{api_base}/chat/completions"

        return api_base
|
||||
@@ -0,0 +1,21 @@
|
||||
"""
|
||||
Cost calculator for DeepSeek Chat models.
|
||||
|
||||
Handles prompt caching scenario.
|
||||
"""
|
||||
|
||||
from typing import Tuple
|
||||
|
||||
from litellm.litellm_core_utils.llm_cost_calc.utils import generic_cost_per_token
|
||||
from litellm.types.utils import Usage
|
||||
|
||||
|
||||
def cost_per_token(model: str, usage: Usage) -> Tuple[float, float]:
    """
    Compute (prompt_cost, completion_cost) in USD for a DeepSeek model.

    Delegates to the shared generic cost calculator with the "deepseek"
    provider, which accounts for the prompt-caching pricing scenario
    (mirroring Anthropic's cost-per-token logic).

    Args:
        model: The DeepSeek model name.
        usage: Token usage for the request.

    Returns:
        Tuple of (prompt cost, completion cost).
    """
    prompt_cost, completion_cost = generic_cost_per_token(
        model=model,
        usage=usage,
        custom_llm_provider="deepseek",
    )
    return prompt_cost, completion_cost
|
||||
Reference in New Issue
Block a user