new mcp servers format

This commit is contained in:
Davidson Gomes
2025-04-28 12:37:58 -03:00
parent 0112573d9b
commit e98744b7a4
7182 changed files with 4839 additions and 4998 deletions

View File

@@ -610,6 +610,7 @@ async def _get_fuzzy_user_object(
- Check if user_email is user_email in db
- If not, create new user with user_email and sso_user_id and user_id = sso_user_id
"""
response = None
if sso_user_id is not None:
response = await prisma_client.db.litellm_usertable.find_unique(
@@ -952,6 +953,62 @@ async def get_team_object(
)
class ExperimentalUIJWTToken:
    """Mint and validate short-lived encrypted session tokens for the experimental UI login flow."""

    @staticmethod
    def get_experimental_ui_login_jwt_auth_token(user_info: LiteLLM_UserTable) -> str:
        """Create an encrypted, 10-minute dashboard session token for `user_info`.

        Args:
            user_info: the user record; must carry a non-None `user_role`.

        Returns:
            The encrypted JSON serialization of a `UserAPIKeyAuth` object
            (None-valued fields excluded).

        Raises:
            Exception: if `user_info.user_role` is None.
        """
        from datetime import UTC, datetime, timedelta

        from litellm.proxy.common_utils.encrypt_decrypt_utils import (
            encrypt_value_helper,
        )

        if user_info.user_role is None:
            raise Exception("User role is required for experimental UI login")

        # Token is valid for 10 minutes from now (UTC).
        expiration_time = datetime.now(UTC) + timedelta(minutes=10)

        # ISO 8601 with millisecond precision, e.g. "2025-01-01T00:00:00.000+00:00".
        # On an aware-UTC datetime this yields the exact same string as the former
        # strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "+00:00" hand-formatting, without
        # the fragile slice.
        expires = expiration_time.isoformat(timespec="milliseconds")

        valid_token = UserAPIKeyAuth(
            token="ui-token",
            key_name="ui-token",
            key_alias="ui-token",
            max_budget=litellm.max_ui_session_budget,
            rpm_limit=100,  # allow user to have a conversation on test key pane of UI
            expires=expires,
            user_id=user_info.user_id,
            team_id="litellm-dashboard",
            models=user_info.models,
            max_parallel_requests=None,
            user_role=LitellmUserRoles(user_info.user_role),
        )
        return encrypt_value_helper(valid_token.model_dump_json(exclude_none=True))

    @staticmethod
    def get_key_object_from_ui_hash_key(
        hashed_token: str,
    ) -> Optional[UserAPIKeyAuth]:
        """Decrypt a UI hash key back into a `UserAPIKeyAuth` object.

        Args:
            hashed_token: the encrypted token produced by
                `get_experimental_ui_login_jwt_auth_token`.

        Returns:
            The reconstructed `UserAPIKeyAuth`, or None when decryption fails
            (decrypt_value_helper is invoked with exception_type="debug", so a
            failed decrypt surfaces here as None rather than raising).

        Raises:
            Exception: if the decrypted payload is not a valid
                `UserAPIKeyAuth` JSON document.
        """
        import json

        from litellm.proxy.auth.user_api_key_auth import UserAPIKeyAuth
        from litellm.proxy.common_utils.encrypt_decrypt_utils import (
            decrypt_value_helper,
        )

        decrypted_token = decrypt_value_helper(hashed_token, exception_type="debug")
        if decrypted_token is None:
            return None
        try:
            return UserAPIKeyAuth(**json.loads(decrypted_token))
        except Exception as e:
            # Chain the cause so the original validation/JSON error is preserved.
            raise Exception(
                f"Invalid hash key. Hash key={hashed_token}. Decrypted token={decrypted_token}. Error: {e}"
            ) from e
@log_db_metrics
async def get_key_object(
hashed_token: str,

View File

@@ -24,6 +24,7 @@ from litellm.caching import DualCache
from litellm.litellm_core_utils.dd_tracing import tracer
from litellm.proxy._types import *
from litellm.proxy.auth.auth_checks import (
ExperimentalUIJWTToken,
_cache_key_object,
_get_user_role,
_is_user_proxy_admin,
@@ -51,6 +52,7 @@ from litellm.proxy.auth.oauth2_check import check_oauth2_token
from litellm.proxy.auth.oauth2_proxy_hook import handle_oauth2_proxy_request
from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
from litellm.proxy.utils import PrismaClient, ProxyLogging
from litellm.secret_managers.main import get_secret_bool
from litellm.types.services import ServiceTypes
user_api_key_service_logger_obj = ServiceLogging() # used for tracking latency on OTEL
@@ -553,6 +555,12 @@ async def _user_api_key_auth_builder( # noqa: PLR0915
verbose_logger.debug("api key not found in cache.")
valid_token = None
## Check UI Hash Key
if valid_token is None and get_secret_bool("EXPERIMENTAL_UI_LOGIN"):
valid_token = ExperimentalUIJWTToken.get_key_object_from_ui_hash_key(
api_key
)
if (
valid_token is not None
and isinstance(valid_token, UserAPIKeyAuth)