new mcp servers format

Author: Davidson Gomes
Date: 2025-04-28 12:37:58 -03:00
parent 0112573d9b
commit e98744b7a4
7182 changed files with 4839 additions and 4998 deletions


@@ -150,7 +150,11 @@ from litellm.proxy.analytics_endpoints.analytics_endpoints import (
     router as analytics_router,
 )
 from litellm.proxy.anthropic_endpoints.endpoints import router as anthropic_router
-from litellm.proxy.auth.auth_checks import get_team_object, log_db_metrics
+from litellm.proxy.auth.auth_checks import (
+    ExperimentalUIJWTToken,
+    get_team_object,
+    log_db_metrics,
+)
 from litellm.proxy.auth.auth_utils import check_response_size_is_safe
 from litellm.proxy.auth.handle_jwt import JWTHandler
 from litellm.proxy.auth.litellm_license import LicenseCheck
@@ -179,6 +183,7 @@ from litellm.proxy.common_utils.html_forms.ui_login import html_form
 from litellm.proxy.common_utils.http_parsing_utils import (
     _read_request_body,
     check_file_size_under_limit,
+    get_form_data,
 )
 from litellm.proxy.common_utils.load_config_utils import (
     get_config_file_contents_from_gcs,
@@ -212,6 +217,7 @@ from litellm.proxy.litellm_pre_call_utils import add_litellm_data_to_request
 from litellm.proxy.management_endpoints.budget_management_endpoints import (
     router as budget_management_router,
 )
+from litellm.proxy.management_endpoints.common_utils import _user_has_admin_view
 from litellm.proxy.management_endpoints.customer_endpoints import (
     router as customer_router,
 )
@@ -4120,7 +4126,7 @@ async def audio_transcriptions(
     data: Dict = {}
     try:
         # Use orjson to parse JSON data, orjson speeds up requests significantly
-        form_data = await request.form()
+        form_data = await get_form_data(request)
         data = {key: value for key, value in form_data.items() if key != "file"}
         # Include original request and headers in the data
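`get_form_data` is defined in `litellm.proxy.common_utils.http_parsing_utils` and its body is not part of this diff. As a rough sketch of what such a wrapper could look like, assuming it parses the multipart form and flattens repeated fields (that list handling is an assumption, not the actual implementation):

```python
# Hypothetical sketch only: the real get_form_data lives in
# litellm.proxy.common_utils.http_parsing_utils and is not shown in this diff.
# Assumed behaviour: parse multipart/form-data and collapse repeated keys into
# lists so downstream code gets plain Python values.
from typing import Any, Dict

from fastapi import Request


async def get_form_data(request: Request) -> Dict[str, Any]:
    form = await request.form()
    parsed: Dict[str, Any] = {}
    for key in form.keys():
        values = form.getlist(key)
        # Single values stay scalar; repeated fields become a list.
        parsed[key] = values if len(values) > 1 else values[0]
    return parsed
```

A helper in this shape lets the `audio_transcriptions` handler above keep treating `form_data` like a plain dict while repeated form fields (for example OpenAI's `timestamp_granularities[]`) survive parsing; that motivation is plausible but not stated in this commit.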
@@ -6371,6 +6377,7 @@ async def model_group_info(
         raise HTTPException(
             status_code=500, detail={"error": "LLM Router is not loaded in"}
         )
+    ## CHECK IF MODEL RESTRICTIONS ARE SET AT KEY/TEAM LEVEL ##
     model_access_groups: Dict[str, List[str]] = defaultdict(list)
     if llm_router is None:
@@ -6384,11 +6391,36 @@ async def model_group_info(
         proxy_model_list=proxy_model_list,
         model_access_groups=model_access_groups,
     )
-    team_models = get_team_models(
-        team_models=user_api_key_dict.team_models,
-        proxy_model_list=proxy_model_list,
-        model_access_groups=model_access_groups,
-    )
+    team_models = []
+    if (
+        not user_api_key_dict.team_id
+        and user_api_key_dict.user_id is not None
+        and not _user_has_admin_view(user_api_key_dict)
+    ):
+        if prisma_client is None:
+            raise HTTPException(
+                status_code=500,
+                detail={"error": CommonProxyErrors.db_not_connected_error.value},
+            )
+        user_object = await prisma_client.db.litellm_usertable.find_first(
+            where={"user_id": user_api_key_dict.user_id}
+        )
+        user_object_typed = LiteLLM_UserTable(**user_object.model_dump())
+        user_models = []
+        if user_object is not None:
+            user_models = get_team_models(
+                team_models=user_object_typed.models,
+                proxy_model_list=proxy_model_list,
+                model_access_groups=model_access_groups,
+            )
+        team_models = user_models
+    else:
+        team_models = get_team_models(
+            team_models=user_api_key_dict.team_models,
+            proxy_model_list=proxy_model_list,
+            model_access_groups=model_access_groups,
+        )
     all_models_str = get_complete_model_list(
         key_models=key_models,
         team_models=team_models,
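The new branch makes `model_group_info` fall back to the caller's personal model list when the API key has no team and the caller lacks an admin view; otherwise it keeps the team-scoped list. A self-contained sketch of that precedence, using hypothetical helper names that only mirror the branching above (nothing here is LiteLLM API):

```python
# Illustration of the selection order added above, stripped of proxy globals.
# All names are hypothetical; only the branching mirrors the diff.
from typing import Dict, List, Optional


def expand_access_groups(
    models: List[str], model_access_groups: Dict[str, List[str]]
) -> List[str]:
    """Replace access-group names with their member models (assumed behaviour
    of get_team_models)."""
    expanded: List[str] = []
    for m in models:
        expanded.extend(model_access_groups.get(m, [m]))
    return expanded


def resolve_visible_models(
    team_id: Optional[str],
    team_models: List[str],
    user_models: Optional[List[str]],
    is_admin: bool,
    model_access_groups: Dict[str, List[str]],
) -> List[str]:
    # No team on the key + non-admin caller -> show the user's own models.
    if team_id is None and not is_admin and user_models is not None:
        return expand_access_groups(user_models, model_access_groups)
    # Otherwise keep the team-scoped list attached to the API key.
    return expand_access_groups(team_models, model_access_groups)
```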
@@ -6723,7 +6755,7 @@ async def login(request: Request): # noqa: PLR0915
     )
     # check if we can find the `username` in the db. on the ui, users can enter username=their email
-    _user_row = None
+    _user_row: Optional[LiteLLM_UserTable] = None
     user_role: Optional[
         Literal[
             LitellmUserRoles.PROXY_ADMIN,
@@ -6733,8 +6765,11 @@ async def login(request: Request): # noqa: PLR0915
         ]
     ] = None
     if prisma_client is not None:
-        _user_row = await prisma_client.db.litellm_usertable.find_first(
-            where={"user_email": {"equals": username}}
+        _user_row = cast(
+            Optional[LiteLLM_UserTable],
+            await prisma_client.db.litellm_usertable.find_first(
+                where={"user_email": {"equals": username}}
+            ),
         )
     disabled_non_admin_personal_key_creation = (
         get_disabled_non_admin_personal_key_creation()
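The `cast` only narrows the static type of the Prisma row so the new `Optional[LiteLLM_UserTable]` annotation type-checks; nothing is validated at runtime. A minimal illustration with placeholder fields:

```python
# typing.cast is a no-op at runtime: it just tells the type checker what the
# untyped Prisma result is expected to be. Fields below are placeholders, not
# the real LiteLLM schema.
from typing import Optional, cast

from pydantic import BaseModel


class UserRow(BaseModel):
    user_id: str
    user_email: Optional[str] = None


def narrow(raw_row: object) -> Optional[UserRow]:
    # No validation happens here; use UserRow(**raw_row.model_dump()) if
    # runtime checking is actually needed.
    return cast(Optional[UserRow], raw_row)
```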
@@ -6799,6 +6834,31 @@ async def login(request: Request): # noqa: PLR0915
         litellm_dashboard_ui += "/ui/"
         import jwt
+        if get_secret_bool("EXPERIMENTAL_UI_LOGIN"):
+            user_info: Optional[LiteLLM_UserTable] = None
+            if _user_row is not None:
+                user_info = _user_row
+            elif (
+                user_id is not None
+            ):  # if user_id is not None, we are using the UI_USERNAME and UI_PASSWORD
+                user_info = LiteLLM_UserTable(
+                    user_id=user_id,
+                    user_role=user_role,
+                    models=[],
+                    max_budget=litellm.max_ui_session_budget,
+                )
+            if user_info is None:
+                raise HTTPException(
+                    status_code=401,
+                    detail={
+                        "error": "User Information is required for experimental UI login"
+                    },
+                )
+            key = ExperimentalUIJWTToken.get_experimental_ui_login_jwt_auth_token(
+                user_info
+            )
         jwt_token = jwt.encode(  # type: ignore
             {
                 "user_id": user_id,