Skip to content

Commit f8ce307

Browse files
committed
fix check_if_token_is_service_account
1 parent b0fa934 commit f8ce307

File tree

4 files changed

+57
-63
lines changed

4 files changed

+57
-63
lines changed

litellm/model_prices_and_context_window_backup.json

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4453,6 +4453,42 @@
44534453
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
44544454
"supports_tool_choice": true
44554455
},
4456+
"gemini-2.5-pro-exp-03-25": {
4457+
"max_tokens": 65536,
4458+
"max_input_tokens": 1048576,
4459+
"max_output_tokens": 65536,
4460+
"max_images_per_prompt": 3000,
4461+
"max_videos_per_prompt": 10,
4462+
"max_video_length": 1,
4463+
"max_audio_length_hours": 8.4,
4464+
"max_audio_per_prompt": 1,
4465+
"max_pdf_size_mb": 30,
4466+
"input_cost_per_image": 0,
4467+
"input_cost_per_video_per_second": 0,
4468+
"input_cost_per_audio_per_second": 0,
4469+
"input_cost_per_token": 0,
4470+
"input_cost_per_character": 0,
4471+
"input_cost_per_token_above_128k_tokens": 0,
4472+
"input_cost_per_character_above_128k_tokens": 0,
4473+
"input_cost_per_image_above_128k_tokens": 0,
4474+
"input_cost_per_video_per_second_above_128k_tokens": 0,
4475+
"input_cost_per_audio_per_second_above_128k_tokens": 0,
4476+
"output_cost_per_token": 0,
4477+
"output_cost_per_character": 0,
4478+
"output_cost_per_token_above_128k_tokens": 0,
4479+
"output_cost_per_character_above_128k_tokens": 0,
4480+
"litellm_provider": "vertex_ai-language-models",
4481+
"mode": "chat",
4482+
"supports_system_messages": true,
4483+
"supports_function_calling": true,
4484+
"supports_vision": true,
4485+
"supports_audio_input": true,
4486+
"supports_video_input": true,
4487+
"supports_pdf_input": true,
4488+
"supports_response_schema": true,
4489+
"supports_tool_choice": true,
4490+
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
4491+
},
44564492
"gemini-2.0-pro-exp-02-05": {
44574493
"max_tokens": 8192,
44584494
"max_input_tokens": 2097152,

litellm/proxy/auth/service_account_checks.py

Lines changed: 0 additions & 53 deletions
This file was deleted.

litellm/proxy/litellm_pre_call_utils.py

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -747,7 +747,10 @@ def _get_enforced_params(
747747
enforced_params: Optional[list] = None
748748
if general_settings is not None:
749749
enforced_params = general_settings.get("enforced_params")
750-
if "service_account_settings" in general_settings:
750+
if (
751+
"service_account_settings" in general_settings
752+
and check_if_token_is_service_account(user_api_key_dict) is True
753+
):
751754
service_account_settings = general_settings["service_account_settings"]
752755
if "enforced_params" in service_account_settings:
753756
if enforced_params is None:
@@ -760,6 +763,20 @@ def _get_enforced_params(
760763
return enforced_params
761764

762765

766+
def check_if_token_is_service_account(valid_token: UserAPIKeyAuth) -> bool:
    """
    Return True when the given API key token belongs to a service account.

    A token counts as a service account when its ``metadata`` mapping is
    non-empty and contains the ``"service_account_id"`` key; in every other
    case (no metadata, empty metadata, or key absent) it is a regular token.

    Args:
        valid_token: the authenticated key object whose metadata is inspected.

    Returns:
        bool: True if the token is a service account, False otherwise.
    """
    metadata = valid_token.metadata
    # Guard against a falsy metadata value (None / empty dict) before the
    # membership test, mirroring the original nested-if behavior.
    return bool(metadata) and "service_account_id" in metadata
778+
779+
763780
def _enforced_params_check(
764781
request_body: dict,
765782
general_settings: Optional[dict],

litellm/proxy/proxy_config.yaml

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,6 @@ model_list:
44
model: openai/gpt-4o
55
api_key: sk-xxxxxxx
66

7-
mcp_servers:
8-
{
9-
"zapier_mcp": {
10-
"url": "https://actions.zapier.com/mcp/sk-akxxxxx/sse"
11-
},
12-
"fetch": {
13-
"url": "http://localhost:8000/sse"
14-
}
15-
}
7+
general_settings:
8+
service_account_settings:
9+
enforced_params: ["user"] # this means the "user" param is enforced for all requests made through any service account keys

0 commit comments

Comments
 (0)