@@ -53,23 +53,23 @@ def test_from_anthropic_to_openai_response():
     assert result["choices"][0]["finish_reason"] == "stop"
     assert result["usage"]["prompt_tokens"] == 5

-def test_get_gcp_target_env(monkeypatch):
+def test_get_proxy_target_env(monkeypatch):
     monkeypatch.setenv("PROXY_TARGET", "https://custom-proxy")
-    result = vertex.get_gcp_target("any-model", "/v1/chat/completions")
+    result = vertex.get_proxy_target("any-model", "/v1/chat/completions")
     assert result == "https://custom-proxy"

-def test_get_gcp_target_known_chat(monkeypatch):
+def test_get_proxy_target_known_chat(monkeypatch):
     monkeypatch.delenv("PROXY_TARGET", raising=False)
     model = vertex.known_chat_models[0]
     path = "/v1/chat/completions"
-    result = vertex.get_gcp_target(model, path)
+    result = vertex.get_proxy_target(model, path)
     assert "endpoints/openapi/chat/completions" in result

-def test_get_gcp_target_raw_predict(monkeypatch):
+def test_get_proxy_target_raw_predict(monkeypatch):
     monkeypatch.delenv("PROXY_TARGET", raising=False)
     model = "unknown-model"
     path = "/v1/other"
-    result = vertex.get_gcp_target(model, path)
+    result = vertex.get_proxy_target(model, path)
     assert ":rawPredict" in result

 @patch("api.routers.vertex.get_access_token", return_value="dummy-token")
@@ -84,7 +84,7 @@ def test_get_header_removes_hop_headers(mock_token, dummy_request):
     })
     model = "test-model"
     path = "/v1/chat/completions"
-    with patch("api.routers.vertex.get_gcp_target", return_value="http://target"):
+    with patch("api.routers.vertex.get_proxy_target", return_value="http://target"):
         target_url, headers = vertex.get_header(model, req, path)
         assert target_url == "http://target"
         assert "Host" not in headers