 20 |  20 |
 21 |  21 |
 22 |  22 | def check_openai_nonstream_response(
 23 |     | -    response: Dict,
 24 |     | -    *,
 25 |     | -    model: str,
 26 |     | -    object_str: str,
 27 |     | -    num_choices: int,
 28 |     | -    finish_reason: List[str],
 29 |     | -    completion_tokens: Optional[int] = None,
    |  23 | +    response: Dict,
    |  24 | +    *,
    |  25 | +    model: str,
    |  26 | +    object_str: str,
    |  27 | +    num_choices: int,
    |  28 | +    finish_reason: List[str],
    |  29 | +    completion_tokens: Optional[int] = None,
 30 |  30 | ):
 31 |  31 |     assert response["model"] == model
 32 |  32 |     assert response["object"] == object_str
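For reference, a minimal sketch of how a helper with this signature is typically called from the server tests; the endpoint URL, port, model name, and request payload below are illustrative assumptions and are not part of this diff.

    # Hypothetical usage sketch; endpoint, model name, and payload are assumed
    # for illustration and do not appear in this diff.
    import requests

    served_model = "my-served-model"  # normally supplied by a pytest fixture
    payload = {
        "model": served_model,
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": False,
    }
    resp = requests.post(
        "http://127.0.0.1:8000/v1/chat/completions", json=payload, timeout=60
    )
    check_openai_nonstream_response(
        resp.json(),
        model=served_model,
        object_str="chat.completion",
        num_choices=1,
        finish_reason=["stop"],
    )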
@@ -68,16 +68,16 @@ def check_openai_nonstream_response(
 68 |  68 |
 69 |  69 |
 70 |  70 | def check_openai_stream_response(
 71 |     | -    responses: List[Dict],
 72 |     | -    *,
 73 |     | -    model: str,
 74 |     | -    object_str: str,
 75 |     | -    num_choices: int,
 76 |     | -    finish_reason: str,
 77 |     | -    echo_prompt: Optional[str] = None,
 78 |     | -    suffix: Optional[str] = None,
 79 |     | -    stop: Optional[List[str]] = None,
 80 |     | -    require_substr: Optional[List[str]] = None,
    |  71 | +    responses: List[Dict],
    |  72 | +    *,
    |  73 | +    model: str,
    |  74 | +    object_str: str,
    |  75 | +    num_choices: int,
    |  76 | +    finish_reason: str,
    |  77 | +    echo_prompt: Optional[str] = None,
    |  78 | +    suffix: Optional[str] = None,
    |  79 | +    stop: Optional[List[str]] = None,
    |  80 | +    require_substr: Optional[List[str]] = None,
 81 |  81 | ):
 82 |  82 |     assert len(responses) > 0
 83 |  83 |
@@ -137,12 +137,12 @@ def check_openai_stream_response(
137 | 137 |
138 | 138 | def check_format(name_beg: str, name_end: str, beg_tag: str, schema: str):
139 | 139 |     try:
140 |     | -        schema: dict[str, Any] = json.loads(schema)
    | 140 | +        paras: Dict[str, Any] = json.loads(schema)
141 | 141 |     except json.JSONDecodeError as e:
142 | 142 |         print(f"Invalid JSON format: {e}")
143 | 143 |         assert False
144 |     | -    assert "hash_code" in schema
145 |     | -    hash_code = schema["hash_code"]
    | 144 | +    assert "hash_code" in paras
    | 145 | +    hash_code = paras["hash_code"]
146 | 146 |     assert hash_code in CHECK_INFO
147 | 147 |     info = CHECK_INFO[hash_code]
148 | 148 |     assert name_beg == info["name"]
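The rename from schema to paras matters because the old code rebound the `schema: str` parameter to the parsed dict, which obscures the parameter's declared type and confuses static checkers; the new name also sticks to the `Dict` alias used in the other signatures in this file. A simplified sketch of the resulting pattern (the `CHECK_INFO` lookup and the later assertions are omitted):

    # Simplified sketch of the corrected pattern: parse into a new name
    # instead of rebinding the `schema` parameter.
    import json
    from typing import Any, Dict

    def parse_schema_params(schema: str) -> Dict[str, Any]:
        paras: Dict[str, Any] = json.loads(schema)  # `schema` stays a str
        assert "hash_code" in paras
        return paras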
@@ -239,7 +239,7 @@ def check_format(name_beg: str, name_end: str, beg_tag: str, schema: str):
239 | 239 |                 "state": {
240 | 240 |                     "type": "string",
241 | 241 |                     "description": "the two-letter abbreviation for the state that the city is"
242 |     | -                    " in, e.g. 'CA' which would mean 'California'",
    | 242 | +                    " in, e.g. 'CA' which would mean 'California'",
243 | 243 |                 },
244 | 244 |                 "unit": {
245 | 245 |                     "type": "string",
@@ -283,7 +283,7 @@ def check_format(name_beg: str, name_end: str, beg_tag: str, schema: str):
283 | 283 |                 "state": {
284 | 284 |                     "type": "string",
285 | 285 |                     "description": "the two-letter abbreviation for the state that the city is"
286 |     | -                    " in, e.g. 'CA' which would mean 'California'",
    | 286 | +                    " in, e.g. 'CA' which would mean 'California'",
287 | 287 |                 },
288 | 288 |                 "unit": {
289 | 289 |                     "type": "string",
@@ -371,10 +371,10 @@ def check_format(name_beg: str, name_end: str, beg_tag: str, schema: str):
371 | 371 | @pytest.mark.parametrize("stream", [False, True])
372 | 372 | @pytest.mark.parametrize("messages", CHAT_COMPLETION_MESSAGES)
373 | 373 | def test_openai_v1_chat_completion_structural_tag(
374 |     | -    served_model: str,
375 |     | -    launch_server,  # pylint: disable=unused-argument
376 |     | -    stream: bool,
377 |     | -    messages: List[Dict[str, str]],
    | 374 | +    served_model: str,
    | 375 | +    launch_server,  # pylint: disable=unused-argument
    | 376 | +    stream: bool,
    | 377 | +    messages: List[Dict[str, str]],
378 | 378 | ):
379 | 379 |     # `served_model` and `launch_server` are pytest fixtures
380 | 380 |     # defined in conftest.py.
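The comment above refers to fixtures defined in conftest.py, which this diff does not show. A hypothetical sketch of their shape, purely for orientation; the launch command and model identifier below are assumptions:

    # Hypothetical conftest.py sketch; the real fixtures are not part of this diff.
    import subprocess
    import pytest

    @pytest.fixture(scope="session")
    def served_model() -> str:
        return "dist/my-model"  # assumed model identifier

    @pytest.fixture(scope="session")
    def launch_server(served_model):
        # Assumed launch command: start the OpenAI-compatible server once per
        # session, hand control to the tests, then shut it down.
        proc = subprocess.Popen(
            ["python", "-m", "example_server", "--model", served_model]
        )
        yield
        proc.terminate()
        proc.wait()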