@@ -1018,6 +1018,7 @@ async def _complete_stream(
         *,
         response_format: Literal[None] = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1031,6 +1032,7 @@ async def _complete_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1043,6 +1045,7 @@ async def _complete_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema | None = None,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,
+        preset: str | None = None,
         on_message: PredictionMessageCallback | None = None,
         on_first_token: PredictionFirstTokenCallback | None = None,
         on_prediction_fragment: PredictionFragmentCallback | None = None,
@@ -1057,6 +1060,7 @@ async def _complete_stream(
             prompt,
             response_format,
             config,
+            preset,
             on_message,
             on_first_token,
             on_prediction_fragment,
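Because each prediction method is published as a set of `typing.overload` stubs plus one real implementation, the new keyword has to be repeated in every stub (with `...` placeholder defaults) and once more in the implementation (with a real `None` default) before being passed on to the internal call. The following is a minimal, self-contained sketch of that pattern; the class name and signatures are illustrative only, not the SDK's actual code.

```python
from typing import Literal, overload


class _Sketch:
    # Stub for the plain-text case: callers omit response_format.
    @overload
    async def _complete_stream(
        self, prompt: str, *, response_format: Literal[None] = ..., preset: str | None = ...
    ) -> str: ...
    # Stub for the structured case: callers pass a schema type.
    @overload
    async def _complete_stream(
        self, prompt: str, *, response_format: type, preset: str | None = ...
    ) -> dict: ...
    # The implementation carries the real defaults and does the work;
    # the stubs above exist only for static type checking.
    async def _complete_stream(
        self,
        prompt: str,
        *,
        response_format: type | None = None,
        preset: str | None = None,
    ) -> str | dict:
        raise NotImplementedError
```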
@@ -1074,6 +1078,7 @@ async def _respond_stream(
         *,
         response_format: Literal[None] = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1087,6 +1092,7 @@ async def _respond_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1100,6 +1106,7 @@ async def _respond_stream(
         response_format: Type[ModelSchema] | DictSchema | None = None,
         on_message: PredictionMessageCallback | None = None,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,
+        preset: str | None = None,
         on_first_token: PredictionFirstTokenCallback | None = None,
         on_prediction_fragment: PredictionFragmentCallback | None = None,
         on_prompt_processing_progress: PromptProcessingCallback | None = None,
@@ -1115,6 +1122,7 @@ async def _respond_stream(
             history,
             response_format,
             config,
+            preset,
             on_message,
             on_first_token,
             on_prediction_fragment,
@@ -1248,6 +1256,7 @@ async def complete_stream(
         *,
         response_format: Literal[None] = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1260,6 +1269,7 @@ async def complete_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1272,6 +1282,7 @@ async def complete_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema | None = None,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,
+        preset: str | None = None,
         on_message: PredictionMessageCallback | None = None,
         on_first_token: PredictionFirstTokenCallback | None = None,
         on_prediction_fragment: PredictionFragmentCallback | None = None,
@@ -1286,6 +1297,7 @@ async def complete_stream(
             prompt,
             response_format=response_format,
             config=config,
+            preset=preset,
             on_message=on_message,
             on_first_token=on_first_token,
             on_prediction_fragment=on_prediction_fragment,
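With the public `complete_stream()` overloads updated, callers should be able to name a preset alongside the prompt and the usual callbacks. A hedged usage sketch follows: it takes an already-acquired async LLM handle as a parameter rather than showing client setup, assumes the returned stream is async-iterable and yields fragments with a `.content` attribute, and uses a placeholder preset identifier that is not part of this diff.

```python
async def stream_completion(llm) -> None:
    """llm: an already-acquired async LLM handle (client setup not shown)."""
    stream = await llm.complete_stream(
        "The capital of France is",
        preset="my-preset",  # hypothetical identifier of a preset saved in LM Studio
    )
    async for fragment in stream:  # assumed: fragments arrive as they are generated
        print(fragment.content, end="", flush=True)
    print()
```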
@@ -1299,6 +1311,7 @@ async def complete(
         *,
         response_format: Literal[None] = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1311,6 +1324,7 @@ async def complete(
         *,
         response_format: Type[ModelSchema] | DictSchema = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1323,6 +1337,7 @@ async def complete(
         *,
         response_format: Type[ModelSchema] | DictSchema | None = None,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,
+        preset: str | None = None,
         on_message: PredictionMessageCallback | None = None,
         on_first_token: PredictionFirstTokenCallback | None = None,
         on_prediction_fragment: PredictionFragmentCallback | None = None,
@@ -1337,6 +1352,7 @@ async def complete(
             prompt,
             response_format=response_format,
             config=config,
+            preset=preset,
             on_message=on_message,
             on_first_token=on_first_token,
             on_prediction_fragment=on_prediction_fragment,
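The non-streaming `complete()` gains the same keyword. Another hedged sketch, again parameterised on the model handle; the preset name is hypothetical, and how a named preset layers with a per-call `config` is not something this diff specifies.

```python
async def complete_once(llm) -> None:
    """llm: an already-acquired async LLM handle (client setup not shown)."""
    result = await llm.complete(
        "List three prime numbers:",
        config={"temperature": 0.2},  # per-call config dict is still accepted
        preset="careful-math",        # hypothetical preset identifier
    )
    print(result)
```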
@@ -1355,6 +1371,7 @@ async def respond_stream(
         *,
         response_format: Literal[None] = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1367,6 +1384,7 @@ async def respond_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1379,6 +1397,7 @@ async def respond_stream(
         *,
         response_format: Type[ModelSchema] | DictSchema | None = None,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,
+        preset: str | None = None,
         on_message: PredictionMessageCallback | None = None,
         on_first_token: PredictionFirstTokenCallback | None = None,
         on_prediction_fragment: PredictionFragmentCallback | None = None,
@@ -1393,6 +1412,7 @@ async def respond_stream(
             history,
             response_format=response_format,
             config=config,
+            preset=preset,
             on_message=on_message,
             on_first_token=on_first_token,
             on_prediction_fragment=on_prediction_fragment,
@@ -1406,6 +1426,7 @@ async def respond(
         *,
         response_format: Literal[None] = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1418,6 +1439,7 @@ async def respond(
         *,
         response_format: Type[ModelSchema] | DictSchema = ...,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = ...,
+        preset: str | None = ...,
         on_message: PredictionMessageCallback | None = ...,
         on_first_token: PredictionFirstTokenCallback | None = ...,
         on_prediction_fragment: PredictionFragmentCallback | None = ...,
@@ -1430,6 +1452,7 @@ async def respond(
         *,
         response_format: Type[ModelSchema] | DictSchema | None = None,
         config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,
+        preset: str | None = None,
         on_message: PredictionMessageCallback | None = None,
         on_first_token: PredictionFirstTokenCallback | None = None,
         on_prediction_fragment: PredictionFragmentCallback | None = None,
@@ -1444,6 +1467,7 @@ async def respond(
             history,
             response_format=response_format,
             config=config,
+            preset=preset,
             on_message=on_message,
             on_first_token=on_first_token,
             on_prediction_fragment=on_prediction_fragment,
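Finally, `respond()` and `respond_stream()` accept the keyword for chat-style predictions over a history. The sketch below assumes the SDK's `Chat` helper for building that history and an already-acquired async handle; the preset identifier is a placeholder, and wiring `on_message=chat.append` to record the reply is an assumption about the callback, not something shown in this diff.

```python
from lmstudio import Chat


async def ask(llm) -> None:
    """llm: an already-acquired async LLM handle (client setup not shown)."""
    chat = Chat("You are a terse assistant.")
    chat.add_user_message("What is the tallest mountain on Earth?")
    result = await llm.respond(
        chat,
        preset="terse-answers",  # hypothetical preset identifier
        on_message=chat.append,  # assumed: append the assistant reply to the history
    )
    print(result)
```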