We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent e4a4cf3 commit ac63724 — Copy full SHA for ac63724
src/lib/components/InferencePlayground/inferencePlaygroundUtils.ts
@@ -24,7 +24,7 @@ export async function handleStreamingResponse(
24
model: model.id,
25
messages,
26
temperature: conversation.config.temperature,
27
- max_tokens: conversation.config.maxTokens,
+ max_tokens: conversation.config.max_tokens,
28
},
29
{ signal: abortController.signal, use_cache: false }
30
)) {
@@ -50,7 +50,7 @@ export async function handleNonStreamingResponse(
50
51
52
53
54
55
{ use_cache: false }
56
);
0 commit comments