
Commit f4db7fa

fix snippets
1 parent 6ca8d92 commit f4db7fa

6 files changed: +28 -22 lines changed

.prettierrc.mjs

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 export default {
 	arrowParens: "avoid",
 	quoteProps: "consistent",
-	trailingComma: "es5",
+	trailingComma: "all",
 	useTabs: true,
 	tabWidth: 2,
 	printWidth: 120,
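
Switching Prettier's trailingComma from "es5" to "all" adds trailing commas after the last argument of function calls and parameter lists, not just in arrays and object literals; that is what drives the comma-only changes in the files below. A minimal before/after illustration using the highlight() call touched by this commit:

// trailingComma: "es5" — no comma after the last call argument
highlight(
	selectedSnippet?.content,
	selectedSnippet?.language
);

// trailingComma: "all" — a trailing comma is appended, as in this commit's reformatting
highlight(
	selectedSnippet?.content,
	selectedSnippet?.language,
);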

package.json

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@
 		"@eslint/js": "^9.22.0",
 		"@floating-ui/dom": "^1.6.13",
 		"@huggingface/hub": "^2.1.0",
-		"@huggingface/inference": "^3.13.2",
+		"@huggingface/inference": "^4.4.0",
 		"@huggingface/tasks": "^0.19.8",
 		"@huggingface/transformers": "^3.5.1",
 		"@iconify-json/carbon": "^1.2.8",

pnpm-lock.yaml

Lines changed: 11 additions & 6 deletions
Some generated files are not rendered by default.

src/lib/components/inference-playground/code-snippets.svelte

Lines changed: 7 additions & 5 deletions
@@ -82,8 +82,8 @@
 	fromEntries(
 		keys(labelsByLanguage).map(lang => {
 			return [lang, 0];
-		})
-	)
+		}),
+	),
 );

 type InstallInstructions = {
@@ -147,7 +147,9 @@
 	class="border-b border-gray-200 text-center text-sm font-medium text-gray-500 dark:border-gray-700 dark:text-gray-400"
 >
 	<ul class="-mb-px flex flex-wrap">
-		{#each entries(labelsByLanguage) as [language, label]}
+		{#each entries(labelsByLanguage).filter(([lang]) => {
+			return snippetsByLang[lang]?.length;
+		}) as [language, label]}
 			<li>
 				<button
 					onclick={() => (lang = language)}
@@ -218,7 +220,7 @@
 	<pre
 		class="overflow-x-auto rounded-lg border border-gray-200/80 bg-white px-4 py-6 text-sm shadow-xs dark:border-gray-800 dark:bg-gray-800/50">{@html highlight(
 		installInstructions.content,
-		selectedSnippet?.language
+		selectedSnippet?.language,
 	)}</pre>
 {/if}

@@ -253,6 +255,6 @@
 	<pre
 		class="overflow-x-auto rounded-lg border border-gray-200/80 bg-white px-4 py-6 text-sm shadow-xs dark:border-gray-800 dark:bg-gray-800/50">{@html highlight(
 		selectedSnippet?.content,
-		selectedSnippet?.language
+		selectedSnippet?.language,
 	)}</pre>
 </div>
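
The new filter in the {#each} block above hides language tabs that have no snippets for the current selection. A minimal TypeScript sketch of that filtering logic in isolation — the language keys, labels, and snippet contents below are made up for illustration; only the filter shape comes from the diff:

type Language = "javascript" | "python" | "http";

// Hypothetical tab labels, mirroring the labelsByLanguage map used in the component.
const labelsByLanguage: Record<Language, string> = {
	javascript: "JavaScript",
	python: "Python",
	http: "cURL",
};

// Hypothetical snippet data: python has no snippets, so it should not get a tab.
const snippetsByLang: Record<Language, { content: string }[]> = {
	javascript: [{ content: "// fetch-based snippet" }],
	python: [],
	http: [{ content: "# curl snippet" }],
};

// Same predicate as the Svelte {#each}: keep a language only if it has at least one snippet.
const visibleTabs = Object.entries(labelsByLanguage).filter(
	([lang]) => snippetsByLang[lang as Language]?.length,
);

console.log(visibleTabs.map(([, label]) => label)); // ["JavaScript", "cURL"]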

src/lib/types.ts

Lines changed: 1 addition & 0 deletions
@@ -161,6 +161,7 @@ export enum Provider {
 	Together = "together",
 	Cohere = "cohere",
 	Groq = "groq",
+	Auto = "auto",
 }

 export enum Status {
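
The new Auto member gives the Provider enum a value for automatic provider routing, which the snippet guard in business.svelte.ts below treats as valid even without a concrete provider mapping. A small hypothetical sketch of that guard on its own (the helper name and mapping shape are invented for illustration):

enum Provider {
	Together = "together",
	Cohere = "cohere",
	Groq = "groq",
	Auto = "auto",
}

interface ProviderMapping {
	provider: string;
}

// Mirrors the guard added in getInferenceSnippet: a missing mapping only
// blocks snippet generation when a specific provider was requested.
function canGenerateSnippets(provider: Provider, mappings: ProviderMapping[]): boolean {
	const providerMapping = mappings.find(p => p.provider === provider);
	return Boolean(providerMapping) || provider === Provider.Auto;
}

console.log(canGenerateSnippets(Provider.Groq, [])); // false
console.log(canGenerateSnippets(Provider.Auto, [])); // true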

src/lib/utils/business.svelte.ts

Lines changed: 7 additions & 9 deletions
@@ -21,7 +21,6 @@ import {
 } from "$lib/types.js";
 import { safeParse } from "$lib/utils/json.js";
 import { omit, tryGet } from "$lib/utils/object.svelte.js";
-import { type InferenceProvider } from "@huggingface/inference";
 import type { ChatCompletionInputMessage, InferenceSnippet } from "@huggingface/tasks";
 import { type ChatCompletionOutputMessage } from "@huggingface/tasks";
 import { AutoTokenizer, PreTrainedTokenizer } from "@huggingface/transformers";
@@ -120,7 +119,7 @@ function getResponseFormatObj(conversation: ConversationClass | Conversation) {

 async function getCompletionMetadata(
 	conversation: ConversationClass | Conversation,
-	signal?: AbortSignal
+	signal?: AbortSignal,
 ): Promise<CompletionMetadata> {
 	const data = conversation instanceof ConversationClass ? conversation.data : conversation;
 	const model = conversation.model;
@@ -180,7 +179,7 @@ async function getCompletionMetadata(
 export async function handleStreamingResponse(
 	conversation: ConversationClass | Conversation,
 	onChunk: (content: string) => void,
-	abortController: AbortController
+	abortController: AbortController,
 ): Promise<void> {
 	const metadata = await getCompletionMetadata(conversation, abortController.signal);

@@ -211,7 +210,7 @@ export async function handleStreamingResponse(
 }

 export async function handleNonStreamingResponse(
-	conversation: ConversationClass | Conversation
+	conversation: ConversationClass | Conversation,
 ): Promise<{ message: ChatCompletionOutputMessage; completion_tokens: number }> {
 	const metadata = await getCompletionMetadata(conversation);

@@ -325,26 +324,25 @@ export function getInferenceSnippet(
 		temperature?: ConversationEntityMembers["config"]["temperature"];
 		top_p?: ConversationEntityMembers["config"]["top_p"];
 		structured_output?: ConversationEntityMembers["structuredOutput"];
-	}
+	},
 ): GetInferenceSnippetReturn {
 	const model = conversation.model;
 	const data = conversation.data;
-	const provider = (isCustomModel(model) ? "hf-inference" : data.provider) as InferenceProvider;
+	const provider = (isCustomModel(model) ? "hf-inference" : data.provider) as Provider;

 	// If it's a custom model, we don't generate inference snippets
 	if (isCustomModel(model)) {
 		return [];
 	}

 	const providerMapping = model.inferenceProviderMapping.find(p => p.provider === provider);
-	if (!providerMapping) return [];
+	if (!providerMapping && provider !== "auto") return [];
 	const allSnippets = snippets.getInferenceSnippets(
 		{ ...model, inference: "" },
-		accessToken,
 		provider,
 		// eslint-disable-next-line @typescript-eslint/no-explicit-any
 		{ ...providerMapping, hfModelId: model.id } as any,
-		opts
+		{ ...opts, accessToken },
 	);

 	return allSnippets
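
The last hunk adapts the call site to the @huggingface/inference bump in package.json: the access token is no longer passed as its own positional argument to snippets.getInferenceSnippets but folded into the trailing options object, and a provider of "auto" no longer requires a concrete provider mapping. The sketch below uses a hypothetical, simplified signature written only to illustrate that argument reshuffle; it is not the library's actual type definition, and the model id and token are placeholders.

// Hypothetical, simplified stand-in for the real snippet generator.
type SnippetOpts = {
	temperature?: number;
	top_p?: number;
	accessToken?: string; // new shape: the token travels with the other options
};

function getInferenceSnippetsSketch(
	model: { id: string },
	provider: string,
	providerMapping: Record<string, unknown>,
	opts: SnippetOpts,
): string[] {
	// A real implementation would render per-language snippets here.
	return [`// ${provider} snippet for ${model.id} (temperature=${opts.temperature ?? "default"})`];
}

// Old call shape removed by this commit: the token was a separate positional argument.
// getInferenceSnippets(model, accessToken, provider, providerMapping, opts);

// New call shape mirroring the diff: the token is folded into opts.
const out = getInferenceSnippetsSketch(
	{ id: "some-org/some-model" }, // placeholder model id
	"auto",
	{},
	{ temperature: 0.7, accessToken: "hf_xxx" }, // placeholder token
);
console.log(out);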
