Skip to content

Commit 3cbea34

Browse files
Wauplin and nsarrazin authored
Standardize HF_ACCESS_TOKEN -> HF_TOKEN (#610)
* Standardize HF_ACCESS_TOKEN -> HF_TOKEN * Replace HF_ACCESS_TOKEN by HF_TOKEN in .env * Add legacy support for HF_ACCESS_TOKEN --------- Co-authored-by: Nathan Sarrazin <sarrazin.nathan@gmail.com>
1 parent ea856f7 commit 3cbea34

File tree

8 files changed

+32
-20
lines changed

8 files changed

+32
-20
lines changed

.env

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,12 @@ MONGODB_DB_NAME=chat-ui
66
MONGODB_DIRECT_CONNECTION=false
77

88
COOKIE_NAME=hf-chat
9-
HF_ACCESS_TOKEN=#hf_<token> from https://huggingface.co/settings/token
9+
HF_TOKEN=#hf_<token> from https://huggingface.co/settings/token
1010
HF_API_ROOT=https://api-inference.huggingface.co/models
1111
OPENAI_API_KEY=#your openai api key here
1212

13+
HF_ACCESS_TOKEN=#LEGACY! Use HF_TOKEN instead
14+
1315
# used to activate search with web functionality. disabled if none are defined. choose one of the following:
1416
YDC_API_KEY=#your docs.you.com api key here
1517
SERPER_API_KEY=#your serper.dev api key here

.github/workflows/deploy-release.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ jobs:
2424
SERPER_API_KEY: ${{ secrets.SERPER_API_KEY }}
2525
OPENID_CONFIG: ${{ secrets.OPENID_CONFIG }}
2626
MONGODB_URL: ${{ secrets.MONGODB_URL }}
27-
HF_ACCESS_TOKEN: ${{ secrets.HF_ACCESS_TOKEN }}
27+
HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
2828
run: npm run updateProdEnv
2929
sync-to-hub:
3030
runs-on: ubuntu-latest
@@ -39,5 +39,5 @@ jobs:
3939
lfs: true
4040
- name: Push to hub
4141
env:
42-
HF_TOKEN: ${{ secrets.HF_TOKEN }}
43-
run: git push https://nsarrazin:$HF_TOKEN@huggingface.co/spaces/huggingchat/chat-ui main
42+
HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
43+
run: git push https://nsarrazin:$HF_DEPLOYMENT_TOKEN@huggingface.co/spaces/huggingchat/chat-ui main

.github/workflows/deploy-staging.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,5 +20,5 @@ jobs:
2020
lfs: true
2121
- name: Push to hub
2222
env:
23-
HF_TOKEN: ${{ secrets.HF_TOKEN }}
24-
run: git push https://nsarrazin:$HF_TOKEN@huggingface.co/spaces/huggingchat/chat-ui-staging main
23+
HF_DEPLOYMENT_TOKEN: ${{ secrets.HF_DEPLOYMENT_TOKEN }}
24+
run: git push https://nsarrazin:$HF_DEPLOYMENT_TOKEN@huggingface.co/spaces/huggingchat/chat-ui-staging main

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ If you don't want to configure, setup, and launch your own Chat UI yourself, you
3030

3131
You can deploy your own customized Chat UI instance with any supported [LLM](https://huggingface.co/models?pipeline_tag=text-generation&sort=trending) of your choice on [Hugging Face Spaces](https://huggingface.co/spaces). To do so, use the chat-ui template [available here](https://huggingface.co/new-space?template=huggingchat/chat-ui-template).
3232

33-
Set `HUGGING_FACE_HUB_TOKEN` in [Space secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables) to deploy a model with gated access or a model in a private repository. It's also compatible with [Inference for PROs](https://huggingface.co/blog/inference-pro) curated list of powerful models with higher rate limits. Make sure to create your personal token first in your [User Access Tokens settings](https://huggingface.co/settings/tokens).
33+
Set `HF_TOKEN` in [Space secrets](https://huggingface.co/docs/hub/spaces-overview#managing-secrets-and-environment-variables) to deploy a model with gated access or a model in a private repository. It's also compatible with [Inference for PROs](https://huggingface.co/blog/inference-pro) curated list of powerful models with higher rate limits. Make sure to create your personal token first in your [User Access Tokens settings](https://huggingface.co/settings/tokens).
3434

3535
Read the full tutorial [here](https://huggingface.co/docs/hub/spaces-sdks-docker-chatui#chatui-on-spaces).
3636

@@ -42,7 +42,7 @@ Start by creating a `.env.local` file in the root of the repository. The bare mi
4242

4343
```env
4444
MONGODB_URL=<the URL to your MongoDB instance>
45-
HF_ACCESS_TOKEN=<your access token>
45+
HF_TOKEN=<your access token>
4646
```
4747

4848
### Database
@@ -397,7 +397,7 @@ You can then add the generated information and the `authorization` parameter to
397397
]
398398
```
399399

400-
Please note that if `HF_ACCESS_TOKEN` is also set or not empty, it will take precedence.
400+
Please note that if `HF_TOKEN` is also set or not empty, it will take precedence.
401401

402402
#### Models hosted on multiple custom endpoints
403403

scripts/updateProdEnv.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
import fs from "fs";
22

3-
const HF_TOKEN = process.env.HF_TOKEN; // token used for pushing to hub
3+
const HF_DEPLOYMENT_TOKEN = process.env.HF_DEPLOYMENT_TOKEN; // token used for pushing to hub
44

55
const SERPER_API_KEY = process.env.SERPER_API_KEY;
66
const OPENID_CONFIG = process.env.OPENID_CONFIG;
77
const MONGODB_URL = process.env.MONGODB_URL;
8-
const HF_ACCESS_TOKEN = process.env.HF_ACCESS_TOKEN; // token used for API requests in prod
8+
const HF_TOKEN = process.env.HF_TOKEN ?? process.env.HF_ACCESS_TOKEN; // token used for API requests in prod
99

1010
// Read the content of the file .env.template
1111
const PUBLIC_CONFIG = fs.readFileSync(".env.template", "utf8");
@@ -15,7 +15,7 @@ const full_config = `${PUBLIC_CONFIG}
1515
MONGODB_URL=${MONGODB_URL}
1616
OPENID_CONFIG=${OPENID_CONFIG}
1717
SERPER_API_KEY=${SERPER_API_KEY}
18-
HF_ACCESS_TOKEN=${HF_ACCESS_TOKEN}
18+
HF_TOKEN=${HF_TOKEN}
1919
`;
2020

2121
// Make an HTTP POST request to add the space secrets
@@ -27,7 +27,7 @@ fetch(`https://huggingface.co/api/spaces/huggingchat/chat-ui/secrets`, {
2727
description: `Env variable for HuggingChat. Last updated ${new Date().toISOString()}`,
2828
}),
2929
headers: {
30-
Authorization: `Bearer ${HF_TOKEN}`,
30+
Authorization: `Bearer ${HF_DEPLOYMENT_TOKEN}`,
3131
"Content-Type": "application/json",
3232
},
3333
});

src/lib/server/endpoints/llamacpp/endpointLlamacpp.ts

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { HF_ACCESS_TOKEN } from "$env/static/private";
1+
import { HF_ACCESS_TOKEN, HF_TOKEN } from "$env/static/private";
22
import { buildPrompt } from "$lib/buildPrompt";
33
import type { TextGenerationStreamOutput } from "@huggingface/inference";
44
import type { Endpoint } from "../endpoints";
@@ -9,7 +9,10 @@ export const endpointLlamacppParametersSchema = z.object({
99
model: z.any(),
1010
type: z.literal("llamacpp"),
1111
url: z.string().url().default("http://127.0.0.1:8080"),
12-
accessToken: z.string().min(1).default(HF_ACCESS_TOKEN),
12+
accessToken: z
13+
.string()
14+
.min(1)
15+
.default(HF_TOKEN ?? HF_ACCESS_TOKEN),
1316
});
1417

1518
export function endpointLlamacpp(

src/lib/server/endpoints/tgi/endpointTgi.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { HF_ACCESS_TOKEN } from "$env/static/private";
1+
import { HF_ACCESS_TOKEN, HF_TOKEN } from "$env/static/private";
22
import { buildPrompt } from "$lib/buildPrompt";
33
import { textGenerationStream } from "@huggingface/inference";
44
import type { Endpoint } from "../endpoints";
@@ -9,7 +9,7 @@ export const endpointTgiParametersSchema = z.object({
99
model: z.any(),
1010
type: z.literal("tgi"),
1111
url: z.string().url(),
12-
accessToken: z.string().default(HF_ACCESS_TOKEN),
12+
accessToken: z.string().default(HF_TOKEN ?? HF_ACCESS_TOKEN),
1313
authorization: z.string().optional(),
1414
});
1515

@@ -35,7 +35,7 @@ export function endpointTgi(input: z.input<typeof endpointTgiParametersSchema>):
3535
use_cache: false,
3636
fetch: async (endpointUrl, info) => {
3737
if (info && authorization && !accessToken) {
38-
// Set authorization header if it is defined and HF_ACCESS_TOKEN is empty
38+
// Set authorization header if it is defined and HF_TOKEN is empty
3939
info.headers = {
4040
...info.headers,
4141
Authorization: authorization,

src/lib/server/models.ts

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,11 @@
1-
import { HF_ACCESS_TOKEN, HF_API_ROOT, MODELS, OLD_MODELS, TASK_MODEL } from "$env/static/private";
1+
import {
2+
HF_TOKEN,
3+
HF_API_ROOT,
4+
MODELS,
5+
OLD_MODELS,
6+
TASK_MODEL,
7+
HF_ACCESS_TOKEN,
8+
} from "$env/static/private";
29
import type { ChatTemplateInput } from "$lib/types/Template";
310
import { compileTemplate } from "$lib/utils/template";
411
import { z } from "zod";
@@ -80,7 +87,7 @@ const addEndpoint = (m: Awaited<ReturnType<typeof processModel>>) => ({
8087
return endpointTgi({
8188
type: "tgi",
8289
url: `${HF_API_ROOT}/${m.name}`,
83-
accessToken: HF_ACCESS_TOKEN,
90+
accessToken: HF_TOKEN ?? HF_ACCESS_TOKEN,
8491
weight: 1,
8592
model: m,
8693
});

0 commit comments

Comments
 (0)