Skip to content

Commit 72fcd0c

Browse files
authored
fix: https proxy config (#620)
Signed-off-by: Bob Du <i@bobdu.cc>
1 parent fd8d2fa commit 72fcd0c

File tree

3 files changed

+28
-17
lines changed

3 files changed

+28
-17
lines changed

service/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,15 +35,15 @@
3535
"express": "^5.1.0",
3636
"express-rate-limit": "^6.7.0",
3737
"file-type": "^19.0.0",
38-
"https-proxy-agent": "^7.0.6",
3938
"jsonwebtoken": "^9.0.0",
4039
"mongodb": "^6.16.0",
4140
"multer": "^2.0.0",
4241
"nodemailer": "^6.9.13",
4342
"openai": "^5.1.0",
4443
"request-ip": "^3.3.0",
4544
"speakeasy": "^2.0.0",
46-
"tsx": "^4.7.0"
45+
"tsx": "^4.7.0",
46+
"undici": "^6.21.3"
4747
},
4848
"devDependencies": {
4949
"@antfu/eslint-config": "^4.13.2",

service/pnpm-lock.yaml

Lines changed: 8 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

service/src/chatgpt/index.ts

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
1+
import type { ClientOptions } from 'openai'
2+
import type { RequestInit } from 'undici'
13
import type { AuditConfig, Config, KeyConfig, UserInfo } from '../storage/model'
24
import type { TextAuditService } from '../utils/textAudit'
35
import type { ChatMessage, RequestOptions } from './types'
46
import { tavily } from '@tavily/core'
57
import dayjs from 'dayjs'
68
import * as dotenv from 'dotenv'
7-
import { HttpsProxyAgent } from 'https-proxy-agent'
89
import OpenAI from 'openai'
10+
import * as undici from 'undici'
911
import { getCacheApiKeys, getCacheConfig, getOriginConfig } from '../storage/config'
1012
import { Status, UsageResponse } from '../storage/model'
1113
import { getChatByMessageId, updateChatSearchQuery, updateChatSearchResult } from '../storage/mongo'
@@ -36,19 +38,23 @@ export async function initApi(key: KeyConfig) {
3638
const config = await getCacheConfig()
3739
const openaiBaseUrl = isNotEmptyString(key.baseUrl) ? key.baseUrl : config.apiBaseUrl
3840

39-
let httpAgent: HttpsProxyAgent<any> | undefined
40-
if (isNotEmptyString(config.httpsProxy)) {
41-
const httpsProxy = config.httpsProxy
42-
if (httpsProxy)
43-
httpAgent = new HttpsProxyAgent(httpsProxy)
44-
}
45-
46-
const client = new OpenAI({
41+
const clientOptions: ClientOptions = {
4742
baseURL: openaiBaseUrl,
4843
apiKey: key.key,
49-
httpAgent,
50-
})
51-
return client
44+
}
45+
46+
const httpsProxy = config.httpsProxy
47+
if (httpsProxy && isNotEmptyString(httpsProxy)) {
48+
clientOptions.fetch = (input: string | URL | Request, init: RequestInit) => {
49+
return undici.fetch(input, {
50+
...init,
51+
dispatcher: new undici.ProxyAgent({
52+
uri: httpsProxy,
53+
}),
54+
})
55+
}
56+
}
57+
return new OpenAI(clientOptions)
5258
}
5359

5460
const processThreads: { userId: string, abort: AbortController, messageId: string }[] = []

0 commit comments

Comments (0)