
Commit 2e61524

docs: add readme
1 parent 5746931 commit 2e61524

4 files changed: 102 additions & 18 deletions


README.md

Lines changed: 75 additions & 1 deletion
````diff
@@ -1,3 +1,77 @@
 # ChatGPT Browser API Proxy
 
-Exposes OpenAI APIs using ChatGPT Browser API.
+The **ChatGPT Browser API Proxy** is a project that enables the use of OpenAI APIs by leveraging the unofficial ChatGPT browser API while bypassing Cloudflare anti-bot detection. This proxy allows you to make API requests to OpenAI's services directly from your local machine.
+
+## Prerequisites
+
+Before using this API proxy, ensure that you have the following:
+
+- Node.js installed on your machine
+- Yarn package manager installed
+
+## Getting Started
+
+To set up and use the ChatGPT Browser API Proxy, follow these steps:
+
+1. Clone the repository to your local machine:
+
+```shell
+git clone https://github.com/rpidanny/chatgpt-browser-api-proxy.git
+```
+
+2. Navigate to the project directory:
+
+```shell
+cd chatgpt-browser-api-proxy
+```
+
+3. Install the project dependencies:
+
+```shell
+yarn install
+```
+
+4. Copy the example environment file and rename it to `.env`:
+
+```shell
+cp example.env .env
+```
+
+5. Open the `.env` file and add your OpenAI Access Token obtained from the OpenAI platform. Replace `<YOUR_ACCESS_TOKEN>` with your actual token.
+
+6. Start the proxy server in development mode:
+
+```shell
+yarn start:dev
+```
+
+*Note:*
+
+You can get an Access Token by logging in to the ChatGPT webapp and then opening `https://chat.openai.com/api/auth/session`, which will return a JSON object containing your Access Token string.
+
+Access tokens last for a few days.
+
+## Configuring for LangChain
+
+If you intend to use the proxy with _LangChain_, you need to set the `OPENAI_API_BASE` environment variable to specify the API base URL:
+
+```shell
+export OPENAI_API_BASE=http://localhost:3000/v1
+```
+
+## Making API Requests
+
+Once the proxy server is running, you can make API requests to OpenAI's services using the provided routes and endpoints. The proxy will handle the communication with the unofficial ChatGPT browser API and forward the responses to your local machine.
+
+## Notes
+
+- This project is an unofficial implementation and may not provide the same level of reliability or stability as the official OpenAI APIs.
+- Usage of this project may be subject to OpenAI's terms of service. Please ensure compliance with their guidelines and policies.
+
+## Disclaimer
+
+This project is provided as-is, without any warranty or guarantee of its functionality. The developers and contributors are not responsible for any damages or issues arising from the use of this project.
+
+## License
+
+This project is licensed under the [MIT License](LICENSE).
````
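As a concrete illustration of the "Making API Requests" section above, here is a minimal sketch of a request against the proxy. It assumes the server listens on `http://localhost:3000` and exposes an OpenAI-compatible chat completions route under `/v1` (consistent with the `OPENAI_API_BASE` setting shown for LangChain); the exact path and payload shape are assumptions, not something this commit confirms.

```shell
# Hypothetical request against the local proxy; the /v1/chat/completions path
# and the payload shape are assumed from the OpenAI-compatible base URL above.
curl http://localhost:3000/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
    "model": "gpt-3.5-turbo",
    "messages": [{ "role": "user", "content": "Say hello from the proxy." }]
  }'
```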

example.env

Lines changed: 1 addition & 1 deletion
```diff
@@ -1 +1 @@
-OPENAI_API_TOKEN=<Your OpenAI API Token>
+OPENAI_API_TOKEN=<YOUR_ACCESS_TOKEN>
```

src/global/clients/chatgpt.client.ts

Lines changed: 21 additions & 12 deletions
```diff
@@ -1,17 +1,20 @@
 /* eslint-disable camelcase */
-import { Injectable } from '@nestjs/common';
+import { Inject, Injectable, Logger } from '@nestjs/common';
 import { randomUUID } from 'crypto';
 import { Browser, chromium } from 'playwright';
 
 @Injectable()
 export class ChatGPTClient {
   baseUrl = 'https://chat.openai.com';
+  parentMessageId = randomUUID();
   browser: Browser;
 
+  constructor(@Inject(Logger) private readonly logger: Logger) {}
+
   async init() {
-    console.log('Initializing ChatGPTClient');
+    this.logger.log('Initializing ChatGPTClient');
     this.browser = await chromium.launch({ headless: false });
-    console.log('ChatGPTClient initialized');
+    this.logger.log('ChatGPTClient initialized');
   }
 
   async conversation(prompt: string): Promise<string> {
@@ -20,7 +23,12 @@ export class ChatGPTClient {
     const headers = this.getHeaders();
     const payload = this.generateConversationPayload(prompt);
 
-    const answer = await this.call('POST', url, headers, payload);
+    const { answer } = await this.call(
+      'POST',
+      url,
+      headers,
+      payload
+    );
 
     return answer;
   }
@@ -47,13 +55,11 @@ export class ChatGPTClient {
          },
        },
      ],
-      parent_message_id: randomUUID(),
-      // conversation_id: "32326597-ad27-470f-85a0-d4514551bd15",
+      parent_message_id: this.parentMessageId,
      model: 'text-davinci-002-render-sha',
      timezone_offset_min: -120,
      history_and_training_disabled: false,
      supports_modapi: true,
-      stream: false
    };
  }
 
@@ -62,12 +68,12 @@
    url: string,
    headers?: Record<string, string>,
    body?: Record<string, unknown>
-  ): Promise<string> {
+  ): Promise<{ answer: string; conversationId: string }> {
    const context = await this.browser.newContext();
    const page = await context.newPage();
    await page.goto(this.baseUrl);
 
-    const answer = await page.evaluate(
+    const { answer, conversationId } = await page.evaluate(
      async ({ method, url, headers, body }) => {
        const textDecoder = new TextDecoder();
 
@@ -85,6 +91,7 @@
 
        let chunk: ReadableStreamReadResult<Uint8Array>;
        let answer = '';
+        let conversationId = '';
 
        while ((chunk = await reader.read()).done === false) {
          textDecoder
@@ -93,25 +100,27 @@
            .map((e) => e.trim().replace(/^\n+/, '').replace(/\n+$/, ''))
            .filter((e) => e.length > 0 || e !== '[DONE]')
            .map((e) => {
+              console.log(e);
              try {
                const parsedEvent = JSON.parse(e);
 
                if (parsedEvent.message.author.role === 'assistant') {
                  answer = parsedEvent.message.content.parts.join(' ');
+                  conversationId = parsedEvent.message.conversation_id;
                  return answer;
                }
                return '';
              } catch (error) {}
            });
        }
 
-        return answer;
+        return { answer, conversationId };
      },
      { method, url, headers, body }
    );
 
-    context.close();
+    await context.close();
 
-    return answer;
+    return { answer, conversationId };
  }
}
```

src/global/global.module.ts

Lines changed: 5 additions & 4 deletions
```diff
@@ -1,4 +1,4 @@
-import { DynamicModule, Global, Module } from '@nestjs/common';
+import { DynamicModule, Global, Logger, Module } from '@nestjs/common';
 
 import { ChatGPTClient } from './clients/chatgpt.client.js';
 
@@ -10,15 +10,16 @@ export class GlobalModule {
      module: GlobalModule,
      imports: [],
      providers: [
+        Logger,
        {
          provide: ChatGPTClient,
-          useFactory: async () => {
-            const client = new ChatGPTClient();
+          useFactory: async (logger: Logger) => {
+            const client = new ChatGPTClient(logger);
            await client.init();
 
            return client;
          },
-          inject: [],
+          inject: [Logger],
        },
      ],
      exports: [ChatGPTClient],
```
