feat: add experimental ChatGPT browser client (waylaidwanderer#95)
* feat: add browser client

* fix: remove unused function

* feat: add browser client to api server and update docs

* fix: update demo to use stdout write
waylaidwanderer authored Feb 18, 2023
1 parent 98eec6e commit 4be0a6a
Showing 6 changed files with 248 additions and 2 deletions.
7 changes: 7 additions & 0 deletions bin/server.js
@@ -5,6 +5,7 @@ import { FastifySSEPlugin } from "fastify-sse-v2";
import fs from 'fs';
import { pathToFileURL } from 'url'
import ChatGPTClient from '../src/ChatGPTClient.js';
import ChatGPTBrowserClient from '../src/ChatGPTBrowserClient.js';
import BingAIClient from '../src/BingAIClient.js';
import { KeyvFile } from 'keyv-file';

@@ -50,6 +51,12 @@ switch (clientToUse) {
    case 'bing':
        client = new BingAIClient(settings.bingAiClient);
        break;
    case 'chatgpt-browser':
        client = new ChatGPTBrowserClient(
            settings.chatGptBrowserClient,
            settings.cacheOptions,
        );
        break;
    default:
        client = new ChatGPTClient(
            settings.openaiApiKey,
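With this case in place, the API server constructs a ChatGPTBrowserClient whenever `clientToUse` resolves to 'chatgpt-browser'. A minimal sketch of the relevant settings follows; where exactly `clientToUse` lives in the settings object is an assumption here, so compare against settings.example.js in your checkout:

// settings.js (sketch only; the placement of clientToUse is an assumption)
export default {
    clientToUse: 'chatgpt-browser', // tells bin/server.js to pick the new browser client
    chatGptBrowserClient: {
        reverseProxyUrl: 'https://chatgpt.duti.tech/api/conversation',
        accessToken: '', // from https://chat.openai.com/api/auth/session
        cookies: '',
    },
    // Passed as the second argument to the client; see src/ChatGPTBrowserClient.js below.
    cacheOptions: {},
};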
32 changes: 32 additions & 0 deletions demos/use-browser-client.js
@@ -0,0 +1,32 @@
// import { ChatGPTBrowserClient } from '@waylaidwanderer/chatgpt-api';
import { ChatGPTBrowserClient } from '../index.js';

const clientOptions = {
    // (Optional) Support for a reverse proxy for the conversation endpoint (private API server).
    // Warning: This will expose your access token to a third party. Consider the risks before using this.
    reverseProxyUrl: 'https://chatgpt.duti.tech/api/conversation',
    // Access token from https://chat.openai.com/api/auth/session
    accessToken: '',
    // Cookies from chat.openai.com (likely not required if using a reverse proxy server).
    cookies: '',
    // (Optional) Set to true to enable `console.debug()` logging
    // debug: true,
};

const chatGptClient = new ChatGPTBrowserClient(clientOptions);

const response = await chatGptClient.sendMessage('Hello!');
console.log(response); // { response: 'Hi! How can I help you today?', conversationId: '...', messageId: '...' }

const response2 = await chatGptClient.sendMessage('Write a poem about cats.', { conversationId: response.conversationId, parentMessageId: response.messageId });
console.log(response2.response); // Cats are the best pets in the world.

const response3 = await chatGptClient.sendMessage('Now write it in French.', {
    conversationId: response2.conversationId,
    parentMessageId: response2.messageId,
    // If you want streamed responses, you can set the `onProgress` callback to receive the response as it's generated.
    // You will receive one token at a time, so you will need to concatenate them yourself.
    onProgress: (token) => process.stdout.write(token),
});
console.log();
console.log(response3.response); // Les chats sont les meilleurs animaux de compagnie du monde.
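Because `onProgress` delivers the reply one token at a time, and the browser client also emits a literal '[DONE]' token when the stream ends, you can accumulate the streamed tokens yourself instead of writing them to stdout. A minimal sketch continuing the demo above (the prompt text is just an example):

// Sketch: collect streamed tokens into a string instead of printing them.
let streamed = '';
const response4 = await chatGptClient.sendMessage('Now write it in German.', {
    conversationId: response3.conversationId,
    parentMessageId: response3.messageId,
    onProgress: (token) => {
        if (token === '[DONE]') {
            return; // the client signals the end of the stream with a literal '[DONE]'
        }
        streamed += token;
    },
});
console.log(streamed.trim() === response4.response); // should print true; sendMessage trims the final reply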
3 changes: 2 additions & 1 deletion demos/use-client.js
@@ -40,6 +40,7 @@ const response3 = await chatGptClient.sendMessage('Now write it in French.', {
    parentMessageId: response2.messageId,
    // If you want streamed responses, you can set the `onProgress` callback to receive the response as it's generated.
    // You will receive one token at a time, so you will need to concatenate them yourself.
-    onProgress: (token) => console.log(token),
+    onProgress: (token) => process.stdout.write(token),
});
+console.log();
console.log(response3.response); // Les chats sont les meilleurs animaux de compagnie du monde.
3 changes: 2 additions & 1 deletion index.js
@@ -1,5 +1,6 @@
import ChatGPTClient from './src/ChatGPTClient.js';
+import ChatGPTBrowserClient from './src/ChatGPTBrowserClient.js';
import BingAIClient from './src/BingAIClient.js';

-export { ChatGPTClient, BingAIClient };
+export { ChatGPTClient, ChatGPTBrowserClient, BingAIClient };
export default ChatGPTClient;
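With `ChatGPTBrowserClient` exported from index.js, consumers of the published package can import it directly. A quick sketch, assuming the package is installed under the name referenced in the demo above (`@waylaidwanderer/chatgpt-api`) and that the access token is supplied via a hypothetical environment variable:

// Sketch: using the new export from the published package instead of a local checkout.
import { ChatGPTBrowserClient } from '@waylaidwanderer/chatgpt-api';

const client = new ChatGPTBrowserClient({
    accessToken: process.env.CHATGPT_ACCESS_TOKEN, // hypothetical env var; token comes from https://chat.openai.com/api/auth/session
});

const { response, conversationId, messageId } = await client.sendMessage('Hello!');
console.log(response, conversationId, messageId);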
11 changes: 11 additions & 0 deletions settings.example.js
@@ -36,6 +36,17 @@ export default {
        // (Optional) Set to true to enable `console.debug()` logging
        debug: false,
    },
    chatGptBrowserClient: {
        // (Optional) Support for a reverse proxy for the conversation endpoint (private API server).
        // Warning: This will expose your access token to a third party. Consider the risks before using this.
        reverseProxyUrl: 'https://chatgpt.duti.tech/api/conversation',
        // Access token from https://chat.openai.com/api/auth/session
        accessToken: '',
        // Cookies from chat.openai.com (likely not required if using a reverse proxy server).
        cookies: '',
        // (Optional) Set to true to enable `console.debug()` logging
        // debug: true,
    },
    // Options for the API server
    apiOptions: {
        port: process.env.API_PORT || 3000,
194 changes: 194 additions & 0 deletions src/ChatGPTBrowserClient.js
@@ -0,0 +1,194 @@
import './fetch-polyfill.js';
import crypto from 'crypto';
import Keyv from 'keyv';
import { fetchEventSource } from '@waylaidwanderer/fetch-event-source';

export default class ChatGPTBrowserClient {
    constructor(
        options = {},
        cacheOptions = {},
    ) {
        this.options = options;
        this.accessToken = options.accessToken;
        this.cookies = options.cookies;
        this.model = options.model || 'text-davinci-002-render-sha';

        cacheOptions.namespace = cacheOptions.namespace || 'chatgpt-browser';
        this.conversationsCache = new Keyv(cacheOptions);
    }

    async postConversation(conversation, onProgress) {
        const {
            action = 'next',
            conversationId,
            parentMessageId = crypto.randomUUID(),
            message,
        } = conversation;

        const debug = this.options.debug;
        const url = this.options.reverseProxyUrl || 'https://chat.openai.com/backend-api/conversation';
        const opts = {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${this.accessToken}`,
                Cookie: this.cookies || undefined,
            },
            body: JSON.stringify({
                conversation_id: conversationId,
                action,
                messages: message ? [
                    {
                        id: crypto.randomUUID(),
                        role: 'user',
                        content: {
                            content_type: 'text',
                            parts: [message],
                        },
                    }
                ] : undefined,
                parent_message_id: parentMessageId,
                model: this.model,
            }),
        };
        if (debug) {
            console.debug();
            console.debug(url);
            console.debug(opts);
            console.debug();
        }
        // data: {"message": {"id": "UUID", "role": "assistant", "user": null, "create_time": null, "update_time": null, "content": {"content_type": "text", "parts": ["That's alright! If you don't have a specific question or topic in mind, I can suggest some general conversation starters or topics to explore. \n\nFor example, we could talk about your interests, hobbies, or goals. Alternatively, we could discuss current events, pop culture, or science and technology. Is there anything in particular that you're curious about or would like to learn more about?"]}, "end_turn": true, "weight": 1.0, "metadata": {"message_type": "next", "model_slug": "text-davinci-002-render-sha", "finish_details": {"type": "stop", "stop": "<|im_end|>"}}, "recipient": "all"}, "conversation_id": "UUID", "error": null}
        return new Promise(async (resolve, reject) => {
            let lastEvent = null;
            const controller = new AbortController();
            try {
                let done = false;
                await fetchEventSource(url, {
                    ...opts,
                    signal: controller.signal,
                    async onopen(response) {
                        if (response.status === 200) {
                            return;
                        }
                        if (debug) {
                            console.debug(response);
                        }
                        let error;
                        try {
                            const body = await response.text();
                            error = new Error(`Failed to send message. HTTP ${response.status} - ${body}`);
                            error.status = response.status;
                            error.json = JSON.parse(body);
                        } catch {
                            error = error || new Error(`Failed to send message. HTTP ${response.status}`);
                        }
                        throw error;
                    },
                    onclose() {
                        if (debug) {
                            console.debug('Server closed the connection unexpectedly, returning...');
                        }
                        if (!done) {
                            onProgress('[DONE]');
                            controller.abort();
                            resolve(lastEvent);
                        }
                    },
                    onerror(err) {
                        if (debug) {
                            console.debug(err);
                        }
                        // rethrow to stop the operation
                        throw err;
                    },
                    onmessage(message) {
                        if (debug) {
                            console.debug(message);
                        }
                        if (!message.data) {
                            return;
                        }
                        if (message.data === '[DONE]') {
                            onProgress('[DONE]');
                            controller.abort();
                            resolve(lastEvent);
                            done = true;
                            return;
                        }
                        const lastMessage = lastEvent ? lastEvent.message.content.parts[0] : '';
                        const data = JSON.parse(message.data);
                        const newMessage = data.message.content.parts[0];
                        // get the difference between the current text and the previous text
                        const difference = newMessage.substring(lastMessage.length);
                        lastEvent = data;
                        onProgress(difference);
                    },
                });
            } catch (err) {
                reject(err);
            }
        });
    }

    async sendMessage(
        message,
        opts = {},
    ) {
        let conversationId = opts.conversationId;
        const parentMessageId = opts.parentMessageId || crypto.randomUUID();

        let conversation;
        if (conversationId) {
            conversation = await this.conversationsCache.get(conversationId);
        } else {
            conversation = {
                messages: [],
                createdAt: Date.now(),
            };
        }

        const userMessage = {
            id: crypto.randomUUID(),
            parentMessageId,
            role: 'User',
            message,
        };

        conversation.messages.push(userMessage);

        const result = await this.postConversation(
            {
                conversationId,
                parentMessageId,
                message,
            },
            opts.onProgress || (() => {}),
        );

        if (this.options.debug) {
            console.debug(JSON.stringify(result));
            console.debug();
        }

        conversationId = result.conversation_id;
        const reply = result.message.content.parts[0].trim();

        const replyMessage = {
            id: result.message.id,
            parentMessageId: userMessage.id,
            role: 'ChatGPT',
            message: reply,
        };

        conversation.messages.push(replyMessage);

        await this.conversationsCache.set(conversationId, conversation);

        return {
            response: replyMessage.message,
            conversationId,
            messageId: replyMessage.id,
            details: result,
        };
    }
}
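The constructor's second argument is passed straight to `new Keyv(cacheOptions)`, so conversation history can be persisted the same way bin/server.js does for the other clients. A minimal sketch using the keyv-file store the server already imports; the filename is an arbitrary choice:

// Sketch: give the browser client a file-backed conversation cache via keyv-file.
import { KeyvFile } from 'keyv-file';
import { ChatGPTBrowserClient } from '../index.js';

const client = new ChatGPTBrowserClient(
    {
        accessToken: '', // from https://chat.openai.com/api/auth/session
    },
    {
        // Forwarded to Keyv; the namespace defaults to 'chatgpt-browser' (see the constructor above).
        store: new KeyvFile({ filename: './cache.json' }),
    },
);

const { response, conversationId } = await client.sendMessage('Hello!');
console.log(response, conversationId);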
