Add chat api
memochou1993 committed Mar 2, 2023
1 parent 1fffc6f commit 5fbbebb
Showing 28 changed files with 106 additions and 78 deletions.
2 changes: 1 addition & 1 deletion .env.example
@@ -6,7 +6,7 @@ APP_WEBHOOK_PATH=
 APP_API_TIMEOUT=
 APP_MAX_GROUPS=
 APP_MAX_USERS=
-APP_MAX_PROMPT_SENTENCES=
+APP_MAX_PROMPT_MESSAGES=
 APP_MAX_PROMPT_TOKENS=
 
 HUMAN_NAME=
12 changes: 6 additions & 6 deletions app/handlers/continue.js
@@ -18,15 +18,15 @@ const exec = (context) => check(context) && (
   async () => {
     updateHistory(context.id, (history) => history.erase());
     const prompt = getPrompt(context.userId);
-    const { lastSentence } = prompt;
-    if (lastSentence.isEnquiring) prompt.erase();
+    const { lastMessage } = prompt;
+    if (lastMessage.isEnquiring) prompt.erase();
     try {
-      const { text, isFinishReasonStop } = await generateCompletion({ prompt: prompt.toString() });
+      const { text, isFinishReasonStop } = await generateCompletion({ prompt });
       prompt.patch(text);
-      if (lastSentence.isEnquiring && !isFinishReasonStop) prompt.write('', lastSentence.text);
+      if (lastMessage.isEnquiring && !isFinishReasonStop) prompt.write('', lastMessage.text);
       setPrompt(context.userId, prompt);
-      if (!lastSentence.isEnquiring) updateHistory(context.id, (history) => history.patch(text));
-      const defaultActions = ALL_COMMANDS.filter(({ type }) => type === lastSentence.text);
+      if (!lastMessage.isEnquiring) updateHistory(context.id, (history) => history.patch(text));
+      const defaultActions = ALL_COMMANDS.filter(({ type }) => type === lastMessage.text);
       const actions = isFinishReasonStop ? defaultActions : [COMMAND_BOT_CONTINUE];
       context.pushText(text, actions);
     } catch (err) {
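Note: the handlers in this commit now hand the whole Prompt object to generateCompletion instead of a flattened string. The generateCompletion wrapper lives in utils/ and is not shown in this diff; the following is only a sketch of how such a wrapper could forward the role-tagged messages to the new chat endpoint. The file name, message mapping, and response handling here are assumptions, not the project's actual implementation.

// utils/generate-completion.js (hypothetical sketch, not the file shipped in this commit)
import { createChatCompletion, FINISH_REASON_STOP } from '../services/openai.js';

const generateCompletion = async ({ prompt }) => {
  // Assumes Prompt exposes its role-tagged entries via `messages`, as app/prompt/prompt.js does.
  const messages = prompt.messages.map(({ role, content }) => ({ role, content }));
  const { data } = await createChatCompletion({ messages });
  const [choice] = data.choices;
  return {
    text: choice.message.content.trim(),
    isFinishReasonStop: choice.finish_reason === FINISH_REASON_STOP,
  };
};

export default generateCompletion;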
9 changes: 5 additions & 4 deletions app/handlers/enquire.js
@@ -5,7 +5,7 @@ import { generateCompletion, getCommand } from '../../utils/index.js';
 import { ALL_COMMANDS, COMMAND_BOT_CONTINUE, ENQUIRE_COMMANDS } from '../commands/index.js';
 import Context from '../context.js';
 import { getHistory, updateHistory } from '../history/index.js';
-import { getPrompt, setPrompt } from '../prompt/index.js';
+import { getPrompt, setPrompt, Prompt } from '../prompt/index.js';
 
 /**
  * @param {Context} context
@@ -26,13 +26,14 @@ const exec = (context) => check(context) && (
     updateHistory(context.id, (history) => history.erase());
     const command = getCommand(context.trimmedText);
     const history = getHistory(context.id);
-    if (!history.lastRecord) return context;
-    const reference = command.type === TYPE_TRANSLATE ? history.lastRecord.text : history.toString();
+    if (!history.lastMessage) return context;
+    const reference = command.type === TYPE_TRANSLATE ? history.lastMessage.text : history.toString();
     const content = `${command.prompt}\n${t('__COMPLETION_QUOTATION_MARK_OPENING')}\n${reference}\n${t('__COMPLETION_QUOTATION_MARK_CLOSING')}`;
+    const partial = (new Prompt()).write(ROLE_HUMAN, content);
     const prompt = getPrompt(context.userId);
     prompt.write(ROLE_HUMAN, content).write(ROLE_AI);
     try {
-      const { text, isFinishReasonStop } = await generateCompletion({ prompt: content });
+      const { text, isFinishReasonStop } = await generateCompletion({ prompt: partial });
       prompt.patch(text);
       if (!isFinishReasonStop) prompt.write('', command.type);
       setPrompt(context.userId, prompt);
2 changes: 1 addition & 1 deletion app/handlers/retry.js
@@ -22,7 +22,7 @@ const exec = (context) => check(context) && (
     const prompt = getPrompt(context.userId);
     prompt.erase().write(ROLE_AI);
     try {
-      const { text, isFinishReasonStop } = await generateCompletion({ prompt: prompt.toString() });
+      const { text, isFinishReasonStop } = await generateCompletion({ prompt });
       prompt.patch(text);
       setPrompt(context.userId, prompt);
       updateHistory(context.id, (history) => history.write(config.BOT_NAME, text));
2 changes: 1 addition & 1 deletion app/handlers/search.js
@@ -30,7 +30,7 @@ const exec = (context) => check(context) && (
     }
     prompt.write(ROLE_HUMAN, `${trimmedText}。`).write(ROLE_AI);
     try {
-      const { text, isFinishReasonStop } = await generateCompletion({ prompt: prompt.toString() });
+      const { text, isFinishReasonStop } = await generateCompletion({ prompt });
       prompt.patch(text);
       setPrompt(context.userId, prompt);
       updateHistory(context.id, (history) => history.write(config.BOT_NAME, text));
2 changes: 1 addition & 1 deletion app/handlers/talk.js
@@ -26,7 +26,7 @@ const exec = (context) => check(context) && (
     const prompt = getPrompt(context.userId);
     prompt.write(ROLE_HUMAN, `${t('__COMPLETION_DEFAULT_AI_TONE')(config.BOT_TONE)}${context.trimmedText}。`).write(ROLE_AI);
     try {
-      const { text, isFinishReasonStop } = await generateCompletion({ prompt: prompt.toString() });
+      const { text, isFinishReasonStop } = await generateCompletion({ prompt });
       prompt.patch(text);
       setPrompt(context.userId, prompt);
       updateHistory(context.id, (history) => history.write(config.BOT_NAME, text));
28 changes: 14 additions & 14 deletions app/history/history.js
@@ -1,18 +1,18 @@
 import { encode } from 'gpt-3-encoder';
 import config from '../../config/index.js';
-import Record from './record.js';
+import Message from './message.js';
 
-const MAX_RECORDS = config.APP_MAX_PROMPT_SENTENCES / 2;
+const MAX_MESSAGES = config.APP_MAX_PROMPT_MESSAGES / 2;
 const MAX_TOKENS = config.APP_MAX_PROMPT_TOKENS / 2;
 
 class History {
-  records = [];
+  messages = [];
 
   /**
-   * @returns {Record}
+   * @returns {Message}
    */
-  get lastRecord() {
-    return this.records.length > 0 ? this.records[this.records.length - 1] : null;
+  get lastMessage() {
+    return this.messages.length > 0 ? this.messages[this.messages.length - 1] : null;
   }
 
   get tokenCount() {
@@ -21,8 +21,8 @@ class History {
   }
 
   erase() {
-    if (this.records.length > 0) {
-      this.records.pop();
+    if (this.messages.length > 0) {
+      this.messages.pop();
     }
     return this;
   }
@@ -32,23 +32,23 @@ class History {
    * @param {string} content
    */
   write(role, content) {
-    if (this.records.length >= MAX_RECORDS || this.tokenCount >= MAX_TOKENS) {
-      this.records.shift();
+    if (this.messages.length >= MAX_MESSAGES || this.tokenCount >= MAX_TOKENS) {
+      this.messages.shift();
     }
-    this.records.push(new Record({ role, content }));
+    this.messages.push(new Message({ role, content }));
     return this;
   }
 
   /**
    * @param {string} content
    */
   patch(content) {
-    if (this.records.length < 1) return;
-    this.records[this.records.length - 1].content += content;
+    if (this.messages.length < 1) return;
+    this.messages[this.messages.length - 1].content += content;
   }
 
   toString() {
-    return this.records.map((record) => record.toString()).join('\n');
+    return this.messages.map((record) => record.toString()).join('\n');
   }
 }
 
8 changes: 4 additions & 4 deletions app/history/index.js
@@ -33,11 +33,11 @@ const removeHistory = (userId) => {
 };
 
 const printHistories = () => {
-  const records = Array.from(histories.keys())
-    .filter((contextId) => getHistory(contextId).records.length > 0)
+  const messages = Array.from(histories.keys())
+    .filter((contextId) => getHistory(contextId).messages.length > 0)
     .map((contextId) => `\n=== ${contextId.slice(0, 6)} ===\n\n${getHistory(contextId).toString()}`);
-  if (records.length < 1) return;
-  console.info(records.join('\n'));
+  if (messages.length < 1) return;
+  console.info(messages.join('\n'));
 };
 
 export {
4 changes: 2 additions & 2 deletions app/history/record.js → app/history/message.js
@@ -1,4 +1,4 @@
-class Record {
+class Message {
   role;
 
   content;
@@ -16,4 +16,4 @@ class Record {
   }
 }
 
-export default Record;
+export default Message;
1 change: 1 addition & 0 deletions app/prompt/index.js
@@ -30,6 +30,7 @@ const printPrompts = () => {
 };
 
 export {
+  Prompt,
   getPrompt,
   setPrompt,
   removePrompt,
4 changes: 2 additions & 2 deletions app/prompt/sentence.js → app/prompt/message.js
@@ -1,6 +1,6 @@
 import { TYPE_SUM, TYPE_ANALYZE, TYPE_TRANSLATE } from '../../constants/command.js';
 
-class Sentence {
+class Message {
   role;
 
   content;
@@ -24,4 +24,4 @@ class Sentence {
   }
 }
 
-export default Sentence;
+export default Message;
33 changes: 17 additions & 16 deletions app/prompt/prompt.js
@@ -1,26 +1,27 @@
 import { encode } from 'gpt-3-encoder';
 import config from '../../config/index.js';
 import { t } from '../../locales/index.js';
-import { ROLE_AI, ROLE_HUMAN } from '../../services/openai.js';
-import Sentence from './sentence.js';
+import { ROLE_AI, ROLE_HUMAN, ROLE_SYSTEM } from '../../services/openai.js';
+import Message from './message.js';
 
-const MAX_SENTENCES = config.APP_MAX_PROMPT_SENTENCES;
+const MAX_MESSAGES = config.APP_MAX_PROMPT_MESSAGES;
 const MAX_TOKENS = config.APP_MAX_PROMPT_TOKENS;
 
 class Prompt {
-  sentences = [];
+  messages = [];
 
   constructor() {
     this
-      .write(ROLE_HUMAN, `${t('__COMPLETION_DEFAULT_HUMAN_GREETING')(config.HUMAN_NAME)}${config.HUMAN_INIT_PROMPT}。`)
-      .write(ROLE_AI, `${t('__COMPLETION_DEFAULT_AI_GREETING')(config.BOT_NAME)}${config.BOT_INIT_PROMPT}。`);
+      .write(ROLE_SYSTEM, `${t('__COMPLETION_DEFAULT_SYSTEM_PROMPT')}`)
+      .write(ROLE_HUMAN, `${t('__COMPLETION_DEFAULT_HUMAN_PROMPT')(config.HUMAN_NAME)}${config.HUMAN_INIT_PROMPT}。`)
+      .write(ROLE_AI, `${t('__COMPLETION_DEFAULT_AI_PROMPT')(config.BOT_NAME)}${config.BOT_INIT_PROMPT}。`);
   }
 
   /**
-   * @returns {Sentence}
+   * @returns {Message}
    */
-  get lastSentence() {
-    return this.sentences.length > 0 ? this.sentences[this.sentences.length - 1] : null;
+  get lastMessage() {
+    return this.messages.length > 0 ? this.messages[this.messages.length - 1] : null;
   }
 
   get tokenCount() {
@@ -29,8 +30,8 @@ class Prompt {
   }
 
   erase() {
-    if (this.sentences.length > 0) {
-      this.sentences.pop();
+    if (this.messages.length > 0) {
+      this.messages.pop();
     }
     return this;
   }
@@ -40,22 +41,22 @@ class Prompt {
    * @param {string} content
    */
   write(role, content = '') {
-    if (this.sentences.length >= MAX_SENTENCES || this.tokenCount >= MAX_TOKENS) {
-      this.sentences.splice(2, 1);
+    if (this.messages.length >= MAX_MESSAGES || this.tokenCount >= MAX_TOKENS) {
+      this.messages.splice(2, 1);
     }
-    this.sentences.push(new Sentence({ role, content }));
+    this.messages.push(new Message({ role, content }));
     return this;
   }
 
   /**
    * @param {string} content
   */
   patch(content) {
-    this.sentences[this.sentences.length - 1].content += content;
+    this.messages[this.messages.length - 1].content += content;
   }
 
   toString() {
-    return this.sentences.map((sentence) => sentence.toString()).join('');
+    return this.messages.map((sentence) => sentence.toString()).join('');
   }
 }
 
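For orientation: a fresh Prompt now seeds three messages (system, user, assistant) before any conversation turn is written, which is also why the test expectations further down grow by one. A short usage sketch under that reading; the import paths assume the repository root, and the seeded contents shown in the comments are illustrative, taken from the locale strings and config defaults.

import { ROLE_AI, ROLE_HUMAN } from './services/openai.js';
import { Prompt } from './app/prompt/index.js';

const prompt = new Prompt();
// prompt.messages now starts with three seed entries, roughly:
//   { role: 'system',    content: <__COMPLETION_DEFAULT_SYSTEM_PROMPT> }
//   { role: 'user',      content: <__COMPLETION_DEFAULT_HUMAN_PROMPT + HUMAN_INIT_PROMPT> }
//   { role: 'assistant', content: <__COMPLETION_DEFAULT_AI_PROMPT + BOT_INIT_PROMPT> }
prompt.write(ROLE_HUMAN, 'How are you?').write(ROLE_AI); // one user turn plus an empty assistant slot
prompt.patch('Fine, thanks.'); // the API reply is appended to that trailing assistant message
console.info(prompt.messages.length); // 5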
4 changes: 2 additions & 2 deletions config/index.js
@@ -16,7 +16,7 @@ const config = Object.freeze({
   APP_API_TIMEOUT: env.APP_API_TIMEOUT || 9000,
   APP_MAX_GROUPS: Number(env.APP_MAX_GROUPS) || 1,
   APP_MAX_USERS: Number(env.APP_MAX_USERS) || 5,
-  APP_MAX_PROMPT_SENTENCES: Number(env.APP_MAX_PROMPT_SENTENCES) || 12,
+  APP_MAX_PROMPT_MESSAGES: Number(env.APP_MAX_PROMPT_MESSAGES) || 12,
   APP_MAX_PROMPT_TOKENS: Number(env.APP_MAX_PROMPT_TOKENS) || 1024,
   HUMAN_NAME: env.HUMAN_NAME || '',
   HUMAN_INIT_PROMPT: env.HUMAN_INIT_PROMPT || '',
@@ -32,7 +32,7 @@ const config = Object.freeze({
   VERCEL_DEPLOY_HOOK_URL: env.VERCEL_DEPLOY_HOOK_URL || null,
   OPENAI_TIMEOUT: env.OPENAI_TIMEOUT || env.APP_API_TIMEOUT,
   OPENAI_API_KEY: env.OPENAI_API_KEY || null,
-  OPENAI_COMPLETION_MODEL: env.OPENAI_COMPLETION_MODEL || 'text-davinci-003',
+  OPENAI_COMPLETION_MODEL: env.OPENAI_COMPLETION_MODEL || 'gpt-3.5-turbo',
   OPENAI_COMPLETION_TEMPERATURE: Number(env.OPENAI_COMPLETION_TEMPERATURE) || 0.9,
   OPENAI_COMPLETION_MAX_TOKENS: Number(env.OPENAI_COMPLETION_MAX_TOKENS) || 160,
   OPENAI_COMPLETION_FREQUENCY_PENALTY: Number(env.OPENAI_COMPLETION_FREQUENCY_PENALTY) || 0,
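Existing deployments that pinned the old variable in their environment need to rename it, and the completion model default moves from text-davinci-003 to gpt-3.5-turbo. An illustrative .env fragment with the new names (values are examples, not requirements):

APP_MAX_PROMPT_MESSAGES=12
OPENAI_COMPLETION_MODEL=gpt-3.5-turbo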
5 changes: 3 additions & 2 deletions locales/en.js
@@ -95,8 +95,9 @@ const en = {
   __COMMAND_TRANSLATE_TO_JA_LABEL: '翻成日文', // TODO
   __COMMAND_TRANSLATE_TO_JA_TEXT: '翻成日文', // TODO
   __COMMAND_TRANSLATE_TO_JA_PROMPT: '請將以下內容翻譯成日文。', // TODO
-  __COMPLETION_DEFAULT_HUMAN_GREETING: (name) => (name ? `I am ${name}.` : 'Hello!'),
-  __COMPLETION_DEFAULT_AI_GREETING: (name) => (name ? `I am ${name}.` : 'Hello!'),
+  __COMPLETION_DEFAULT_SYSTEM_PROMPT: '', // TODO
+  __COMPLETION_DEFAULT_HUMAN_PROMPT: (name) => (name ? `I am ${name}.` : 'Hello!'),
+  __COMPLETION_DEFAULT_AI_PROMPT: (name) => (name ? `I am ${name}.` : 'Hello!'),
   __COMPLETION_DEFAULT_AI_TONE: (tone) => (tone ? `以${tone}的語氣回應我:` : ''), // TODO
   __COMPLETION_SEARCH: (a, q) => `根據「${a}」查詢結果,回答「${q}」問題`, // TODO
   __COMPLETION_SEARCH_NOT_FOUND: '查無資料', // TODO
5 changes: 3 additions & 2 deletions locales/ja.js
@@ -95,8 +95,9 @@ const ja = {
   __COMMAND_TRANSLATE_TO_JA_LABEL: '翻成日文', // TODO
   __COMMAND_TRANSLATE_TO_JA_TEXT: '翻成日文', // TODO
   __COMMAND_TRANSLATE_TO_JA_PROMPT: '請將以下內容翻譯成日文。', // TODO
-  __COMPLETION_DEFAULT_HUMAN_GREETING: (name) => (name ? `私は${name}です。` : 'こんにちは。'),
-  __COMPLETION_DEFAULT_AI_GREETING: (name) => (name ? `私は${name}です。` : 'こんにちは。'),
+  __COMPLETION_DEFAULT_SYSTEM_PROMPT: '', // TODO
+  __COMPLETION_DEFAULT_HUMAN_PROMPT: (name) => (name ? `私は${name}です。` : 'こんにちは。'),
+  __COMPLETION_DEFAULT_AI_PROMPT: (name) => (name ? `私は${name}です。` : 'こんにちは。'),
   __COMPLETION_DEFAULT_AI_TONE: (tone) => (tone ? `以${tone}的語氣回應我:` : ''), // TODO
   __COMPLETION_SEARCH: (a, q) => `根據「${a}」查詢結果,回答「${q}」問題`, // TODO
   __COMPLETION_SEARCH_NOT_FOUND: '查無資料', // TODO
5 changes: 3 additions & 2 deletions locales/zh.js
@@ -95,8 +95,9 @@ const zh = {
   __COMMAND_TRANSLATE_TO_JA_LABEL: '翻成日文',
   __COMMAND_TRANSLATE_TO_JA_TEXT: '翻成日文',
   __COMMAND_TRANSLATE_TO_JA_PROMPT: '將以下內容翻譯成日文。',
-  __COMPLETION_DEFAULT_HUMAN_GREETING: (name) => (name ? `我是${name}。` : '哈囉!'),
-  __COMPLETION_DEFAULT_AI_GREETING: (name) => (name ? `我是${name}。` : '哈囉!'),
+  __COMPLETION_DEFAULT_SYSTEM_PROMPT: '以下將使用繁體中文進行對話。',
+  __COMPLETION_DEFAULT_HUMAN_PROMPT: (name) => (name ? `我是${name}。` : '哈囉!'),
+  __COMPLETION_DEFAULT_AI_PROMPT: (name) => (name ? `我是${name}。` : '哈囉!'),
   __COMPLETION_DEFAULT_AI_TONE: (tone) => (tone ? `以${tone}的語氣回應我:` : ''),
   __COMPLETION_SEARCH: (a, q) => `根據「${a}」查詢結果,回答「${q}」問題`,
   __COMPLETION_SEARCH_NOT_FOUND: '查無資料',
19 changes: 15 additions & 4 deletions services/openai.js
@@ -1,8 +1,9 @@
 import axios from 'axios';
 import config from '../config/index.js';
 
-export const ROLE_AI = 'AI';
-export const ROLE_HUMAN = 'Human';
+export const ROLE_SYSTEM = 'system';
+export const ROLE_AI = 'assistant';
+export const ROLE_HUMAN = 'user';
 
 export const FINISH_REASON_STOP = 'stop';
 export const FINISH_REASON_LENGTH = 'length';
@@ -11,6 +12,8 @@ export const IMAGE_SIZE_256 = '256x256';
 export const IMAGE_SIZE_512 = '512x512';
 export const IMAGE_SIZE_1024 = '1024x1024';
 
+export const MODEL_GPT_3_5_TURBO = 'gpt-3.5-turbo';
+
 const instance = axios.create({
   baseURL: 'https://api.openai.com',
   timeout: config.OPENAI_TIMEOUT,
@@ -24,7 +27,14 @@ instance.interceptors.request.use((c) => {
   return c;
 });
 
-const createCompletion = ({
+const createChatCompletion = ({
+  messages,
+}) => instance.post('/v1/chat/completions', {
+  model: 'gpt-3.5-turbo',
+  messages,
+});
+
+const createTextCompletion = ({
   model = config.OPENAI_COMPLETION_MODEL,
   prompt,
   temperature = config.OPENAI_COMPLETION_TEMPERATURE,
@@ -56,6 +66,7 @@ const createImage = ({
 });
 
 export {
-  createCompletion,
+  createChatCompletion,
+  createTextCompletion,
   createImage,
 };
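The new createChatCompletion wraps POST /v1/chat/completions and takes an array of role-tagged messages. A minimal direct call, assuming a valid OPENAI_API_KEY is configured and the snippet runs as an ES module from the repository root; the message contents are illustrative.

import { createChatCompletion, ROLE_SYSTEM, ROLE_HUMAN } from './services/openai.js';

const { data } = await createChatCompletion({
  messages: [
    { role: ROLE_SYSTEM, content: 'You are a helpful assistant.' },
    { role: ROLE_HUMAN, content: 'Hello!' }, // ROLE_HUMAN now maps to the chat API's 'user' role
  ],
});
console.info(data.choices[0].message.content);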
2 changes: 1 addition & 1 deletion tests/activate.test.js
@@ -29,7 +29,7 @@ test('COMMAND_BOT_ACTIVATE', async () => {
   } catch (err) {
     console.error(err);
   }
-  expect(getPrompt(MOCK_USER_01).sentences.length).toEqual(4);
+  expect(getPrompt(MOCK_USER_01).messages.length).toEqual(5);
   const replies = results.map(({ messages }) => messages.map(({ text }) => text));
   expect(replies).toEqual(
     [
2 changes: 1 addition & 1 deletion tests/command.test.js
@@ -26,7 +26,7 @@ test('COMMAND_SYS_COMMAND', async () => {
   } catch (err) {
     console.error(err);
   }
-  expect(getPrompt(MOCK_USER_01).sentences.length).toEqual(2);
+  expect(getPrompt(MOCK_USER_01).messages.length).toEqual(3);
   const replies = results.map(({ messages }) => messages.map(({ altText }) => altText));
   expect(replies).toEqual(
     [