Skip to content

Commit

Permalink
refactor: delete unused GPT model select component.
Browse files Browse the repository at this point in the history
  • Loading branch information
guangzhengli committed Jun 28, 2023
1 parent 5510005 commit 2c2bbd2
Show file tree
Hide file tree
Showing 7 changed files with 9 additions and 146 deletions.
26 changes: 2 additions & 24 deletions components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
@@ -1,20 +1,18 @@
import {Conversation, ErrorMessage, KeyConfiguration, KeyValuePair, Message, OpenAIModel,} from '@/types';
import {Conversation, ErrorMessage, KeyConfiguration, KeyValuePair, Message,} from '@/types';
import {throttle} from '@/utils';
import {IconClearAll, IconSettings} from '@tabler/icons-react';
import {IconClearAll} from '@tabler/icons-react';
import {useTranslation} from 'next-i18next';
import {FC, memo, MutableRefObject, useEffect, useRef, useState} from 'react';
import {ChatInput} from './ChatInput';
import {ChatLoader} from './ChatLoader';
import {ChatMessage} from './ChatMessage';
import {ErrorMessageDiv} from './ErrorMessageDiv';
import {ModelSelect} from './ModelSelect';
import {Upload} from "@/components/Chat/Upload";
import {CHAT_FILES_MAX_SIZE} from "@/utils/app/const";
import {humanFileSize} from "@/utils/app/files";

interface Props {
conversation: Conversation;
models: OpenAIModel[];
keyConfiguration: KeyConfiguration;
messageIsStreaming: boolean;
modelError: ErrorMessage | null;
Expand All @@ -33,7 +31,6 @@ interface Props {
export const Chat: FC<Props> = memo(
({
conversation,
models,
keyConfiguration,
messageIsStreaming,
modelError,
Expand Down Expand Up @@ -221,24 +218,6 @@ export const Chat: FC<Props> = memo(
size={18}
/>
</div>
{showSettings && (
<div className="mx-auto flex w-[200px] flex-col space-y-10 pt-8 sm:w-[300px]">
<div
className="flex h-full flex-col space-y-4 rounded border border-neutral-500 p-2">
<ModelSelect
model={conversation.model}
models={models}
onModelChange={(model) =>
onUpdateConversation(conversation, {
key: 'model',
value: model,
})
}
/>
</div>
</div>
)}

{conversation.messages.map((message, index) => (
<ChatMessage
key={index}
Expand All @@ -263,7 +242,6 @@ export const Chat: FC<Props> = memo(
textareaRef={textareaRef}
messageIsStreaming={messageIsStreaming}
conversationIsEmpty={conversation.messages.length > 0}
model={conversation.model}
onSend={(message) => {
setCurrentMessage(message);
onSend(message);
Expand Down
6 changes: 2 additions & 4 deletions components/Chat/ChatInput.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import {Message, OpenAIModel, OpenAIModelID} from '@/types';
import {Message} from '@/types';
import {IconPlayerStop, IconRepeat, IconSend} from '@tabler/icons-react';
import {
FC,
Expand All @@ -11,7 +11,6 @@ import {useTranslation} from 'next-i18next';

interface Props {
messageIsStreaming: boolean;
model: OpenAIModel;
conversationIsEmpty: boolean;
onSend: (message: Message) => void;
onRegenerate: () => void;
Expand All @@ -22,7 +21,6 @@ interface Props {

export const ChatInput: FC<Props> = ({
messageIsStreaming,
model,
conversationIsEmpty,
onSend,
onRegenerate,
Expand All @@ -36,7 +34,7 @@ export const ChatInput: FC<Props> = ({

const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
const value = e.target.value;
const maxLength = model.id === OpenAIModelID.GPT_3_5 ? 12000 : 24000;
const maxLength = 12000;

if (value.length > maxLength) {
alert(
Expand Down
44 changes: 0 additions & 44 deletions components/Chat/ModelSelect.tsx

This file was deleted.

1 change: 0 additions & 1 deletion components/Chat/Upload.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,6 @@ export const Upload = ({
throw new Error(`save embedding failed: ' ${message}`);
}
});

}

const deleteFile = async (fileTempName: string) => {
Expand Down
31 changes: 3 additions & 28 deletions pages/index.tsx
Original file line number Diff line number Diff line change
@@ -1,29 +1,10 @@
import {Chat} from '@/components/Chat/Chat';
import {Navbar} from '@/components/Mobile/Navbar';
import {Sidebar} from '@/components/Sidebar/Sidebar';
import {
ChatBody,
ChatFolder,
Conversation,
ErrorMessage,
KeyConfiguration,
KeyValuePair,
Message,
ModelType,
OpenAIModel,
OpenAIModelID,
OpenAIModels,
} from '@/types';
import {
cleanConversationHistory,
cleanSelectedConversation,
} from '@/utils/app/clean';
import {ChatFolder, Conversation, ErrorMessage, KeyConfiguration, KeyValuePair, Message, ModelType,} from '@/types';
import {cleanConversationHistory, cleanSelectedConversation,} from '@/utils/app/clean';
import {DEFAULT_SYSTEM_PROMPT} from '@/utils/app/const';
import {
saveConversation,
saveConversations,
updateConversation,
} from '@/utils/app/conversation';
import {saveConversation, saveConversations, updateConversation,} from '@/utils/app/conversation';
import {saveFolders} from '@/utils/app/folders';
import {exportData, importData} from '@/utils/app/importExport';
import {IconArrowBarRight} from '@tabler/icons-react';
Expand All @@ -45,7 +26,6 @@ const Home: React.FC<HomeProps> = ({serverSideApiKeyIsSet}) => {
const [selectedConversation, setSelectedConversation] =
useState<Conversation>();
const [loading, setLoading] = useState<boolean>(false);
const [models, setModels] = useState<OpenAIModel[]>([]);
const [lightMode, setLightMode] = useState<'dark' | 'light'>('dark');
const [messageIsStreaming, setMessageIsStreaming] = useState<boolean>(false);
const [showSidebar, setShowSidebar] = useState<boolean>(true);
Expand Down Expand Up @@ -354,7 +334,6 @@ const Home: React.FC<HomeProps> = ({serverSideApiKeyIsSet}) => {
lastConversation ? lastConversation.id + 1 : 1
}`,
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: 0,
index: {
Expand Down Expand Up @@ -391,7 +370,6 @@ const Home: React.FC<HomeProps> = ({serverSideApiKeyIsSet}) => {
id: 1,
name: 'New conversation',
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: 0,
index: {
Expand Down Expand Up @@ -429,7 +407,6 @@ const Home: React.FC<HomeProps> = ({serverSideApiKeyIsSet}) => {
id: 1,
name: 'New conversation',
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: 0,
index: {
Expand Down Expand Up @@ -526,7 +503,6 @@ const Home: React.FC<HomeProps> = ({serverSideApiKeyIsSet}) => {
id: 1,
name: 'New conversation',
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5],
prompt: DEFAULT_SYSTEM_PROMPT,
folderId: 0,
index: {
Expand Down Expand Up @@ -604,7 +580,6 @@ const Home: React.FC<HomeProps> = ({serverSideApiKeyIsSet}) => {
keyConfiguration={keyConfiguration}
modelError={modelError}
messageError={messageError}
models={models}
loading={loading}
onSend={handleSend}
onUpdateConversation={handleUpdateConversation}
Expand Down
22 changes: 0 additions & 22 deletions types/index.ts
Original file line number Diff line number Diff line change
@@ -1,29 +1,8 @@
export interface OpenAIModel {
id: string;
name: string;
}

export enum OpenAIModelID {
GPT_3_5 = 'gpt-3.5-turbo',
GPT_4 = 'gpt-4',
}

export enum ModelType {
OPENAI = 'OPENAI',
AZURE_OPENAI = 'AZURE_OPENAI',
}

export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
[OpenAIModelID.GPT_3_5]: {
id: OpenAIModelID.GPT_3_5,
name: 'Default (GPT-3.5)',
},
[OpenAIModelID.GPT_4]: {
id: OpenAIModelID.GPT_4,
name: 'GPT-4',
},
};

export interface Message {
role: Role;
content: string;
Expand All @@ -40,7 +19,6 @@ export interface Conversation {
id: number;
name: string;
messages: Message[];
model: OpenAIModel;
prompt: string;
folderId: number;
index: LlamaIndex;
Expand Down
25 changes: 2 additions & 23 deletions utils/app/clean.ts
Original file line number Diff line number Diff line change
@@ -1,22 +1,9 @@
import { Conversation, OpenAIModelID, OpenAIModels } from '@/types';
import { DEFAULT_SYSTEM_PROMPT } from './const';
import {Conversation} from '@/types';
import {DEFAULT_SYSTEM_PROMPT} from './const';

export const cleanSelectedConversation = (conversation: Conversation) => {
// added model for each conversation (3/20/23)
// added system prompt for each conversation (3/21/23)
// added folders (3/23/23)

let updatedConversation = conversation;

// check for model on each conversation
if (!updatedConversation.model) {
updatedConversation = {
...updatedConversation,
model: updatedConversation.model || OpenAIModels[OpenAIModelID.GPT_3_5],
};
}

// check for system prompt on each conversation
if (!updatedConversation.prompt) {
updatedConversation = {
...updatedConversation,
Expand All @@ -35,16 +22,8 @@ export const cleanSelectedConversation = (conversation: Conversation) => {
};

export const cleanConversationHistory = (history: Conversation[]) => {
// added model for each conversation (3/20/23)
// added system prompt for each conversation (3/21/23)
// added folders (3/23/23)

return history.reduce((acc: Conversation[], conversation) => {
try {
if (!conversation.model) {
conversation.model = OpenAIModels[OpenAIModelID.GPT_3_5];
}

if (!conversation.prompt) {
conversation.prompt = DEFAULT_SYSTEM_PROMPT;
}
Expand Down

0 comments on commit 2c2bbd2

Please sign in to comment.