Skip to content

Commit

Permalink
perf: remove unnecessary rerender when user typing input (janhq#1818)
Browse files Browse the repository at this point in the history
Co-authored-by: Faisal Amir <[email protected]>
  • Loading branch information
namchuai and urmauur authored Jan 29, 2024
1 parent edaf6bb commit bb47d68
Show file tree
Hide file tree
Showing 8 changed files with 74 additions and 66 deletions.
4 changes: 0 additions & 4 deletions web/containers/Providers/EventHandler.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import { extensionManager } from '@/extension'
import {
addNewMessageAtom,
updateMessageAtom,
generateResponseAtom,
} from '@/helpers/atoms/ChatMessage.atom'
import {
updateThreadWaitingForResponseAtom,
Expand All @@ -35,7 +34,6 @@ export default function EventHandler({ children }: { children: ReactNode }) {
const { downloadedModels } = useGetDownloadedModels()
const setActiveModel = useSetAtom(activeModelAtom)
const setStateModel = useSetAtom(stateModelAtom)
const setGenerateResponse = useSetAtom(generateResponseAtom)

const updateThreadWaiting = useSetAtom(updateThreadWaitingForResponseAtom)
const threads = useAtomValue(threadsAtom)
Expand All @@ -52,7 +50,6 @@ export default function EventHandler({ children }: { children: ReactNode }) {

const onNewMessageResponse = useCallback(
(message: ThreadMessage) => {
setGenerateResponse(false)
addNewMessage(message)
},
[addNewMessage]
Expand Down Expand Up @@ -96,7 +93,6 @@ export default function EventHandler({ children }: { children: ReactNode }) {

const onMessageResponseUpdate = useCallback(
(message: ThreadMessage) => {
setGenerateResponse(false)
updateMessage(
message.id,
message.thread_id,
Expand Down
2 changes: 0 additions & 2 deletions web/helpers/atoms/ChatMessage.atom.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@ import {
/**
* Stores all chat messages for all threads
*/
export const generateResponseAtom = atom<boolean>(false)

export const chatMessages = atom<Record<string, ThreadMessage[]>>({})

/**
Expand Down
15 changes: 15 additions & 0 deletions web/hooks/useInference.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { useAtomValue } from 'jotai'

import { threadStatesAtom } from '@/helpers/atoms/Thread.atom'

/**
 * Hook exposing whether any thread is currently waiting for a model
 * response, derived from the shared thread-state atom.
 */
export default function useInference() {
  const threadStates = useAtomValue(threadStatesAtom)

  // True as soon as one thread is flagged as waiting; bail out early.
  let isGeneratingResponse = false
  for (const state of Object.values(threadStates)) {
    if (state.waitingForResponse) {
      isGeneratingResponse = true
      break
    }
  }

  return {
    isGeneratingResponse,
  }
}
38 changes: 16 additions & 22 deletions web/hooks/useSendChatMessage.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { useEffect, useRef, useState } from 'react'
import { useEffect, useRef } from 'react'

import {
ChatCompletionMessage,
Expand All @@ -18,7 +18,7 @@ import {
ChatCompletionMessageContentType,
AssistantTool,
} from '@janhq/core'
import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'

import { ulid } from 'ulid'

Expand All @@ -35,7 +35,6 @@ import { useActiveModel } from './useActiveModel'
import { extensionManager } from '@/extension/ExtensionManager'
import {
addNewMessageAtom,
generateResponseAtom,
getCurrentChatMessagesAtom,
} from '@/helpers/atoms/ChatMessage.atom'
import {
Expand All @@ -48,29 +47,30 @@ import {
updateThreadWaitingForResponseAtom,
} from '@/helpers/atoms/Thread.atom'

export const queuedMessageAtom = atom(false)
export const reloadModelAtom = atom(false)

export default function useSendChatMessage() {
const activeThread = useAtomValue(activeThreadAtom)
const addNewMessage = useSetAtom(addNewMessageAtom)
const updateThread = useSetAtom(updateThreadAtom)
const updateThreadWaiting = useSetAtom(updateThreadWaitingForResponseAtom)
const [currentPrompt, setCurrentPrompt] = useAtom(currentPromptAtom)
const setGenerateResponse = useSetAtom(generateResponseAtom)
const setCurrentPrompt = useSetAtom(currentPromptAtom)

const currentMessages = useAtomValue(getCurrentChatMessagesAtom)
const { activeModel } = useActiveModel()
const selectedModel = useAtomValue(selectedModelAtom)
const { startModel } = useActiveModel()
const [queuedMessage, setQueuedMessage] = useState(false)
const setQueuedMessage = useSetAtom(queuedMessageAtom)

const modelRef = useRef<Model | undefined>()
const threadStates = useAtomValue(threadStatesAtom)
const updateThreadInitSuccess = useSetAtom(updateThreadInitSuccessAtom)
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)

const engineParamsUpdate = useAtomValue(engineParamsUpdateAtom)
const setEngineParamsUpdate = useSetAtom(engineParamsUpdateAtom)

const [reloadModel, setReloadModel] = useState(false)
const setEngineParamsUpdate = useSetAtom(engineParamsUpdateAtom)
const setReloadModel = useSetAtom(reloadModelAtom)
const [fileUpload, setFileUpload] = useAtom(fileUploadAtom)

useEffect(() => {
Expand All @@ -82,9 +82,7 @@ export default function useSendChatMessage() {
console.error('No active thread')
return
}

updateThreadWaiting(activeThread.id, true)

const messages: ChatCompletionMessage[] = [
activeThread.assistants[0]?.instructions,
]
Expand Down Expand Up @@ -121,19 +119,19 @@ export default function useSendChatMessage() {
if (activeModel?.id !== modelId) {
setQueuedMessage(true)
startModel(modelId)
await WaitForModelStarting(modelId)
await waitForModelStarting(modelId)
setQueuedMessage(false)
}
events.emit(MessageEvent.OnMessageSent, messageRequest)
}

// TODO: Refactor @louis
const WaitForModelStarting = async (modelId: string) => {
const waitForModelStarting = async (modelId: string) => {
return new Promise<void>((resolve) => {
setTimeout(async () => {
if (modelRef.current?.id !== modelId) {
console.debug('waiting for model to start')
await WaitForModelStarting(modelId)
await waitForModelStarting(modelId)
resolve()
} else {
resolve()
Expand All @@ -142,10 +140,8 @@ export default function useSendChatMessage() {
})
}

const sendChatMessage = async () => {
setGenerateResponse(true)

if (!currentPrompt || currentPrompt.trim().length === 0) return
const sendChatMessage = async (message: string) => {
if (!message || message.trim().length === 0) return

if (!activeThread) {
console.error('No active thread')
Expand Down Expand Up @@ -199,7 +195,7 @@ export default function useSendChatMessage() {

updateThreadWaiting(activeThread.id, true)

const prompt = currentPrompt.trim()
const prompt = message.trim()
setCurrentPrompt('')

const base64Blob = fileUpload[0]
Expand Down Expand Up @@ -335,7 +331,7 @@ export default function useSendChatMessage() {
if (activeModel?.id !== modelId) {
setQueuedMessage(true)
startModel(modelId)
await WaitForModelStarting(modelId)
await waitForModelStarting(modelId)
setQueuedMessage(false)
}

Expand All @@ -346,9 +342,7 @@ export default function useSendChatMessage() {
}

return {
reloadModel,
sendChatMessage,
resendChatMessage,
queuedMessage,
}
}
10 changes: 4 additions & 6 deletions web/screens/Chat/ChatBody/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -15,23 +15,21 @@ import { MainViewState } from '@/constants/screens'
import { activeModelAtom } from '@/hooks/useActiveModel'
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'

import useInference from '@/hooks/useInference'
import { useMainViewState } from '@/hooks/useMainViewState'

import ChatItem from '../ChatItem'

import ErrorMessage from '../ErrorMessage'

import {
generateResponseAtom,
getCurrentChatMessagesAtom,
} from '@/helpers/atoms/ChatMessage.atom'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'

const ChatBody: React.FC = () => {
const messages = useAtomValue(getCurrentChatMessagesAtom)
const activeModel = useAtomValue(activeModelAtom)
const { downloadedModels } = useGetDownloadedModels()
const { setMainViewState } = useMainViewState()
const generateResponse = useAtomValue(generateResponseAtom)
const { isGeneratingResponse } = useInference()

if (downloadedModels.length === 0)
return (
Expand Down Expand Up @@ -101,7 +99,7 @@ const ChatBody: React.FC = () => {
))}

{activeModel &&
(generateResponse ||
(isGeneratingResponse ||
(messages.length &&
messages[messages.length - 1].status ===
MessageStatus.Pending &&
Expand Down
25 changes: 14 additions & 11 deletions web/screens/Chat/ChatInput/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -64,13 +64,18 @@ const ChatInput: React.FC = () => {
useEffect(() => {
if (isWaitingToSend && activeThreadId) {
setIsWaitingToSend(false)
sendChatMessage()
sendChatMessage(currentPrompt)
}
if (textareaRef.current) {
textareaRef.current.focus()
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [waitingToSendMessage, activeThreadId])
}, [
activeThreadId,
isWaitingToSend,
currentPrompt,
setIsWaitingToSend,
sendChatMessage,
])

useEffect(() => {
if (textareaRef.current) {
Expand All @@ -81,13 +86,11 @@ const ChatInput: React.FC = () => {
}, [currentPrompt])

const onKeyDown = async (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (e.key === 'Enter') {
if (!e.shiftKey) {
e.preventDefault()
if (messages[messages.length - 1]?.status !== MessageStatus.Pending)
sendChatMessage()
else onStopInferenceClick()
}
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault()
if (messages[messages.length - 1]?.status !== MessageStatus.Pending)
sendChatMessage(currentPrompt)
else onStopInferenceClick()
}
}

Expand Down Expand Up @@ -237,7 +240,7 @@ const ChatInput: React.FC = () => {
}
themes="primary"
className="min-w-[100px]"
onClick={sendChatMessage}
onClick={() => sendChatMessage(currentPrompt)}
>
Send
</Button>
Expand Down
6 changes: 4 additions & 2 deletions web/screens/Chat/MessageQueuedBanner/index.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import useSendChatMessage from '@/hooks/useSendChatMessage'
import { useAtomValue } from 'jotai'

import { queuedMessageAtom } from '@/hooks/useSendChatMessage'

const MessageQueuedBanner: React.FC = () => {
const { queuedMessage } = useSendChatMessage()
const queuedMessage = useAtomValue(queuedMessageAtom)

return (
<div>
Expand Down
40 changes: 21 additions & 19 deletions web/screens/Chat/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import ModelStart from '@/containers/Loader/ModelStart'
import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai'
import { showLeftSideBarAtom } from '@/containers/Providers/KeyListener'

import useSendChatMessage from '@/hooks/useSendChatMessage'
import { queuedMessageAtom, reloadModelAtom } from '@/hooks/useSendChatMessage'

import ChatBody from '@/screens/Chat/ChatBody'

Expand All @@ -30,20 +30,37 @@ import {
engineParamsUpdateAtom,
} from '@/helpers/atoms/Thread.atom'

/**
 * Maps a file-drop rejection code to the user-facing error message
 * shown in the drag-and-drop overlay.
 *
 * @param code - rejection code from the dropzone validation
 * @returns human-readable error text for the given code
 */
const renderError = (code: string) => {
  switch (code) {
    case 'multiple-upload':
      return 'Currently, we only support 1 attachment at the same time'

    case 'retrieval-off':
      return 'Turn on Retrieval in Assistant Settings to use this feature'

    case 'file-invalid-type':
      return 'We do not support this file type'

    default:
      // Fix: original fallback read "Oops, something error, please try
      // again." — broken grammar in a user-facing string.
      return 'Oops, something went wrong, please try again.'
  }
}

const ChatScreen: React.FC = () => {
const setCurrentPrompt = useSetAtom(currentPromptAtom)
const activeThread = useAtomValue(activeThreadAtom)
const showLeftSideBar = useAtomValue(showLeftSideBarAtom)
const engineParamsUpdate = useAtomValue(engineParamsUpdateAtom)
const { queuedMessage, reloadModel } = useSendChatMessage()
const [dragOver, setDragOver] = useState(false)

const queuedMessage = useAtomValue(queuedMessageAtom)
const reloadModel = useAtomValue(reloadModelAtom)
const [dragRejected, setDragRejected] = useState({ code: '' })
const setFileUpload = useSetAtom(fileUploadAtom)
const { getRootProps, isDragReject } = useDropzone({
noClick: true,
multiple: false,
accept: {
// 'image/*': ['.png', '.jpg', '.jpeg'],
'application/pdf': ['.pdf'],
},

Expand Down Expand Up @@ -104,22 +121,6 @@ const ChatScreen: React.FC = () => {
}, 2000)
}, [dragRejected.code])

const renderError = (code: string) => {
switch (code) {
case 'multiple-upload':
return 'Currently, we only support 1 attachment at the same time'

case 'retrieval-off':
return 'Turn on Retrieval in Assistant Settings to use this feature'

case 'file-invalid-type':
return 'We do not support this file type'

default:
return 'Oops, something error, please try again.'
}
}

return (
<div className="flex h-full w-full">
{/* Left side bar */}
Expand Down Expand Up @@ -216,6 +217,7 @@ const ChatScreen: React.FC = () => {
<ChatInput />
</div>
</div>

{/* Right side bar */}
{activeThread && <Sidebar />}
</div>
Expand Down

0 comments on commit bb47d68

Please sign in to comment.