Skip to content

Commit

Permalink
Fix system prompt.
Browse files — browse the repository at this point in the history
  • Loading branch information
manyoso committed Nov 21, 2023
1 parent 34555c4 commit 9e27a11
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions gpt4all-chat/chatllm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -917,11 +917,11 @@ void ChatLLM::restoreState()
#if defined(DEBUG)
qDebug() << "restoreState" << m_llmThread.objectName() << "size:" << m_state.size();
#endif
+ m_processedSystemPrompt = true;

if (m_state.isEmpty())
return;

- m_processedSystemPrompt = true;
m_llModelInfo.model->restoreState(static_cast<const uint8_t*>(reinterpret_cast<void*>(m_state.data())));
m_state.clear();
m_state.resize(0);
Expand All @@ -930,7 +930,7 @@ void ChatLLM::restoreState()
void ChatLLM::processSystemPrompt()
{
Q_ASSERT(isModelLoaded());
- if (!isModelLoaded() || m_processedSystemPrompt || m_isServer)
+ if (!isModelLoaded() || m_processedSystemPrompt || m_restoreStateFromText || m_isServer)
return;

const std::string systemPrompt = MySettings::globalInstance()->modelSystemPrompt(m_modelInfo).toStdString();
Expand Down Expand Up @@ -974,7 +974,7 @@ void ChatLLM::processSystemPrompt()
fflush(stdout);
#endif

- m_processedSystemPrompt = !m_stopGenerating;
+ m_processedSystemPrompt = m_stopGenerating == false;
}

void ChatLLM::processRestoreStateFromText()
Expand Down

0 comments on commit 9e27a11

Please sign in to comment.