
Commit

cr

hwchase17 committed Jan 17, 2023
1 parent 638f0ee commit 6db700e
Showing 2 changed files with 46 additions and 47 deletions.
4 changes: 1 addition & 3 deletions app.py
@@ -19,12 +19,10 @@ def get_weaviate_store():
    return Weaviate(client, "Paragraph", "content", attributes=["source"])


vectorstore = get_weaviate_store()


def set_openai_api_key(api_key, agent):
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
        vectorstore = get_weaviate_store()
        qa_chain = get_new_chain1(vectorstore)
        os.environ["OPENAI_API_KEY"] = ""
        return qa_chain
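Note: a minimal usage sketch, not part of the commit, showing the effect of the app.py change — the Weaviate store (whose client needs OPENAI_API_KEY) is now built inside set_openai_api_key rather than at import time. It assumes app.py is importable as "app" and that WEAVIATE_URL is already set in the environment; the key, question, and empty history are placeholders.

import os
from app import set_openai_api_key  # assumption: the file above is importable as "app"

user_key = "sk-..."  # placeholder key collected from the UI at runtime

# The handler builds the Weaviate store and the QA chain only after the key
# has been written to the environment, instead of at module import time.
qa_chain = set_openai_api_key(user_key, agent=None)

if qa_chain is not None:
    result = qa_chain({"question": "What is LangChain?", "chat_history": []})
    print(result)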
89 changes: 45 additions & 44 deletions chain.py
@@ -17,53 +17,12 @@
from langchain.vectorstores import FAISS, Weaviate
from pydantic import BaseModel

WEAVIATE_URL = os.environ["WEAVIATE_URL"]
client = weaviate.Client(
    url=WEAVIATE_URL,
    additional_headers={"X-OpenAI-Api-Key": os.environ["OPENAI_API_KEY"]},
)

_eg_template = """## Example:
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question: {answer}"""
_eg_prompt = PromptTemplate(
    template=_eg_template,
    input_variables=["chat_history", "question", "answer"],
)


_prefix = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question. You should assume that the question is related to LangChain."""
_suffix = """## Example:
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
eg_store = Weaviate(
    client,
    "Rephrase",
    "content",
    attributes=["question", "answer", "chat_history"],
)
example_selector = SemanticSimilarityExampleSelector(vectorstore=eg_store, k=4)
prompt = FewShotPromptTemplate(
    prefix=_prefix,
    suffix=_suffix,
    example_selector=example_selector,
    example_prompt=_eg_prompt,
    input_variables=["question", "chat_history"],
)
llm = OpenAI(temperature=0, model_name="text-davinci-003")
key_word_extractor = LLMChain(llm=llm, prompt=prompt)


class CustomChain(Chain, BaseModel):

    vstore: Weaviate
    chain: BaseCombineDocumentsChain
    key_word_extractor: Chain

    @property
    def input_keys(self) -> List[str]:
@@ -77,7 +36,7 @@ def _call(self, inputs: Dict[str, str]) -> Dict[str, str]:
        question = inputs["question"]
        chat_history_str = _get_chat_history(inputs["chat_history"])
        if chat_history_str:
            new_question = key_word_extractor.run(
            new_question = self.key_word_extractor.run(
                question=question, chat_history=chat_history_str
            )
        else:
@@ -92,6 +51,46 @@ def _call(self, inputs: Dict[str, str]) -> Dict[str, str]:


def get_new_chain1(vectorstore) -> Chain:
    WEAVIATE_URL = os.environ["WEAVIATE_URL"]
    client = weaviate.Client(
        url=WEAVIATE_URL,
        additional_headers={"X-OpenAI-Api-Key": os.environ["OPENAI_API_KEY"]},
    )

    _eg_template = """## Example:
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question: {answer}"""
    _eg_prompt = PromptTemplate(
        template=_eg_template,
        input_variables=["chat_history", "question", "answer"],
    )

    _prefix = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question. You should assume that the question is related to LangChain."""
    _suffix = """## Example:
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
    eg_store = Weaviate(
        client,
        "Rephrase",
        "content",
        attributes=["question", "answer", "chat_history"],
    )
    example_selector = SemanticSimilarityExampleSelector(vectorstore=eg_store, k=4)
    prompt = FewShotPromptTemplate(
        prefix=_prefix,
        suffix=_suffix,
        example_selector=example_selector,
        example_prompt=_eg_prompt,
        input_variables=["question", "chat_history"],
    )
    llm = OpenAI(temperature=0, model_name="text-davinci-003")
    key_word_extractor = LLMChain(llm=llm, prompt=prompt)

    EXAMPLE_PROMPT = PromptTemplate(
        template=">Example:\nContent:\n---------\n{page_content}\n----------\nSource: {source}",
@@ -115,7 +114,9 @@ def get_new_chain1(vectorstore) -> Chain:
        prompt=PROMPT,
        document_prompt=EXAMPLE_PROMPT,
    )
    return CustomChain(chain=doc_chain, vstore=vectorstore)
    return CustomChain(
        chain=doc_chain, vstore=vectorstore, key_word_extractor=key_word_extractor
    )


def _get_chat_history(chat_history: List[Tuple[str, str]]):
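Note: a hedged end-to-end sketch, not from the repository, of the reworked chain.py entry point. After this commit the Weaviate client, few-shot rephrase prompt, and key_word_extractor are all built inside get_new_chain1, so WEAVIATE_URL and OPENAI_API_KEY must be in the environment before it runs. The module names, URL, key, and conversation below are placeholders/assumptions.

import os

# Set before importing, since app.py reads WEAVIATE_URL at import time.
os.environ["WEAVIATE_URL"] = "https://example.weaviate.network"  # placeholder
os.environ["OPENAI_API_KEY"] = "sk-..."                          # placeholder

from app import get_weaviate_store   # assumption: modules importable as "app"/"chain"
from chain import get_new_chain1

vectorstore = get_weaviate_store()
qa = get_new_chain1(vectorstore)

# With a non-empty chat history, CustomChain._call first runs its
# key_word_extractor (now a field on the chain rather than a module global)
# to condense the follow-up into a standalone question, then retrieves and answers.
result = qa(
    {
        "question": "How do I install it?",
        "chat_history": [("What is LangChain?", "A framework for building LLM apps.")],
    }
)
print(result)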
