Merge pull request vanna-ai#484 from ThomasBarnette/allow-user-to-specicy-max-prompt-length

Allow user to specify max prompt length
zainhoda authored Jun 7, 2024
2 parents 2bcc65f + 79d4ffc commit 14c09c3
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions src/vanna/base/base.py
@@ -79,6 +79,7 @@ def __init__(self, config=None):
         self.static_documentation = ""
         self.dialect = self.config.get("dialect", "SQL")
         self.language = self.config.get("language", None)
+        self.max_tokens = self.config.get("max_tokens", 14000)

     def log(self, message: str, title: str = "Info"):
         print(message)
@@ -559,14 +560,14 @@ def get_sql_prompt(
             "Please help to generate a SQL query to answer the question. Your response should ONLY be based on the given context and follow the response guidelines and format instructions. "

         initial_prompt = self.add_ddl_to_prompt(
-            initial_prompt, ddl_list, max_tokens=14000
+            initial_prompt, ddl_list, max_tokens=self.max_tokens
         )

         if self.static_documentation != "":
             doc_list.append(self.static_documentation)

         initial_prompt = self.add_documentation_to_prompt(
-            initial_prompt, doc_list, max_tokens=14000
+            initial_prompt, doc_list, max_tokens=self.max_tokens
         )

         initial_prompt += (
@@ -603,15 +604,15 @@ def get_followup_questions_prompt(
         initial_prompt = f"The user initially asked the question: '{question}': \n\n"

         initial_prompt = self.add_ddl_to_prompt(
-            initial_prompt, ddl_list, max_tokens=14000
+            initial_prompt, ddl_list, max_tokens=self.max_tokens
         )

         initial_prompt = self.add_documentation_to_prompt(
-            initial_prompt, doc_list, max_tokens=14000
+            initial_prompt, doc_list, max_tokens=self.max_tokens
         )

         initial_prompt = self.add_sql_to_prompt(
-            initial_prompt, question_sql_list, max_tokens=14000
+            initial_prompt, question_sql_list, max_tokens=self.max_tokens
         )

         message_log = [self.system_message(initial_prompt)]
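
For anyone wanting to try the new option, here is a minimal usage sketch. It is illustrative and not part of this commit: it assumes the usual Vanna subclassing pattern from the project examples, and the import paths, model name, and API key below are placeholders that may differ by version.

from vanna.chromadb.chromadb_vector import ChromaDB_VectorStore
from vanna.openai.openai_chat import OpenAI_Chat

class MyVanna(ChromaDB_VectorStore, OpenAI_Chat):
    def __init__(self, config=None):
        ChromaDB_VectorStore.__init__(self, config=config)
        OpenAI_Chat.__init__(self, config=config)

# "max_tokens" is read in VannaBase.__init__ and caps how much DDL,
# documentation, and example SQL gets packed into each prompt.
# It falls back to 14000 when the key is omitted.
vn = MyVanna(config={
    "api_key": "sk-...",    # placeholder credential
    "model": "gpt-4o",      # placeholder model name
    "max_tokens": 6000,     # overrides the previously hardcoded 14000
})

With this change the prompt-building helpers above read self.max_tokens instead of the hardcoded 14000, so the override applies to both SQL-generation and follow-up-question prompts.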
