Skip to content

Commit

Permalink
Add Python Builder
Browse files — browse the repository at this point in the history
  • Loading branch information
katiayn committed May 2, 2023
1 parent 3b4daf3 commit a5b5c31
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 4 deletions.
7 changes: 5 additions & 2 deletions fly.toml
Original file line number Diff line number Diff line change
@@ -1,16 +1,19 @@
# fly.toml app configuration file generated for hello-fly-langchain on 2023-04-26T11:34:12+02:00
# fly.toml app configuration file generated for hello-fly-langchain on 2023-05-02T15:33:58+02:00
#
# See https://fly.io/docs/reference/configuration/ for information about how to use this file.
#

app = "hello-fly-langchain"
primary_region = "ams"
kill_signal = "SIGINT"
kill_timeout = 5
kill_timeout = "5s"

[experimental]
auto_rollback = true

[build]
builder = "paketobuildpacks/builder:base"

[env]
PORT = "8080"
PRIMARY_REGION = "ams"
Expand Down
4 changes: 2 additions & 2 deletions hello.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,10 @@ def hello(place="Berlin"):
llm = OpenAI(temperature=0.9)
# https://python.langchain.com/en/latest/modules/prompts/prompt_templates.html
prompt = PromptTemplate(
input_variables=['place'], # list of variables
input_variables=["place"], # list of variables
template="What are the 3 best places to eat in {place}?", # prompt
)
question = prompt.format(place=place)
# split() is used to split the items into a list. The llm response will look like:
# "\n\n1. <first item>.\n\n2. <second item>..."
return render_template('hello.html', place=place, answer=llm(question).split("\n\n"))
return render_template("hello.html", place=place, answer=llm(question).split("\n\n"))

0 comments on commit a5b5c31

Please sign in to comment.