one more time, with passion
borisbat committed Apr 2, 2023
1 parent 36603d2 commit c7ddd3a
Showing 3 changed files with 22 additions and 13 deletions.
4 changes: 2 additions & 2 deletions modules/dasTelegram/examples/godfather.das
@@ -292,7 +292,7 @@ def generate_chat_summary ( chat_id:int64; last_n_messages:int; extp:string)
    if !empty(text)
        let summary_prompt = "Previous section summary:\n{prev_summary}\n---\nText:\n{text}\n---\nSummary of the text above {extp}:"
        to_log(LOG_INFO, "Generating summary for {length(summary_prompt)} bytes\n{summary_prompt}\n\n\n")
-       let summary = generate_completion(summary_prompt)
+       let summary = generate_summary(summary_prompt)
        to_log(LOG_INFO, "Summary: {summary}\n\n")
        total_summary = "{total_summary}\n{summary}"
        prev_summary = summary
@@ -439,7 +439,7 @@ def main
    write_message_to_log(*r.message)
    if is_someone_asking_bot_to_say_something(*r.message)
        if BOT_DOES_NOT_ANSWER
-           send_system_message(r.message.chat.id, "[THE GODFATHER IS SLEEPING]")
+           send_system_message(*r.message, "[THE GODFATHER IS SLEEPING]")
        else
            to_log(LOG_INFO, "someone is asking me to say something\n")
            let reply = generate_bot_reply(r.message.chat.id)
25 changes: 18 additions & 7 deletions modules/dasTelegram/examples/godfather_chat.das
@@ -49,6 +49,8 @@ JSON: {"result":"nothing","message":"you answer followed by emoji of your mood"}
%%
let BOT_CHAT_PROMPT_SUFFIX = "Answer in character as the Godfather in JSON:"

+let BOT_SUMMARY_PROMPT = "Give detailed combined summary. Focus on facts, names, dates. Answer with summary only."
+
def public sanitize_content ( text : string )
    return text |> fix_broken_utf32 |> strip

@@ -58,7 +60,7 @@ struct public GodfatherMessage
    draw : string
    code : string

-def generate_chat_completion_result ( messages : array<ChatCompletionMessage> ) : GodfatherMessage
+def generate_chat_completion_result ( messages : array<ChatCompletionMessage>; temperature:float = 1.0; top_p : float = 1.0 ) : GodfatherMessage
    //! generate completion result given chat completion messages
    to_log(LOG_INFO,"\n\n\ngenerating chat completion:\n")
    for msg in messages
@@ -68,8 +70,8 @@ def generate_chat_completion_result ( messages : array<ChatCompletionMessage> )
    chat <- openai_create_chat_completion([[ChatCompletion()
        model = "gpt-3.5-turbo",
        max_tokens = 512,
-       temperature = 1.0,
-       top_p = 1.0,
+       temperature = temperature,
+       top_p = top_p,
        messages := messages
    ]])
    if chat |> is_valid
@@ -263,7 +265,7 @@ def public generate_completion_reply ( chat_id : int64 )
    //! given chat_id generate bot reply
    var messags <- generate_completion_request(chat_id)
    let summary <- convert_completion(messags)
-   let completion = generate_completion(summary)
+   let completion = generate_completion(summary,1.0)
    var result = [[GodfatherMessage result="ok", message="{completion}"]]
    if result.result == "error"
        return result
@@ -385,15 +387,24 @@ def public generate_previous_conversation_summary ( chat_id:int64; extra_prompt:
    to_log(LOG_INFO, "Overall summary:\n{prev_summary}\n\n\n")
    return prev_summary

-def public generate_completion ( summary_prompt:string )
+def public generate_summary ( text : string )
+    var inscope messages <- [{ChatCompletionMessage
+        role="system", content=BOT_SUMMARY_PROMPT;
+        role="user", content=text}]
+    var res = generate_chat_completion_result(messages,0.,0.)
+    if res.result=="error"
+        return ""
+    return res.message
+
+def public generate_completion ( summary_prompt:string; temperature:float = 0. )
    to_log(LOG_INFO, "generate_completion for prompt:\n{summary_prompt}\n\n")
    var completion : CreateCompletionResponse
    for i in range(BOT_RETRY_ATTEMPTS)
        completion <- openai_create_completion([[Completion()
-           model = "text-davinci-003",
+           model = "text-davinci-003", // "text-curie-001", // "text-davinci-003",
            prompt = summary_prompt |> fix_broken_escaping,
            max_tokens = 1024,
-           temperature = 0.
+           temperature = temperature
        ]])
        if completion |> is_valid
            break
6 changes: 2 additions & 4 deletions modules/dasTelegram/examples/test_chat_log.das
@@ -12,7 +12,8 @@ require emoji

require godfather_chat

-let chat_id = -1001849665593l
+// let chat_id = -1001849665593l // in bots we trust
+let chat_id = -1001666598976l; // remote work

[export]
def main
@@ -27,6 +28,3 @@ def main

    let summary_so_far = generate_previous_conversation_summary(chat_id,"in english")
    print("{summary_so_far}\n")
-
-
-