openai_api.py
import openai
import config
import modes
openai.api_key = config.OPENAI_API_KEY
text = True
'''
A few clarifications before we start.
For the bot's replies to make sense, it needs a record of the conversation so far.
For that reason, we store every reply (both from the user and from the chatbot) in a list for now;
we might look into using a database in the future.
There are 3 possible roles that an entity can have in a conversation:
1. 'system'    - sets the tone of the chatbot and the way it will 'behave' (see the example below)
2. 'user'      - self-explanatory
3. 'assistant' - the chatbot itself
'''
# this is where we will store the whole conversation between the chatbot and the user
messages = [
    {"role": "system", "content": modes.JARVIS}
]
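
# After one exchange, `messages` might look something like this (illustrative example only,
# the actual contents depend on what the user says and how the model replies):
# [
#     {"role": "system", "content": modes.JARVIS},
#     {"role": "user", "content": "What time is it?"},
#     {"role": "assistant", "content": "It is three o'clock, sir."},
# ]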

# method that transcribes spoken language into text, sends it to the chat model
# and returns a readable transcript of the whole conversation
def audio_text(audio):
    global messages

    # transcribe the recorded audio file with Whisper
    with open(audio, 'rb') as audio_file:
        transcript = openai.Audio.transcribe('whisper-1', audio_file)

    messages.append({"role": "user", "content": transcript['text']})

    response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages)
    system_message = response['choices'][0]['message']['content']
    # subprocess.call(['say', system_message])

    messages.append({"role": "assistant", "content": system_message})

    # build a readable transcript of the conversation (skipping the system prompt)
    chat_transcript = ""
    for message in messages:
        if message['role'] != 'system':
            chat_transcript += message['role'] + ": " + message['content'] + "\n\n"

    # total_tokens is an int, so convert it before concatenating
    return chat_transcript + "\n\n" + str(response['usage']['total_tokens'])

# method that sends a text message to the chat model and returns its reply
def text_text(user_text):
    global messages

    messages.append({"role": "user", "content": user_text})

    response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages)
    system_message = response['choices'][0]['message']['content']
    # subprocess.call(['say', system_message])

    messages.append({"role": "assistant", "content": system_message})

    # in case we want to display the full history
    # chat_transcript = ""
    # for message in messages:
    #     if message['role'] != 'system':
    #         chat_transcript += message['role'] + ": " + message['content'] + "\n\n"
    # print(response['usage']['total_tokens'])

    return messages[-1]['content']  # return the last message in the list, which should be the bot's response
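
# A minimal sketch of how these helpers could be used from the command line.
# This block is illustrative only: it assumes config.py provides a valid OPENAI_API_KEY
# and modes.py provides the JARVIS system prompt, as the imports at the top suggest.
if __name__ == "__main__":
    # simple text-only chat loop; type 'quit' to stop
    while True:
        user_input = input("you: ")
        if user_input.strip().lower() == "quit":
            break
        print("assistant: " + text_text(user_input))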