Skip to content

Commit

Permalink
Working, but hangs on large files
Browse files Browse the repository at this point in the history
  • Loading branch information
chris-sanders committed Jul 22, 2024
1 parent c64778a commit 68396b7
Showing 1 changed file with 51 additions and 41 deletions.
92 changes: 51 additions & 41 deletions gemini.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import os
import argparse
import glob
import mimetypes

# Get API key from environment variable
api_key = os.environ.get("GEMINI_API_KEY")
Expand All @@ -14,51 +15,56 @@
# Initialize Google API Client
genai.configure(api_key=api_key)

# MIME types accepted for text-style uploads.
# Membership in this list is checked by get_mime_type() below when
# choosing the MIME type to send with each upload.
SUPPORTED_MIME_TYPES = [
'text/plain', 'text/html', 'text/css', 'text/javascript',
'application/x-javascript', 'text/x-typescript', 'application/x-typescript',
'text/csv', 'text/markdown', 'text/x-python', 'application/x-python-code',
'application/json', 'text/xml', 'application/rtf', 'text/rtf'
]

def get_mime_type(filepath):
    """Return the MIME type to use when uploading *filepath*.

    The type is guessed from the file extension via the stdlib
    ``mimetypes`` module. Guesses that appear in SUPPORTED_MIME_TYPES
    are returned as-is; anything unknown or unsupported falls back to
    ``'text/plain'`` so the upload is still attempted as plain text.
    """
    mime_type, _ = mimetypes.guess_type(filepath)
    if mime_type in SUPPORTED_MIME_TYPES:
        # Supported type: keep it. (The original returned 'text/plain'
        # in BOTH branches, which made this whitelist check dead code
        # and contradicted its own "Treat YAML as plain text" comment.)
        return mime_type
    # Unknown/unsupported extension (e.g. YAML, which has no registered
    # text MIME type) — default to plain text.
    return 'text/plain'

def upload_files(directory):
    """Recursively upload every regular file under *directory*.

    Each file is uploaded via genai.upload_file with a display name equal
    to its path relative to *directory* and a MIME type chosen by
    get_mime_type().

    Returns a tuple ``(uploaded_files, skipped_files)`` where:
      uploaded_files -- list of (file_response, mime_type) for successes
      skipped_files  -- list of (filepath, mime_type) for failed uploads
    """
    uploaded_files = []
    skipped_files = []
    for filepath in glob.glob(f"{directory}/**/*", recursive=True):
        if not os.path.isfile(filepath):
            # The recursive glob also matches directories; skip them.
            continue
        relative_path = os.path.relpath(filepath, directory)
        display_name = relative_path
        mime_type = get_mime_type(filepath)
        try:
            file_response = genai.upload_file(path=filepath, display_name=display_name, mime_type=mime_type)
            uploaded_files.append((file_response, mime_type))
            print(f"Uploaded file {file_response.display_name} as: {file_response.uri} (MIME: {mime_type})")
        except Exception as e:
            # Best-effort: report the failure and carry on with the rest.
            print(f"Failed to upload {filepath}: {str(e)}")
            skipped_files.append((filepath, mime_type))
    return uploaded_files, skipped_files

def chat_with_model(model, system_prompt, file_objects):
    """Run an interactive terminal chat against *model*.

    Each user turn is wrapped with *system_prompt* and an instruction to
    answer only from the attached *file_objects*, which are re-sent with
    every message. Exit with 'exit' or Ctrl+D (EOF).

    Parameters:
      model         -- a genai.GenerativeModel instance
      system_prompt -- string prepended to every message
      file_objects  -- list of uploaded file handles to attach each turn
    """
    chat = model.start_chat(history=[])

    print("Chat started. Type 'exit' or press Ctrl+D to end the conversation.")

    while True:
        try:
            user_input = input("You: ")
            if user_input.lower() == 'exit':
                break

            # Combine system prompt, user input, and files for each message.
            # NOTE(review): re-attaching every uploaded file on EVERY turn
            # is expensive and is the likely cause of hangs with large
            # uploads — consider attaching files only on the first turn.
            full_prompt = [
                system_prompt,
                f"User question: {user_input}",
                "Use only information from the attached files and user input to answer the question."
            ]
            response = chat.send_message(full_prompt + file_objects)
            print("AI:", response.text)
        except EOFError:
            # Ctrl+D ends the session cleanly.
            print("\nExiting chat...")
            break
        except Exception as e:
            # Keep the session alive on transient API errors.
            print(f"An error occurred: {str(e)}")
            print("Continuing chat...")

def cleanup_files(uploaded_files):
    """Delete every previously uploaded file from the service.

    *uploaded_files* is a list of (file_response, mime_type) tuples as
    produced by upload_files; the MIME type is ignored here. Failures
    are reported and do not stop the remaining deletions.
    """
    for file_obj, _unused_mime in uploaded_files:
        try:
            genai.delete_file(name=file_obj.name)
        except Exception as err:
            print(f"Failed to delete {file_obj.display_name}: {str(err)}")
        else:
            print(f'Deleted file {file_obj.display_name}')

def main():
    """Entry point: upload a directory of files, chat about them, then clean up.

    Command line:
      directory -- path whose files are uploaded as chat context
    """
    parser = argparse.ArgumentParser(description="Upload files and chat with Gemini model.")
    parser.add_argument("directory", help="Directory containing files to upload")
    args = parser.parse_args()

    uploaded_files, skipped_files = upload_files(args.directory)
    if skipped_files:
        # Surface upload failures instead of silently discarding them.
        print(f"Skipped {len(skipped_files)} file(s) that failed to upload.")

    # Extract just the file handles from the (file, mime_type) tuples.
    file_objects = [file_obj for file_obj, _ in uploaded_files]

    model_name = "models/gemini-1.5-pro-latest"
    model = genai.GenerativeModel(model_name=model_name)

    # Role/steering prompt sent with every chat turn.
    system_prompt = ("You are a Kubernetes expert helping people identify root causes for issues in logs. Always provide reasoning for your answers and ask clarifying questions if needed.")

    try:
        chat_with_model(model, system_prompt, file_objects)
    finally:
        # Always delete the uploaded files, even if the chat loop raises,
        # so aborted sessions don't leak server-side uploads.
        cleanup_files(uploaded_files)

if __name__ == "__main__":
Expand Down

0 comments on commit 68396b7

Please sign in to comment.