test(test_completion.py + test_streaming.py): add ollama endpoint to ci/cd pipeline
krrishdholakia committed Dec 22, 2023
1 parent 57607f1 commit eb2d13e
Showing 3 changed files with 47 additions and 3 deletions.
litellm/llms/ollama.py (5 changes: 3 additions & 2 deletions)
@@ -137,13 +137,14 @@ def get_ollama_response(
         additional_args={"api_base": url, "complete_input_dict": data, "headers": {}, "acompletion": acompletion,},
     )
     if acompletion is True:
-        if optional_params.get("stream", False):
+        if optional_params.get("stream", False) == True:
             response = ollama_async_streaming(url=url, data=data, model_response=model_response, encoding=encoding, logging_obj=logging_obj)
         else:
             response = ollama_acompletion(url=url, data=data, model_response=model_response, encoding=encoding, logging_obj=logging_obj)
         return response
-    elif optional_params.get("stream", False):
+    elif optional_params.get("stream", False) == True:
         return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj)
+
     response = requests.post(
         url=f"{url}",
         json=data,
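For context, the two `stream` checks rewritten above sit inside `get_ollama_response`'s dispatch: `acompletion=True` routes to the async helpers, `stream=True` selects the streaming variant in either mode, and everything else falls through to the blocking `requests.post` call. A minimal sketch of reaching each branch through litellm's public API; the message content is illustrative, and the hosted endpoint URL is simply the one used in the tests below:

import asyncio
import litellm

messages = [{"role": "user", "content": "Hey, how's it going?"}]  # illustrative message
api_base = "https://test-ollama-endpoint.onrender.com"  # hosted endpoint from the tests below

# sync, non-streaming: falls through to the requests.post() call
print(litellm.completion(model="ollama/phi", messages=messages, api_base=api_base))

# sync, streaming: takes the `elif optional_params.get("stream", False) == True` branch
for chunk in litellm.completion(model="ollama/phi", messages=messages, api_base=api_base, stream=True):
    print(chunk)

# async, non-streaming: enters the `acompletion is True` block
async def main():
    print(await litellm.acompletion(model="ollama/phi", messages=messages, api_base=api_base))

asyncio.run(main())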
litellm/tests/test_completion.py (18 changes: 17 additions & 1 deletion)
@@ -571,6 +571,22 @@ def test_completion_openai_litellm_key():
 
 # test_completion_openai_litellm_key()
 
+def test_completion_ollama_hosted():
+    try:
+        litellm.set_verbose = True
+        response = completion(
+            model="ollama/phi",
+            messages=messages,
+            max_tokens=10,
+            api_base="https://test-ollama-endpoint.onrender.com"
+        )
+        # Add any assertions here to check the response
+        print(response)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+# test_completion_ollama_hosted()
+
 def test_completion_openrouter1():
     try:
         response = completion(
@@ -626,7 +642,7 @@ def test_completion_anyscale_with_functions():
         print(response)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_completion_anyscale_with_functions()
+# test_completion_anyscale_with_functions()
 
 def test_completion_azure_key_completion_arg():
     # this tests if we can pass api_key to completion, when it's not in the env
litellm/tests/test_streaming.py (27 changes: 27 additions & 0 deletions)
@@ -276,6 +276,33 @@ def test_completion_azure_function_calling_stream():
 
 # test_completion_azure_function_calling_stream()
 
+def test_completion_ollama_hosted_stream():
+    try:
+        litellm.set_verbose = True
+        response = completion(
+            model="ollama/phi",
+            messages=messages,
+            max_tokens=10,
+            api_base="https://test-ollama-endpoint.onrender.com",
+            stream=True
+        )
+        # Add any assertions here to check the response
+        complete_response = ""
+        # Add any assertions here to check the response
+        for idx, init_chunk in enumerate(response):
+            chunk, finished = streaming_format_tests(idx, init_chunk)
+            complete_response += chunk
+            if finished:
+                assert isinstance(init_chunk.choices[0], litellm.utils.StreamingChoices)
+                break
+        if complete_response.strip() == "":
+            raise Exception("Empty response received")
+        print(f"complete_response: {complete_response}")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+# test_completion_ollama_hosted_stream()
+
 def test_completion_claude_stream():
     try:
         messages = [
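`streaming_format_tests` above is a helper defined elsewhere in this test file; outside the suite, the same stream can be consumed by reading the deltas directly. A rough sketch, under the assumption that litellm's chunks follow the OpenAI streaming shape, where `choices[0].delta.content` may be None (e.g. on the final chunk):

import litellm

messages = [{"role": "user", "content": "Say hello in one word."}]  # illustrative
response = litellm.completion(
    model="ollama/phi",
    messages=messages,
    max_tokens=10,
    api_base="https://test-ollama-endpoint.onrender.com",
    stream=True,
)

complete_response = ""
for chunk in response:
    content = chunk.choices[0].delta.content  # assumed OpenAI-style delta; may be None
    if content:
        complete_response += content

assert complete_response.strip() != "", "Empty response received"
print(f"complete_response: {complete_response}")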
