crew_ai_models.py
import os

from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
from langchain_community.llms.ollama import Ollama
from langchain_openai import ChatOpenAI


class CrewAiModels:
    """Registry of the LLM backends available to the crew."""

    # Local Ollama backends (disabled; uncomment and set the corresponding
    # environment variables to enable them).
    # ollama_instruct = Ollama(
    #     model=os.getenv("OLLAMA_MODEL_NAME"),
    #     base_url=os.getenv("OLLAMA_API_BASE"),
    # )
    # ollama_python = Ollama(
    #     model=os.getenv("OLLAMA_MODEL_NAME_2"),
    #     base_url=os.getenv("OLLAMA_API_BASE_2"),
    # )

    # OpenAI chat model, configured entirely from environment variables.
    chatgpt = ChatOpenAI(
        api_key=os.getenv("OPENAI_API_KEY"),
        model_name=os.getenv("OPENAI_MODEL_NAME"),
    )

    # Hugging Face Inference Endpoint backend (disabled).
    # hugging_face = HuggingFaceEndpoint(
    #     endpoint_url=os.getenv("HUGGINGFACE_API_BASE"),
    #     huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_KEY"),
    #     task="text-generation",
    #     max_new_tokens=2000,  # Moved from model_kwargs
    #     repetition_penalty=1.1,  # Moved from model_kwargs
    #     model_kwargs={
    #         "max_length": 4000,  # This can stay in model_kwargs
    #     },
    # )

    @classmethod
    def get_llm(cls, llm_name):
        """Return the backend registered under llm_name, or raise ValueError."""
        llm_mapping = {
            # "ollama_instruct": cls.ollama_instruct,
            # "ollama_python": cls.ollama_python,
            "chatgpt": cls.chatgpt,
            # "hugging_face": cls.hugging_face,
        }
        llm = llm_mapping.get(llm_name)
        if llm is None:
            raise ValueError(f"Invalid llm_name: {llm_name}")
        return llm

    # Defaults used by the agents; both currently point at the OpenAI backend.
    default_llm = chatgpt
    developer_llm = default_llm
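

# A minimal usage sketch, not part of the original file: it assumes
# OPENAI_API_KEY and OPENAI_MODEL_NAME are set in the environment, and that
# calling code selects a backend by name via CrewAiModels.get_llm.
if __name__ == "__main__":
    # Resolve a backend by name; unknown names raise ValueError.
    llm = CrewAiModels.get_llm("chatgpt")

    # ChatOpenAI implements the standard LangChain Runnable interface,
    # so a single prompt can be sent with .invoke().
    reply = llm.invoke("Reply with the single word: pong")
    print(reply.content)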