use claude 3 sonnet, gpt 3.5 0125 (langchain-ai#286)
* use claude 3 sonnet

* fmt

* fmt
baskaryan authored Mar 12, 2024
1 parent 715181a commit 82e6b0f
Showing 3 changed files with 12 additions and 12 deletions.
12 changes: 6 additions & 6 deletions MODIFY.md

```diff
@@ -72,8 +72,8 @@ The LLM is used inside the `/chat` endpoint for generating the final answer, and
 Without any modification, we offer a few LLM providers out of the box:
 
-- `gpt-3.5-turbo` by OpenAI
-- `claude-2.1` by Anthropic
+- `gpt-3.5-turbo-0125` by OpenAI
+- `claude-3-sonnet-20240229` by Anthropic
 - `mixtral-8x7b` by Fireworks
 - `gemini-pro` by Google
 - `command` by Cohere
@@ -89,16 +89,16 @@ First, I'll demonstrate how to replace all options with a single provider, as it
 
 ```python
 llm = ChatOpenAI(
-    model="gpt-3.5-turbo-1106",
+    model="gpt-3.5-turbo-0125",
     streaming=True,
     temperature=0,
 ).configurable_alternatives(
     # This gives this field an id
     # When configuring the end runnable, we can then use this id to configure this field
     ConfigurableField(id="llm"),
     default_key="openai_gpt_3_5_turbo",
-    anthropic_claude_2_1=ChatAnthropic(
-        model="claude-2.1",
+    anthropic_claude_3_sonnet=ChatAnthropic(
+        model="claude-3-sonnet-20240229",
         max_tokens=16384,
         temperature=0,
         anthropic_api_key=os.environ.get("ANTHROPIC_API_KEY", "not_provided"),
@@ -138,7 +138,7 @@ response_synthesizer = (
     default_response_synthesizer.configurable_alternatives(
         ConfigurableField("llm"),
         default_key="openai_gpt_3_5_turbo",
-        anthropic_claude_2_1=default_response_synthesizer,
+        anthropic_claude_3_sonnet=default_response_synthesizer,
         ...
         local_ollama=default_response_synthesizer,
     )
```
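To see how the renamed alternative is selected at run time, here is a minimal, self-contained sketch (not part of this commit; it assumes the `langchain-openai` and `langchain-anthropic` packages are installed and mirrors the configuration above): `ConfigurableField(id="llm")` registers the alternatives under one id, and `with_config` picks one per call.

```python
import os

from langchain_anthropic import ChatAnthropic
from langchain_core.runnables import ConfigurableField
from langchain_openai import ChatOpenAI

# Same shape as the MODIFY.md example: a default model plus one named alternative.
llm = ChatOpenAI(
    model="gpt-3.5-turbo-0125",
    temperature=0,
).configurable_alternatives(
    ConfigurableField(id="llm"),
    default_key="openai_gpt_3_5_turbo",
    anthropic_claude_3_sonnet=ChatAnthropic(
        model="claude-3-sonnet-20240229",
        temperature=0,
        anthropic_api_key=os.environ.get("ANTHROPIC_API_KEY", "not_provided"),
    ),
)

# Uses the default key, i.e. gpt-3.5-turbo-0125.
llm.invoke("Say hello.")

# The same runnable, routed to Claude 3 Sonnet for this call only.
llm.with_config(configurable={"llm": "anthropic_claude_3_sonnet"}).invoke("Say hello.")
```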
8 changes: 4 additions & 4 deletions backend/chain.py

```diff
@@ -222,7 +222,7 @@ def cohere_response_synthesizer(input: dict) -> RunnableSequence:
     default_response_synthesizer.configurable_alternatives(
         ConfigurableField("llm"),
         default_key="openai_gpt_3_5_turbo",
-        anthropic_claude_2_1=default_response_synthesizer,
+        anthropic_claude_3_sonnet=default_response_synthesizer,
         fireworks_mixtral=default_response_synthesizer,
         google_gemini_pro=default_response_synthesizer,
         cohere_command=cohere_response_synthesizer,
@@ -237,16 +237,16 @@ def cohere_response_synthesizer(input: dict) -> RunnableSequence:
 
 
 llm = ChatOpenAI(
-    model="gpt-3.5-turbo-1106",
+    model="gpt-3.5-turbo-0125",
     temperature=0,
     streaming=True,
 ).configurable_alternatives(
     # This gives this field an id
     # When configuring the end runnable, we can then use this id to configure this field
     ConfigurableField(id="llm"),
     default_key="openai_gpt_3_5_turbo",
-    anthropic_claude_2_1=ChatAnthropic(
-        model="claude-2.1",
+    anthropic_claude_3_sonnet=ChatAnthropic(
+        model="claude-3-sonnet-20240229",
         temperature=0,
         max_tokens=16384,
         anthropic_api_key=os.environ.get("ANTHROPIC_API_KEY", "not_provided"),
```
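For context on why `backend/chain.py` registers the response synthesizer under the same `"llm"` id, here is a hedged sketch (reusing the configurable `llm` from the sketch above; the prompt and `StrOutputParser` are stand-ins for the real retrieval-augmented chain): configurable fields that share an id all receive the same value, so one request-level setting switches both the synthesizer alternative and the underlying model.

```python
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import ConfigurableField

# Stand-in for the real synthesizer, which also wires in retrieved context.
prompt = ChatPromptTemplate.from_template("Answer briefly: {question}")
default_response_synthesizer = prompt | llm | StrOutputParser()

response_synthesizer = default_response_synthesizer.configurable_alternatives(
    ConfigurableField("llm"),
    default_key="openai_gpt_3_5_turbo",
    anthropic_claude_3_sonnet=default_response_synthesizer,
)

# One config value selects the synthesizer alternative *and* the inner model,
# because both were registered under the id "llm".
response_synthesizer.invoke(
    {"question": "What is LangChain?"},
    config={"configurable": {"llm": "anthropic_claude_3_sonnet"}},
)
```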
4 changes: 2 additions & 2 deletions frontend/app/components/ChatWindow.tsx

```diff
@@ -29,7 +29,7 @@ import { apiBaseUrl } from "../utils/constants";
 
 const MODEL_TYPES = [
   "openai_gpt_3_5_turbo",
-  "anthropic_claude_2_1",
+  "anthropic_claude_3_sonnet",
   "google_gemini_pro",
   "fireworks_mixtral",
   "cohere_command",
@@ -255,7 +255,7 @@ export function ChatWindow(props: { conversationId: string }) {
         width={"240px"}
       >
         <option value="openai_gpt_3_5_turbo">GPT-3.5-Turbo</option>
-        <option value="anthropic_claude_2_1">Claude-2.1</option>
+        <option value="anthropic_claude_3_sonnet">Claude 3 Sonnet</option>
         <option value="google_gemini_pro">Google Gemini Pro</option>
         <option value="fireworks_mixtral">
           Mixtral (via Fireworks.ai)
```
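The frontend keys must stay in sync with the alternative names registered on the backend; a hypothetical sanity check (not in the repo, shown in Python for consistency with the sketches above) makes the contract this commit maintains explicit:

```python
# Hypothetical check, not part of the repo: every key offered in the
# frontend's MODEL_TYPES must be registered as a backend alternative
# (or be the default_key), otherwise selecting it fails at request time.
FRONTEND_MODEL_TYPES = {
    "openai_gpt_3_5_turbo",
    "anthropic_claude_3_sonnet",
    "google_gemini_pro",
    "fireworks_mixtral",
    "cohere_command",
}

BACKEND_LLM_KEYS = {
    "openai_gpt_3_5_turbo",  # default_key
    "anthropic_claude_3_sonnet",
    "fireworks_mixtral",
    "google_gemini_pro",
    "cohere_command",
}

assert FRONTEND_MODEL_TYPES <= BACKEND_LLM_KEYS, (
    "frontend offers a model key the backend does not register"
)
```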
