forked from MuhammadIbneRafiq/BACKEND_autolanding_ai
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: just_testing.js
150 lines (129 loc) · 4.71 KB
/
just_testing.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
// Imports
import dotenv from "dotenv";
import readline from "readline";
import { ChatGroq } from "@langchain/groq";
import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { HumanMessage } from "@langchain/core/messages";
import term from "terminal-kit";
// Load environment variables (GROQ_API_KEY is read below) from a .env file
dotenv.config();
// Terminal Kit handle used for all colored terminal output in this file
const tk = term.terminal;
// Groq-hosted Llama 3 70B chat model (Groq, not OpenAI — authenticated via GROQ_API_KEY)
const llm = new ChatGroq({
apiKey: process.env.GROQ_API_KEY,
model: "llama3-70b-8192",
});
// Tavily web-search tool, capped at 3 results per query
// NOTE(review): Tavily presumably reads its API key from the environment — confirm TAVILY_API_KEY is set
const tavily = new TavilySearchResults({
maxResults: 3,
});
// ReAct-style LangGraph agent wiring the Groq model to the Tavily search tool
const langgraphAgent = createReactAgent({
llm: llm,
tools: [tavily],
});
// Prompt the user on stdin and resolve with whatever line they type.
//
// A fresh readline interface is created per call and torn down as soon as a
// line arrives, so stdin is not held open between questions.
//
// @param {string} message - Prompt text printed before the cursor.
// @return {Promise<string>} Resolves with the user's raw input line.
function getUserQuestion(message) {
  return new Promise((resolve) => {
    const lineReader = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
    });
    lineReader.question(message, (answer) => {
      lineReader.close();
      resolve(answer);
    });
  });
}
// Process one streamed chunk from the agent graph and render it to the terminal.
//
// A chunk shaped like { agent: { messages: [...] } } carries either tool-call
// requests (announced in blue) or the final answer (printed black-on-white).
// Chunks without an "agent" key (e.g. tool-execution chunks) are ignored.
//
// @param {Object} chunk - One chunk emitted by langgraphAgent.stream().
// @return {void}
function processChunks(chunk) {
  if (!("agent" in chunk)) {
    return;
  }
  for (const message of chunk.agent.messages) {
    // BUG FIX: the original condition was
    //   "tool_calls" in message.additional_kwargs != undefined
    // which (a) throws a TypeError when additional_kwargs is undefined, and
    // (b) parses as `(bool) != undefined`, i.e. is always true. Optional
    // chaining plus Array.isArray expresses the intended check safely.
    const toolCalls = message.additional_kwargs?.tool_calls;
    if (Array.isArray(toolCalls)) {
      for (const toolCall of toolCalls) {
        const toolName = toolCall.function.name;
        // Tool arguments are normally a JSON string; try a direct parse first.
        // Keep the original single-quote workaround only as a fallback for
        // models that emit Python-style quoting. (The fallback is best-effort:
        // it corrupts apostrophes inside values.)
        let toolArguments;
        try {
          toolArguments = JSON.parse(toolCall.function.arguments);
        } catch {
          toolArguments = JSON.parse(
            toolCall.function.arguments.replace(/'/g, '"')
          );
        }
        const toolInput = toolArguments.input;
        // Announce the tool call: blue text with the name/query highlighted.
        tk
          .colorRgbHex("#00afff")(`\nThe agent is calling the tool `)
          .bgColorRgbHex("#00afff")
          .color("black")(`${toolName}`)
          .bgColor("black")
          .colorRgbHex("#00afff")(` with the query `)
          .bgColorRgbHex("#00afff")
          .color("black")(`${toolInput}`)
          .bgColor("black")
          .colorRgbHex("#00afff")(
            `. Please wait for the agent's answer...\n`
          );
      }
    } else {
      // No tool calls: this message carries the agent's final answer text.
      const agentAnswer = message.content;
      tk.bgColor("white")
        .color("black")(`\nAgent:\n${agentAnswer}\n`)
        .color("white")
        .bgColor("black");
    }
  }
}
// Run the interactive chat loop until the user types "quit".
//
// Each iteration reads one question from stdin, streams it through the
// LangGraph agent, and hands every streamed chunk to processChunks for
// rendering.
//
// @return {Promise<void>} Settles once the user chooses to quit.
async function main() {
  for (;;) {
    const question = await getUserQuestion("\nUser:\n");
    // "quit" (any casing) ends the session with a farewell message.
    if (question.toLowerCase() === "quit") {
      tk.bgColor("white").color("black")("\nAgent:\nHave a nice day!\n");
      tk.bgColor("black").color("white")("\n");
      return;
    }
    // Stream the agent's answer and render each chunk as it arrives.
    const responseStream = await langgraphAgent.stream({
      messages: [new HumanMessage({ content: question })],
    });
    for await (const chunk of responseStream) {
      processChunks(chunk);
    }
  }
}
// Entry point. The call was previously commented out, so running the script
// did nothing; invoke main() and surface any failure explicitly instead of
// leaving a floating promise / unhandled rejection.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});