
Commit

feat: llama using cf worker
sdip15fa committed Oct 7, 2023
1 parent c7c7854 commit fa9bfe2
Showing 4 changed files with 36 additions and 15 deletions.
37 changes: 25 additions & 12 deletions commands/llama.ts
@@ -1,21 +1,15 @@
 // Import necessary modules and dependencies
 import { Client, Message } from "whatsapp-web.js";
 import config from "../config.js";
-import { LlamaModel, LlamaContext, LlamaChatSession } from "node-llama-cpp";
-
-const modelPath = config.llama_model_path; // Retrieve model path from environment variable
-
-const model = new LlamaModel({ modelPath });
-const context = new LlamaContext({ model });
-const session = new LlamaChatSession({ context });
+import axios from "../helpers/axios.js";
 
 const execute = async (client: Client, msg: Message, args: string[]) => {
   const chatId = (await msg.getChat()).id._serialized;
 
-  if (!modelPath) {
+  if (!config.cf_worker.url) {
     return client.sendMessage(
       chatId,
-      "Sorry, llama model path not specified in the environment variable."
+      "Sorry, cf worker url not specified in the environment variable."
     );
   }

@@ -28,11 +22,30 @@ const execute = async (client: Client, msg: Message, args: string[]) => {
 
   const text = args.join(" ") || quotedMsg.body;
 
+  const username = config.cf_worker.username;
+  const password = config.cf_worker.password;
+
+  const encodedCredentials = Buffer.from(`${username}:${password}`).toString(
+    "base64"
+  );
+  const authHeader = `Basic ${encodedCredentials}`;
+
   // Call Llama model with the obtained text
-  const response = await session.prompt(text);
+  const response = await axios.get<{ response: string }>(config.cf_worker.url, {
+    params: {
+      prompt: text,
+    },
+    headers: {
+      Authorization: authHeader,
+    },
+  });
 
-  // Send the response back to the user
-  await client.sendMessage(chatId, `Llama: ${response}`);
+  try {
+    // Send the response back to the user
+    await client.sendMessage(chatId, `Llama: ${response.data.response}`);
+  } catch {
+    await client.sendMessage(chatId, "LLaMA generation failed.");
+  }
 
   // Optionally, you can handle conversation history or context here
 
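Note: the new client code assumes the Cloudflare Worker accepts a GET request with a prompt query parameter, checks HTTP Basic auth, and replies with JSON shaped as { response: string }. The worker itself is not part of this commit; a minimal sketch of a compatible endpoint, assuming a Workers AI binding named AI, USERNAME/PASSWORD secrets, and an illustrative model name, might look like:

// Hypothetical worker sketch -- not part of this commit. Assumes a Workers AI
// binding named AI plus USERNAME/PASSWORD secrets; the model name is illustrative.
export default {
  async fetch(request: Request, env: { USERNAME: string; PASSWORD: string; AI: any }) {
    // Reject requests whose Basic auth header does not match the configured credentials.
    const expected = "Basic " + btoa(`${env.USERNAME}:${env.PASSWORD}`);
    if (request.headers.get("Authorization") !== expected) {
      return new Response("Unauthorized", { status: 401 });
    }
    // Read the prompt from the query string, mirroring the client's params: { prompt }.
    const prompt = new URL(request.url).searchParams.get("prompt") || "";
    const result = await env.AI.run("@cf/meta/llama-2-7b-chat-int8", { prompt });
    // Return the shape the client expects: { response: string }.
    return Response.json({ response: result.response });
  },
};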
6 changes: 5 additions & 1 deletion config.ts
@@ -10,7 +10,11 @@ const config = {
   default_tr_lang: process.env.DEFAULT_TR_LANG || "en",
   enable_delete_alert: process.env.ENABLE_DELETE_ALERT || "true",
   ocr_space_api_key: process.env.OCR_SPACE_API_KEY || "",
-  llama_model_path: process.env.LLAMA_MODEL_PATH || "",
+  cf_worker: {
+    url: process.env.CF_WORKER_URL,
+    username: process.env.CF_WORKER_USERNAME,
+    password: process.env.CF_WORKER_PASSWORD,
+  },
 };
 
 export default config;
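
Unlike the other keys, the new cf_worker fields have no || "" fallback, so each resolves to string | undefined; this is why commands/llama.ts guards on config.cf_worker.url before making the request. The implied shape (inferred, not declared in the commit):

// Inferred shape of the new config entry; not declared explicitly in this commit.
type CfWorkerConfig = {
  url: string | undefined;      // CF_WORKER_URL
  username: string | undefined; // CF_WORKER_USERNAME
  password: string | undefined; // CF_WORKER_PASSWORD
};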
4 changes: 3 additions & 1 deletion docker/docker-compose.yml
@@ -33,7 +33,9 @@ services:
       BITLY_API_KEY: ${BITLY_API_KEY}
       OPENAI_API_KEY: ${OPENAI_API_KEY}
       TZ: ${TZ:-Asia/Hong_Kong}
-      LLAMA_MODEL_PATH: ${LLAMA_MODEL_PATH}
+      CF_WORKER_URL: ${CF_WORKER_URL}
+      CF_WORKER_USERNAME: ${CF_WORKER_USERNAME}
+      CF_WORKER_PASSWORD: ${CF_WORKER_PASSWORD}
     networks:
       - whatsbot-network
   wtstodis-mongo:
4 changes: 3 additions & 1 deletion example.env
@@ -19,4 +19,6 @@ DISCORD_OWNER_ID=""
 BITLY_API_KEY=""
 OPENAI_API_KEY=""
 TZ="Asia/Hong_Kong"
-LLAMA_MODEL_PATH="llama/openllama-3b-v2-q4_0.gguf"
+CF_WORKER_URL=
+CF_WORKER_USERNAME=
+CF_WORKER_PASSWORD=
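
Once the three variables are filled in, the endpoint can be sanity-checked outside the bot using the same request shape as commands/llama.ts; a rough sketch, with placeholder URL and credentials:

// Hypothetical smoke test mirroring commands/llama.ts; URL and credentials are placeholders.
import axios from "axios";

const auth = Buffer.from("myuser:mypassword").toString("base64");

axios
  .get<{ response: string }>("https://llama.example.workers.dev", {
    params: { prompt: "Say hello" },
    headers: { Authorization: `Basic ${auth}` },
  })
  .then((res) => console.log(res.data.response))
  .catch((err) => console.error("Request failed:", err.message));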
