Skip to content

Commit

Permalink
test: update the examples after refactoring.
Browse files Browse the repository at this point in the history
  • Loading branch information
AsakusaRinne committed Jun 10, 2023
1 parent 264fb9a commit aaa0cba
Show file tree
Hide file tree
Showing 10 changed files with 151 additions and 103 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,17 @@
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLama.Old;

namespace LLama.Examples
{
public class ChatSession
{
ChatSession<LLamaModel> _session;
LLama.Old.ChatSession<LLama.Old.LLamaModel> _session;
public ChatSession(string modelPath, string promptFilePath, string[] antiprompt)
{
LLamaModel model = new(new LLamaParams(model: modelPath, n_ctx: 512, interactive: true, repeat_penalty: 1.0f, verbose_prompt: false));
_session = new ChatSession<LLamaModel>(model)
LLama.Old.LLamaModel model = new(new LLamaParams(model: modelPath, n_ctx: 512, interactive: true, repeat_penalty: 1.0f, verbose_prompt: false));
_session = new ChatSession<LLama.Old.LLamaModel>(model)
.WithPromptFile(promptFilePath)
.WithAntiprompt(antiprompt);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLama.Old;

namespace LLama.Examples
namespace LLama.Examples.Old
{
public class ChatWithLLamaModel
{
LLamaModel _model;
LLama.Old.LLamaModel _model;
public ChatWithLLamaModel(string modelPath, string promptFilePath, string[] antiprompt)
{
_model = new LLamaModel(new LLamaParams(model: modelPath, n_ctx: 512, interactive: true, antiprompt: antiprompt.ToList(),
_model = new LLama.Old.LLamaModel(new LLamaParams(model: modelPath, n_ctx: 512, interactive: true, antiprompt: antiprompt.ToList(),
repeat_penalty: 1.0f)).WithPromptFile(promptFilePath);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLama.Old;

namespace LLama.Examples
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLama.Old;

namespace LLama.Examples
namespace LLama.Examples.Old
{
public class InstructMode
{
LLamaModel _model;
LLama.Old.LLamaModel _model;
public InstructMode(string modelPath, string promptFile)
{
_model = new LLamaModel(new LLamaParams(model: modelPath, n_ctx: 2048, n_predict: -1, top_k: 10000, instruct: true,
_model = new LLama.Old.LLamaModel(new LLamaParams(model: modelPath, n_ctx: 2048, n_predict: -1, top_k: 10000, instruct: true,
repeat_penalty: 1.1f, n_batch: 256, temp: 0.2f)).WithPromptFile(promptFile);
}

Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLama.Old;

namespace LLama.Examples
{
public class SaveAndLoadState: IDisposable
{
LLamaModel _model;
LLama.Old.LLamaModel _model;
public SaveAndLoadState(string modelPath, string prompt)
{
_model = new LLamaModel(new LLamaParams(model: modelPath, n_ctx: 2048, n_predict: -1, top_k: 10000, instruct: true,
_model = new LLama.Old.LLamaModel(new LLamaParams(model: modelPath, n_ctx: 2048, n_predict: -1, top_k: 10000, instruct: true,
repeat_penalty: 1.1f, n_batch: 256, temp: 0.2f)).WithPrompt(prompt);
}

Expand Down
99 changes: 99 additions & 0 deletions LLama.Examples/Old/TestRunner.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLama.Examples.Old
{
    /// <summary>
    /// Interactive console menu for running the pre-v0.4.0 ("old API") examples.
    /// </summary>
    public class OldTestRunner
    {
        /// <summary>
        /// Prints the example menu, reads the user's choice from stdin and
        /// dispatches to the matching example. Invalid input re-prompts; the
        /// loop exits after one example has run to completion.
        /// </summary>
        public static void Run()
        {
            Console.WriteLine("================LLamaSharp Examples (Old Version)==================\n");

            Console.WriteLine("Please input a number to choose an example to run:");
            Console.WriteLine("0: Run a chat session.");
            Console.WriteLine("1: Run a LLamaModel to chat.");
            Console.WriteLine("2: Quantize a model.");
            Console.WriteLine("3: Get the embeddings of a message.");
            Console.WriteLine("4: Run a LLamaModel with instruct mode.");
            Console.WriteLine("5: Load and save state of LLamaModel.");


            while (true)
            {
                Console.Write("\nYour choice: ");
                // TryParse instead of int.Parse: non-numeric input previously threw
                // FormatException before the "cannot parse" branch could ever run.
                if (!int.TryParse(Console.ReadLine(), out int choice))
                {
                    Console.WriteLine("Cannot parse your choice. Please select again.");
                    continue;
                }

                if (choice == 0)
                {
                    Console.Write("Please input your model path: ");
                    var modelPath = Console.ReadLine();
                    ChatSession chat = new(modelPath, "Assets/chat-with-bob.txt", new string[] { "User:" });
                    chat.Run();
                }
                else if (choice == 1)
                {
                    Console.Write("Please input your model path: ");
                    var modelPath = Console.ReadLine();
                    ChatWithLLamaModel chat = new(modelPath, "Assets/chat-with-bob.txt", new string[] { "User:" });
                    chat.Run();
                }
                else if (choice == 2) // quantization
                {
                    Console.Write("Please input your original model path: ");
                    var inputPath = Console.ReadLine();
                    Console.Write("Please input your output model path: ");
                    var outputPath = Console.ReadLine();
                    Console.Write("Please input the quantize type (one of q4_0, q4_1, q5_0, q5_1, q8_0): ");
                    var quantizeType = Console.ReadLine();
                    Quantize q = new Quantize();
                    q.Run(inputPath, outputPath, quantizeType);
                }
                else if (choice == 3) // get the embeddings only
                {
                    Console.Write("Please input your model path: ");
                    var modelPath = Console.ReadLine();
                    GetEmbeddings em = new GetEmbeddings(modelPath);
                    Console.Write("Please input the text: ");
                    var text = Console.ReadLine();
                    em.Run(text);
                }
                else if (choice == 4) // instruct mode
                {
                    Console.Write("Please input your model path: ");
                    var modelPath = Console.ReadLine();
                    InstructMode im = new InstructMode(modelPath, "Assets/alpaca.txt");
                    Console.WriteLine("Here's a simple example for using instruct mode. You can input some words and let AI " +
                        "complete it for you. For example: Write a story about a fox that wants to make friend with human. No less than 200 words.");
                    im.Run();
                }
                else if (choice == 5) // load and save state
                {
                    Console.Write("Please input your model path: ");
                    var modelPath = Console.ReadLine();
                    Console.Write("Please input your state file path: ");
                    var statePath = Console.ReadLine();
                    // Read the bundled prompt (same asset the instruct-mode example uses)
                    // instead of the original hard-coded D:\development\... absolute path,
                    // which only existed on the author's machine.
                    SaveAndLoadState sals = new(modelPath, File.ReadAllText("Assets/alpaca.txt"));
                    sals.Run("Write a story about a fox that wants to make friend with human. No less than 200 words.");
                    sals.SaveState(statePath);
                    sals.Dispose();
                    // Force finalization so the first model fully releases native
                    // resources before a second model is created for the same weights.
                    GC.Collect();
                    GC.WaitForPendingFinalizers();

                    // create a new model to load the state.
                    SaveAndLoadState sals2 = new(modelPath, "");
                    sals2.LoadState(statePath);
                    sals2.Run("Tell me more things about the fox in the story you told me.");
                }
                else
                {
                    // Numeric but out-of-range choice: re-prompt.
                    Console.WriteLine("Cannot parse your choice. Please select again.");
                    continue;
                }
                break;
            }
        }
    }
}
109 changes: 33 additions & 76 deletions LLama.Examples/Program.cs
Original file line number Diff line number Diff line change
@@ -1,87 +1,44 @@
using LLama;
using LLama.Abstractions.Params;
using LLama.Examples;
using LLama.Examples.Old;

Console.WriteLine("================LLamaSharp Examples==================\n");
Console.WriteLine("======================================================================================================");

Console.WriteLine("Please input a number to choose an example to run:");
Console.WriteLine("0: Run a chat session.");
Console.WriteLine("1: Run a LLamaModel to chat.");
Console.WriteLine("2: Quantize a model.");
Console.WriteLine("3: Get the embeddings of a message.");
Console.WriteLine("4: Run a LLamaModel with instruct mode.");
Console.WriteLine("5: Load and save state of LLamaModel.");
Console.WriteLine(" __ __ ____ __ \r\n/\\ \\ /\\ \\ /\\ _`\\ /\\ \\ \r\n\\ \\ \\ \\ \\ \\ __ ___ ___ __ \\ \\,\\L\\_\\\\ \\ \\___ __ _ __ _____ \r\n \\ \\ \\ __\\ \\ \\ __ /'__`\\ /' __` __`\\ /'__`\\ \\/_\\__ \\ \\ \\ _ `\\ /'__`\\ /\\`'__\\/\\ '__`\\ \r\n \\ \\ \\L\\ \\\\ \\ \\L\\ \\/\\ \\L\\.\\_ /\\ \\/\\ \\/\\ \\ /\\ \\L\\.\\_ /\\ \\L\\ \\\\ \\ \\ \\ \\ /\\ \\L\\.\\_\\ \\ \\/ \\ \\ \\L\\ \\\r\n \\ \\____/ \\ \\____/\\ \\__/.\\_\\\\ \\_\\ \\_\\ \\_\\\\ \\__/.\\_\\\\ `\\____\\\\ \\_\\ \\_\\\\ \\__/.\\_\\\\ \\_\\ \\ \\ ,__/\r\n \\/___/ \\/___/ \\/__/\\/_/ \\/_/\\/_/\\/_/ \\/__/\\/_/ \\/_____/ \\/_/\\/_/ \\/__/\\/_/ \\/_/ \\ \\ \\/ \r\n \\ \\_\\ \r\n \\/_/ ");

Console.WriteLine("======================================================================================================");

while (true)
{
Console.Write("\nYour choice: ");
int choice = int.Parse(Console.ReadLine());

if (choice == 0)
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
ChatSession chat = new(modelPath, "Assets/chat-with-bob.txt", new string[] { "User:" });
chat.Run();
}
else if (choice == 1)
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
ChatWithLLamaModel chat = new(modelPath, "Assets/chat-with-bob.txt", new string[] { "User:" });
chat.Run();
}
else if (choice == 2) // quantization
{
Console.Write("Please input your original model path: ");
var inputPath = Console.ReadLine();
Console.Write("Please input your output model path: ");
var outputPath = Console.ReadLine();
Console.Write("Please input the quantize type (one of q4_0, q4_1, q5_0, q5_1, q8_0): ");
var quantizeType = Console.ReadLine();
Quantize q = new Quantize();
q.Run(inputPath, outputPath, quantizeType);
}
else if (choice == 3) // get the embeddings only
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
GetEmbeddings em = new GetEmbeddings(modelPath);
Console.Write("Please input the text: ");
var text = Console.ReadLine();
em.Run(text);
}
else if (choice == 4) // instruct mode
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
InstructMode im = new InstructMode(modelPath, "Assets/alpaca.txt");
Console.WriteLine("Here's a simple example for using instruct mode. You can input some words and let AI " +
"complete it for you. For example: Write a story about a fox that wants to make friend with human. No less than 200 words.");
im.Run();
}
else if (choice == 5) // load and save state
{
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();
Console.Write("Please input your state file path: ");
var statePath = Console.ReadLine();
SaveAndLoadState sals = new(modelPath, File.ReadAllText(@"D:\development\llama\llama.cpp\prompts\alpaca.txt"));
sals.Run("Write a story about a fox that wants to make friend with human. No less than 200 words.");
sals.SaveState(statePath);
sals.Dispose();
GC.Collect();
GC.WaitForPendingFinalizers();

// create a new model to load the state.
SaveAndLoadState sals2 = new(modelPath, "");
sals2.LoadState(statePath);
sals2.Run("Tell me more things about the fox in the story you told me.");
}
else
Console.WriteLine();

Console.WriteLine("Please choose the version you want to test: ");
Console.WriteLine("0. old version (for v0.3.0 or earlier version)");
Console.WriteLine("1. new version (for versions after v0.4.0)");

Console.Write("\nYour Choice: ");
int version = int.Parse(Console.ReadLine());
Console.WriteLine();

if(version == 1)
{
Console.WriteLine("The examples for new versions are under working now. We'll soon update the examples." +
" Thank you for your support!");
string modelPath = "D:\\development\\llama\\weights\\wizard-vicuna-13B.ggmlv3.q4_1.bin";
var prompt = File.ReadAllText("Assets/dan.txt").Trim();
LLamaInstructExecutor ex = new(new LLamaModel(new ModelParams(modelPath, contextSize: 1024)));

while (true)
{
Console.WriteLine("Cannot parse your choice. Please select again.");
continue;
foreach (var text in ex.Infer(prompt, new SessionParams() { Temperature = 0.6f }))
{
Console.Write(text);
}
prompt = Console.ReadLine();
}
break;
}
else
{
OldTestRunner.Run();
}
3 changes: 2 additions & 1 deletion LLama.WebAPI/Services/ChatService.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
using LLama.WebAPI.Models;
using LLama.Old;
using LLama.WebAPI.Models;

namespace LLama.WebAPI.Services;

Expand Down
16 changes: 1 addition & 15 deletions LLamaSharp.sln
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLama.Examples", "LLama.Exa
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLamaSharp", "LLama\LLamaSharp.csproj", "{01A12D68-DE95-425E-AEEE-2D099305036D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WPFDemo", "WPFDemo\WPFDemo.csproj", "{1E952A70-B720-4F76-9856-EC3B4259A80B}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LLama.WebAPI", "LLama.WebAPI\LLama.WebAPI.csproj", "{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLama.WebAPI", "LLama.WebAPI\LLama.WebAPI.csproj", "{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Expand Down Expand Up @@ -59,18 +57,6 @@ Global
{01A12D68-DE95-425E-AEEE-2D099305036D}.Release|Any CPU.Build.0 = Release|Any CPU
{01A12D68-DE95-425E-AEEE-2D099305036D}.Release|x64.ActiveCfg = Release|x64
{01A12D68-DE95-425E-AEEE-2D099305036D}.Release|x64.Build.0 = Release|x64
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Debug|x64.ActiveCfg = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Debug|x64.Build.0 = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.GPU|Any CPU.ActiveCfg = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.GPU|Any CPU.Build.0 = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.GPU|x64.ActiveCfg = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.GPU|x64.Build.0 = Debug|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|Any CPU.Build.0 = Release|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|x64.ActiveCfg = Release|Any CPU
{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|x64.Build.0 = Release|Any CPU
{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|x64.ActiveCfg = Debug|Any CPU
Expand Down

0 comments on commit aaa0cba

Please sign in to comment.