
Commit 06ffe3a

LLama.Examples: improve model path prompt (#526)
* LLama.Examples: RepoUtils.cs → ConsoleLogger.cs
* LLama.Examples: Examples/Runner.cs → ExampleRunner.cs
* LLama.Examples: delete unused console logger
* LLama.Examples: improve splash screen appearance
  llama_empty_call() no longer shows configuration information on startup; it is displayed automatically the first time a model is loaded
* LLama.Examples: Runner → ExampleRunner
* LLama.Examples: improve model path prompt
  The last used model is stored in a config file and is re-used when a blank path is provided
* LLama.Examples: call NativeApi.llama_empty_call() at startup
* LLama.Examples: reduce console noise when saving the model path
1 parent efa49cc commit 06ffe3a
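
Every example in this commit replaces its ad-hoc `Console.ReadLine()` model-path prompt with a shared `UserSettings.GetModelPath()` helper that remembers the last used model in a config file. The helper itself is added elsewhere in the commit and does not appear in the excerpt below, so the following is only a minimal sketch of the behaviour described in the commit message; the file name, prompt text, and storage format are assumptions, not the actual implementation.

```csharp
// Hypothetical sketch only: the real UserSettings.cs added by this commit is not shown here.
using System;
using System.IO;

public static class UserSettings
{
    // Assumed config file that remembers the last used model path.
    private static readonly string SettingsFile =
        Path.Combine(AppContext.BaseDirectory, "DefaultModel.env");

    public static string GetModelPath()
    {
        // Previously saved path, if any.
        string? saved = File.Exists(SettingsFile)
            ? File.ReadAllText(SettingsFile).Trim()
            : null;

        while (true)
        {
            Console.Write(saved is null
                ? "Please input your model path: "
                : $"Please input your model path (ENTER for \"{saved}\"): ");
            string input = Console.ReadLine()?.Trim() ?? "";

            // A blank entry re-uses the saved path.
            string path = input.Length == 0 ? saved ?? "" : input;
            if (File.Exists(path))
            {
                File.WriteAllText(SettingsFile, path); // persist quietly for the next run
                return path;
            }

            Console.WriteLine("Model file not found, please try again.");
        }
    }
}
```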

24 files changed, +165 −165 lines changed

LLama.Examples/Examples/Runner.cs renamed to LLama.Examples/ExampleRunner.cs

Lines changed: 3 additions & 4 deletions
@@ -1,8 +1,7 @@
 using Spectre.Console;
+using LLama.Examples.Examples;
 
-namespace LLama.Examples.Examples;
-
-public class Runner
+public class ExampleRunner
 {
     private static readonly Dictionary<string, Func<Task>> Examples = new()
     {
@@ -26,7 +25,7 @@ public class Runner
         { "Batched Executor (Fork)", BatchedExecutorFork.Run },
         { "Batched Executor (Rewind)", BatchedExecutorRewind.Run },
         { "SK Kernel Memory.", KernelMemory.Run },
-        { "Exit", async () => Environment.Exit(0) }
+        { "Exit", () => { Environment.Exit(0); return Task.CompletedTask; } }
     };
 
     public static async Task Run()
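
The "Exit" entry above now returns `Task.CompletedTask` from a plain lambda instead of using an `async` lambda with no `await` (which the compiler flags with warning CS1998). A minimal, self-contained illustration of the pattern (the demo class and message are hypothetical, not part of this commit):

```csharp
using System;
using System.Threading.Tasks;

internal static class ExitEntryDemo
{
    private static async Task Main()
    {
        // Satisfies Func<Task> synchronously: no 'async' keyword, so no CS1998 warning.
        Func<Task> exit = () =>
        {
            Console.WriteLine("Exiting...");   // hypothetical message for the demo
            return Task.CompletedTask;
        };

        await exit();
    }
}
```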

LLama.Examples/Examples/BatchedExecutorFork.cs

Lines changed: 1 addition & 2 deletions
@@ -15,8 +15,7 @@ public class BatchedExecutorFork
 
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         var parameters = new ModelParams(modelPath);
         using var model = LLamaWeights.LoadFromFile(parameters);

LLama.Examples/Examples/BatchedExecutorRewind.cs

Lines changed: 1 addition & 2 deletions
@@ -16,8 +16,7 @@ public class BatchedExecutorRewind
 
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         var parameters = new ModelParams(modelPath);
         using var model = LLamaWeights.LoadFromFile(parameters);

LLama.Examples/Examples/ChatChineseGB2312.cs

Lines changed: 1 addition & 2 deletions
@@ -22,8 +22,7 @@ public static async Task Run()
             " to use https://huggingface.co/hfl/chinese-alpaca-2-7b-gguf/blob/main/ggml-model-q5_0.gguf, which has been verified by LLamaSharp developers.");
         Console.ForegroundColor = ConsoleColor.White;
 
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         var parameters = new ModelParams(modelPath)
         {

LLama.Examples/Examples/ChatSessionStripRoleName.cs

Lines changed: 1 addition & 2 deletions
@@ -6,8 +6,7 @@ public class ChatSessionStripRoleName
 {
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         var parameters = new ModelParams(modelPath)
         {

LLama.Examples/Examples/ChatSessionWithHistory.cs

Lines changed: 1 addition & 2 deletions
@@ -6,8 +6,7 @@ public class ChatSessionWithHistory
 {
     public static async Task Run()
    {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         var parameters = new ModelParams(modelPath)
         {

LLama.Examples/Examples/ChatSessionWithRoleName.cs

Lines changed: 1 addition & 2 deletions
@@ -6,8 +6,7 @@ public class ChatSessionWithRoleName
 {
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         var parameters = new ModelParams(modelPath)
         {

LLama.Examples/Examples/CodingAssistant.cs

Lines changed: 16 additions & 43 deletions
@@ -2,27 +2,27 @@
 {
     using LLama.Common;
     using System;
-    using System.Reflection;
 
     internal class CodingAssistant
     {
-        const string DefaultModelUri = "https://huggingface.co/TheBloke/CodeLlama-7B-Instruct-GGUF/resolve/main/codellama-7b-instruct.Q4_K_S.gguf";
-
         // Source paper with example prompts:
         // https://doi.org/10.48550/arXiv.2308.12950
         const string InstructionPrefix = "[INST]";
         const string InstructionSuffix = "[/INST]";
-        const string SystemInstruction = "You're an intelligent, concise coding assistant. Wrap code in ``` for readability. Don't repeat yourself. Use best practice and good coding standards.";
-        private static string ModelsDirectory = Path.Combine(Directory.GetParent(Assembly.GetExecutingAssembly().Location)!.FullName, "Models");
+        const string SystemInstruction = "You're an intelligent, concise coding assistant. " +
+            "Wrap code in ``` for readability. Don't repeat yourself. " +
+            "Use best practice and good coding standards.";
 
         public static async Task Run()
         {
-            Console.Write("Please input your model path (if left empty, a default model will be downloaded for you): ");
-            var modelPath = Console.ReadLine();
-
-            if(string.IsNullOrWhiteSpace(modelPath) )
+            string modelPath = UserSettings.GetModelPath();
+            if (!modelPath.Contains("codellama", StringComparison.InvariantCultureIgnoreCase))
             {
-                modelPath = await GetDefaultModel();
+                Console.ForegroundColor = ConsoleColor.Yellow;
+                Console.WriteLine("WARNING: the model you selected is not a Code LLama model!");
+                Console.WriteLine("For this example we specifically recommend 'codellama-7b-instruct.Q4_K_S.gguf'");
+                Console.WriteLine("Press ENTER to continue...");
+                Console.ReadLine();
             }
 
             var parameters = new ModelParams(modelPath)
@@ -35,12 +35,14 @@ public static async Task Run()
 
             Console.ForegroundColor = ConsoleColor.Yellow;
             Console.WriteLine("The executor has been enabled. In this example, the LLM will follow your instructions." +
-                "\nIt's a 7B Code Llama, so it's trained for programming tasks like \"Write a C# function reading a file name from a given URI\" or \"Write some programming interview questions\"." +
+                "\nIt's a 7B Code Llama, so it's trained for programming tasks like \"Write a C# function reading " +
+                "a file name from a given URI\" or \"Write some programming interview questions\"." +
                 "\nWrite 'exit' to exit");
             Console.ForegroundColor = ConsoleColor.White;
 
-            var inferenceParams = new InferenceParams() {
-                    Temperature = 0.8f,
+            var inferenceParams = new InferenceParams()
+            {
+                Temperature = 0.8f,
                 MaxTokens = -1,
             };
 
@@ -51,7 +53,7 @@ public static async Task Run()
            {
 
                 Console.ForegroundColor = ConsoleColor.Green;
-                await foreach (var text in executor.InferAsync(instruction + System.Environment.NewLine, inferenceParams))
+                await foreach (var text in executor.InferAsync(instruction + Environment.NewLine, inferenceParams))
                 {
                     Console.Write(text);
                 }
@@ -61,34 +63,5 @@ public static async Task Run()
                 instruction = Console.ReadLine() ?? "Ask me for instructions.";
             }
         }
-
-        private static async Task<string> GetDefaultModel()
-        {
-            var uri = new Uri(DefaultModelUri);
-            var modelName = uri.Segments[^1];
-            await Console.Out.WriteLineAsync($"The following model will be used: {modelName}");
-            var modelPath = Path.Combine(ModelsDirectory, modelName);
-            if(!Directory.Exists(ModelsDirectory))
-            {
-                Directory.CreateDirectory(ModelsDirectory);
-            }
-
-            if (File.Exists(modelPath))
-            {
-                await Console.Out.WriteLineAsync($"Existing model found, using {modelPath}");
-            }
-            else
-            {
-                await Console.Out.WriteLineAsync($"Model not found locally, downloading {DefaultModelUri}...");
-                using var http = new HttpClient();
-                await using var downloadStream = await http.GetStreamAsync(uri);
-                await using var fileStream = new FileStream(modelPath, FileMode.Create, FileAccess.Write);
-                await downloadStream.CopyToAsync(fileStream);
-                await Console.Out.WriteLineAsync($"Model downloaded and saved to {modelPath}");
-            }
-
-
-            return modelPath;
-        }
     }
 }

LLama.Examples/Examples/GetEmbeddings.cs

Lines changed: 1 addition & 3 deletions
@@ -6,9 +6,7 @@ public class GetEmbeddings
 {
     public static void Run()
     {
-        Console.ForegroundColor = ConsoleColor.White;
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
 
         Console.ForegroundColor = ConsoleColor.DarkGray;
         var @params = new ModelParams(modelPath) { EmbeddingMode = true };

LLama.Examples/Examples/GrammarJsonResponse.cs

Lines changed: 7 additions & 8 deletions
@@ -7,11 +7,10 @@ public class GrammarJsonResponse
 {
     public static async Task Run()
     {
-        var gbnf = (await File.ReadAllTextAsync("Assets/json.gbnf")).Trim();
-        var grammar = Grammar.Parse(gbnf, "root");
+        string modelPath = UserSettings.GetModelPath();
 
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        var gbnf = File.ReadAllText("Assets/json.gbnf").Trim();
+        var grammar = Grammar.Parse(gbnf, "root");
 
         var parameters = new ModelParams(modelPath)
         {
@@ -27,10 +26,10 @@ public static async Task Run()
         Console.ForegroundColor = ConsoleColor.White;
 
         using var grammarInstance = grammar.CreateInstance();
-            var inferenceParams = new InferenceParams()
-            {
-                Temperature = 0.6f,
-                AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
+        var inferenceParams = new InferenceParams()
+        {
+            Temperature = 0.6f,
+            AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
             MaxTokens = 50,
             Grammar = grammarInstance
         };
