OllamaAgent.cs
using System.Diagnostics;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using AIShell.Abstraction;
using OllamaSharp;
using OllamaSharp.Models;

namespace AIShell.Ollama.Agent;

public sealed partial class OllamaAgent : ILLMAgent
{
private bool _reloadSettings;
private bool _isDisposed;
private string _configRoot;
private Settings _settings;
private OllamaApiClient _client;
private GenerateRequest _request;
private FileSystemWatcher _watcher;

/// <summary>
/// The name of the setting file.
/// </summary>
private const string SettingFileName = "ollama.config.json";

/// <summary>
/// Gets the settings.
/// </summary>
internal Settings Settings => _settings;

/// <summary>
/// The name of the agent
/// </summary>
public string Name => "ollama";

/// <summary>
/// The description of the agent, shown at startup.
/// </summary>
public string Description => "This is an AI assistant to interact with a language model running locally or remotely by utilizing the Ollama API. Be sure to follow all prerequisites in https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent";

/// <summary>
/// The company referenced in the /like and /dislike verbiage to indicate who the telemetry helps.
/// </summary>
public string Company => "Microsoft";

/// <summary>
/// Sample questions shown at startup to suggest good queries to ask the agent.
/// </summary>
public List<string> SampleQueries => [
"How do I list files in a given directory?"
];

/// <summary>
/// Optional legal or additional-information links to show at startup.
/// </summary>
public Dictionary<string, string> LegalLinks { private set; get; } = new(StringComparer.OrdinalIgnoreCase)
{
["Ollama Docs"] = "https://github.com/ollama/ollama",
["Prerequisites"] = "https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent"
};

/// <summary>
/// Dispose method to clean up the resources used by the agent, such as the settings file watcher.
/// </summary>
public void Dispose()
{
if (_isDisposed)
{
return;
}
GC.SuppressFinalize(this);
_watcher.Dispose();
_isDisposed = true;
}

/// <summary>
/// Initializing function for the class when the shell registers an agent
/// </summary>
/// <param name="config">Agent configuration for any configuration file and other settings</param>
public void Initialize(AgentConfig config)
{
_configRoot = config.ConfigurationRoot;
SettingFile = Path.Combine(_configRoot, SettingFileName);
_settings = ReadSettings();
if (_settings is null)
{
// Create the setting file with examples to serve as a template for user to update.
NewExampleSettingFile();
_settings = ReadSettings();
}
// Create Ollama request
_request = new GenerateRequest();
// Create Ollama client
_client = new OllamaApiClient(_settings.Endpoint);
// Watch for changes to the settings file
_watcher = new FileSystemWatcher(_configRoot, SettingFileName)
{
NotifyFilter = NotifyFilters.LastWrite,
EnableRaisingEvents = true,
};
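// The handler below only flags the settings for reload; ReloadSettings applies the change on the next chat or refresh.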
_watcher.Changed += OnSettingFileChange;
}

/// <summary>
/// Get commands that an agent can register to the shell when being loaded.
/// </summary>
public IEnumerable<CommandBase> GetCommands() => null;

/// <summary>
/// Gets the path to the setting file of the agent.
/// </summary>
public string SettingFile { private set; get; }

/// <summary>
/// Gets a value indicating whether the agent accepts a specific user action feedback.
/// </summary>
/// <param name="action">The user action.</param>
public bool CanAcceptFeedback(UserAction action) => false;

/// <summary>
/// A user action was taken against the last response from this agent.
/// </summary>
/// <param name="actionPayload">The payload that describes the user action.</param>
public void OnUserAction(UserActionPayload actionPayload) {}

/// <summary>
/// Refresh the current chat by starting a new chat session.
/// This method allows an agent to reset chat states, interact with the user for authentication, print a welcome message, and more.
/// </summary>
public Task RefreshChatAsync(IShell shell, bool force)
{
if (force)
{
// Reload the setting file if needed.
ReloadSettings();
// Clear the Ollama generate context so the new chat session starts fresh.
_request.Context = null;
}
return Task.CompletedTask;
}

/// <summary>
/// Main chat function that takes the user's input, passes it to the LLM, and renders the response.
/// </summary>
/// <param name="input">The user input from the chat experience.</param>
/// <param name="shell">The shell that provides host functionality.</param>
/// <returns>A task whose result indicates whether the query was served by the agent.</returns>
public async Task<bool> ChatAsync(string input, IShell shell)
{
// Get the shell host
IHost host = shell.Host;
// Get the cancellation token
CancellationToken token = shell.CancellationToken;
// Reload the setting file if needed.
ReloadSettings();
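// For a local endpoint, make sure an 'ollama' process is running before sending the request.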
if (IsLocalHost().IsMatch(_client.Uri.Host) && Process.GetProcessesByName("ollama").Length is 0)
{
host.WriteErrorLine("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met.");
return false;
}
// Prepare request
_request.Prompt = input;
_request.Model = _settings.Model;
_request.Stream = _settings.Stream;
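// Note: _request.Context carries over from the previous response so the conversation state is preserved across turns.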
try
{
if (_request.Stream)
{
// Wait for the stream with the spinner running
var ollamaStreamEnumerator = await host.RunWithSpinnerAsync(
status: "Thinking ...",
func: async () =>
{
// Start generating the stream asynchronously and return an enumerator
var enumerator = _client.GenerateAsync(_request, token).GetAsyncEnumerator(token);
if (await enumerator.MoveNextAsync().ConfigureAwait(false))
{
return enumerator;
}
return null;
}
).ConfigureAwait(false);
if (ollamaStreamEnumerator is not null)
{
using IStreamRender streamingRender = host.NewStreamRender(token);
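// The first chunk was already fetched inside the spinner, so render it before advancing the enumerator.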
do
{
var currentStream = ollamaStreamEnumerator.Current;
// Update the render with stream response
streamingRender.Refresh(currentStream.Response);
if (currentStream.Done)
{
// If the stream is complete, update the request context with the last stream context
var ollamaLastStream = (GenerateDoneResponseStream)currentStream;
_request.Context = ollamaLastStream.Context;
}
}
while (await ollamaStreamEnumerator.MoveNextAsync().ConfigureAwait(false));
}
}
else
{
// Build single response with spinner
var ollamaResponse = await host.RunWithSpinnerAsync(
status: "Thinking ...",
func: async () => await _client.GenerateAsync(_request, token).StreamToEndAsync()
).ConfigureAwait(false);
// Update request context
_request.Context = ollamaResponse.Context;
// Render the full response
host.RenderFullResponse(ollamaResponse.Response);
}
}
catch (OperationCanceledException)
{
// Ignore the cancellation exception.
}
catch (HttpRequestException e)
{
host.WriteErrorLine($"{e.Message}");
host.WriteErrorLine($"Ollama model: \"{_settings.Model}\"");
host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\"");
host.WriteErrorLine($"Ollama settings: \"{SettingFile}\"");
}
return true;
}

private void ReloadSettings()
{
if (_reloadSettings)
{
_reloadSettings = false;
var settings = ReadSettings();
if (settings is null)
{
return;
}
_settings = settings;
// Check if the endpoint has changed
bool isEndpointChanged = !string.Equals(_settings.Endpoint, _client.Uri.OriginalString, StringComparison.OrdinalIgnoreCase);
if (isEndpointChanged)
{
// Create a new client with updated endpoint
_client = new OllamaApiClient(_settings.Endpoint);
}
}
}

private Settings ReadSettings()
{
Settings settings = null;
FileInfo file = new(SettingFile);
if (file.Exists)
{
try
{
using var stream = file.OpenRead();
var data = JsonSerializer.Deserialize(stream, SourceGenerationContext.Default.ConfigData);
settings = new Settings(data);
}
catch (Exception e)
{
throw new InvalidDataException($"Parsing settings from '{SettingFile}' failed with the following error: {e.Message}", e);
}
}
return settings;
}

private void OnSettingFileChange(object sender, FileSystemEventArgs e)
{
if (e.ChangeType is WatcherChangeTypes.Changed)
{
_reloadSettings = true;
}
}

private void NewExampleSettingFile()
{
string SampleContent = """
{
// To use Ollama API service:
// 1. Install Ollama: `winget install Ollama.Ollama`
// 2. Start Ollama API server: `ollama serve`
// 3. Install Ollama model: `ollama pull phi3`
// Declare Ollama model
"Model": "phi3",
// Declare Ollama endpoint
"Endpoint": "http://localhost:11434",
// Enable Ollama streaming
"Stream": false
}
""";
File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8);
}
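
// For reference, ReadSettings deserializes JSON in the shape above via SourceGenerationContext.Default.ConfigData.
// A minimal sketch of the assumed shape of that type (defined elsewhere in this agent); the actual definition may differ:
//
//     internal class ConfigData
//     {
//         public string Model { get; set; }
//         public string Endpoint { get; set; }
//         public bool Stream { get; set; }
//     }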

/// <summary>
/// Defines a generated regular expression to match localhost addresses
/// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity.
/// </summary>
[GeneratedRegex("^(localhost|127\\.0\\.0\\.1|\\[::1\\])$", RegexOptions.IgnoreCase)]
internal partial Regex IsLocalHost();
}