Commit 943b701

Big improvements to the Ollama agent (PowerShell#310)
1. Switch to `OllamaSharp` to simplify API calls and support both streaming and non-streaming responses. 2. Add context support so the agent can remember previous responses. 3. Add configuration management support. 4. Improve chat interaction.
1 parent 3110c2c commit 943b701
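
The changes below center on `ChatAsync`: a single `GenerateRequest` is reused across turns so its `Context` value can carry the conversation state, and `OllamaSharp` serves both the streaming and non-streaming paths. As a rough, standalone sketch of that pattern (a plain console loop stands in for AIShell's host and renderer and is not part of this commit; endpoint and model mirror the agent's example settings):

using System;
using System.Threading;
using OllamaSharp;
using OllamaSharp.Models;

// Minimal sketch of the streaming-with-context pattern used by the agent.
var client = new OllamaApiClient("http://localhost:11434");
var request = new GenerateRequest { Model = "phi3", Stream = true };

while (true)
{
    Console.Write("> ");
    string input = Console.ReadLine();
    if (string.IsNullOrEmpty(input)) { break; }

    request.Prompt = input;

    await foreach (var chunk in client.GenerateAsync(request, CancellationToken.None))
    {
        // Print each streamed fragment as it arrives.
        Console.Write(chunk?.Response);

        if (chunk is GenerateDoneResponseStream done)
        {
            // Carry the returned context into the next request so the model
            // "remembers" previous responses.
            request.Context = done.Context;
        }
    }

    Console.WriteLine();
}

The agent wires these same calls through `host.RunWithSpinnerAsync` and `IStreamRender` instead of `Console`, as shown in the diff below.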

7 files changed: +307 −166 lines changed


shell/agents/AIShell.Ollama.Agent/AIShell.Ollama.Agent.csproj

Lines changed: 5 additions & 0 deletions

@@ -4,6 +4,7 @@
     <TargetFramework>net8.0</TargetFramework>
     <ImplicitUsings>enable</ImplicitUsings>
     <SuppressNETCoreSdkPreviewMessage>true</SuppressNETCoreSdkPreviewMessage>
+    <CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>

     <!-- Disable deps.json generation -->
     <GenerateDependencyFile>false</GenerateDependencyFile>
@@ -15,6 +16,10 @@
     <DebugType>None</DebugType>
   </PropertyGroup>

+  <ItemGroup>
+    <PackageReference Include="OllamaSharp" Version="4.0.8" />
+  </ItemGroup>
+
   <ItemGroup>
     <ProjectReference Include="..\..\AIShell.Abstraction\AIShell.Abstraction.csproj">
       <!-- Disable copying AIShell.Abstraction.dll to output folder -->

shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs

Lines changed: 230 additions & 26 deletions

@@ -1,10 +1,33 @@
 using System.Diagnostics;
+using System.Text;
+using System.Text.Json;
+using System.Text.RegularExpressions;
 using AIShell.Abstraction;
+using OllamaSharp;
+using OllamaSharp.Models;

 namespace AIShell.Ollama.Agent;

-public sealed class OllamaAgent : ILLMAgent
+public sealed partial class OllamaAgent : ILLMAgent
 {
+    private bool _reloadSettings;
+    private bool _isDisposed;
+    private string _configRoot;
+    private Settings _settings;
+    private OllamaApiClient _client;
+    private GenerateRequest _request;
+    private FileSystemWatcher _watcher;
+
+    /// <summary>
+    /// The name of setting file
+    /// </summary>
+    private const string SettingFileName = "ollama.config.json";
+
+    /// <summary>
+    /// Gets the settings.
+    /// </summary>
+    internal Settings Settings => _settings;
+
     /// <summary>
     /// The name of the agent
     /// </summary>
@@ -13,7 +36,7 @@ public sealed class OllamaAgent : ILLMAgent
     /// <summary>
     /// The description of the agent to be shown at start up
     /// </summary>
-    public string Description => "This is an AI assistant to interact with a language model running locally by utilizing the Ollama CLI tool. Be sure to follow all prerequisites in aka.ms/aish/ollama";
+    public string Description => "This is an AI assistant to interact with a language model running locally or remotely by utilizing the Ollama API. Be sure to follow all prerequisites in https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent";

     /// <summary>
     /// This is the company added to /like and /dislike verbiage for who the telemetry helps.
@@ -30,19 +53,25 @@ public sealed class OllamaAgent : ILLMAgent
     /// <summary>
     /// These are any optional legal/additional information links you want to provide at start up
     /// </summary>
-    public Dictionary<string, string> LegalLinks { private set; get; }
-
-    /// <summary>
-    /// This is the chat service to call the API from
-    /// </summary>
-    private OllamaChatService _chatService;
+    public Dictionary<string, string> LegalLinks { private set; get; } = new(StringComparer.OrdinalIgnoreCase)
+    {
+        ["Ollama Docs"] = "https://github.com/ollama/ollama",
+        ["Prerequisites"] = "https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent"
+    };

     /// <summary>
     /// Dispose method to clean up the unmanaged resource of the chatService
     /// </summary>
     public void Dispose()
     {
-        _chatService?.Dispose();
+        if (_isDisposed)
+        {
+            return;
+        }
+
+        GC.SuppressFinalize(this);
+        _watcher.Dispose();
+        _isDisposed = true;
     }

     /// <summary>
@@ -51,13 +80,31 @@ public void Dispose()
     /// <param name="config">Agent configuration for any configuration file and other settings</param>
     public void Initialize(AgentConfig config)
     {
-        _chatService = new OllamaChatService();
+        _configRoot = config.ConfigurationRoot;
+
+        SettingFile = Path.Combine(_configRoot, SettingFileName);
+        _settings = ReadSettings();
+
+        if (_settings is null)
+        {
+            // Create the setting file with examples to serve as a template for user to update.
+            NewExampleSettingFile();
+            _settings = ReadSettings();
+        }
+
+        // Create Ollama request
+        _request = new GenerateRequest();
+
+        // Create Ollama client
+        _client = new OllamaApiClient(_settings.Endpoint);

-        LegalLinks = new(StringComparer.OrdinalIgnoreCase)
+        // Watch for changes to the settings file
+        _watcher = new FileSystemWatcher(_configRoot, SettingFileName)
         {
-            ["Ollama Docs"] = "https://github.com/ollama/ollama",
-            ["Prerequisites"] = "https://aka.ms/ollama/readme"
+            NotifyFilter = NotifyFilters.LastWrite,
+            EnableRaisingEvents = true,
         };
+        _watcher.Changed += OnSettingFileChange;
     }

     /// <summary>
@@ -68,7 +115,7 @@ public void Initialize(AgentConfig config)
     /// <summary>
     /// Gets the path to the setting file of the agent.
     /// </summary>
-    public string SettingFile { private set; get; } = null;
+    public string SettingFile { private set; get; }

     /// <summary>
     /// Gets a value indicating whether the agent accepts a specific user action feedback.
@@ -87,7 +134,19 @@ public void OnUserAction(UserActionPayload actionPayload) {}
     /// Refresh the current chat by starting a new chat session.
     /// This method allows an agent to reset chat states, interact with user for authentication, print welcome message, and more.
     /// </summary>
-    public Task RefreshChatAsync(IShell shell, bool force) => Task.CompletedTask;
+    public Task RefreshChatAsync(IShell shell, bool force)
+    {
+        if (force)
+        {
+            // Reload the setting file if needed.
+            ReloadSettings();
+
+            // Reset context
+            _request.Context = null;
+        }
+
+        return Task.CompletedTask;
+    }

     /// <summary>
     /// Main chat function that takes the users input and passes it to the LLM and renders it.
@@ -100,26 +159,171 @@ public async Task<bool> ChatAsync(string input, IShell shell)
         // Get the shell host
         IHost host = shell.Host;

-        // get the cancellation token
+        // Get the cancellation token
         CancellationToken token = shell.CancellationToken;

-        if (Process.GetProcessesByName("ollama").Length is 0)
+        // Reload the setting file if needed.
+        ReloadSettings();
+
+        if (IsLocalHost().IsMatch(_client.Uri.Host) && Process.GetProcessesByName("ollama").Length is 0)
         {
-            host.RenderFullResponse("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met.");
+            host.WriteErrorLine("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met.");
             return false;
         }

-        ResponseData ollamaResponse = await host.RunWithSpinnerAsync(
-            status: "Thinking ...",
-            func: async context => await _chatService.GetChatResponseAsync(context, input, token)
-        ).ConfigureAwait(false);
+        // Prepare request
+        _request.Prompt = input;
+        _request.Model = _settings.Model;
+        _request.Stream = _settings.Stream;

-        if (ollamaResponse is not null)
+        try
         {
-            // render the content
-            host.RenderFullResponse(ollamaResponse.response);
+            if (_request.Stream)
+            {
+                // Wait for the stream with the spinner running
+                var ollamaStreamEnumerator = await host.RunWithSpinnerAsync(
+                    status: "Thinking ...",
+                    func: async () =>
+                    {
+                        // Start generating the stream asynchronously and return an enumerator
+                        var enumerator = _client.GenerateAsync(_request, token).GetAsyncEnumerator(token);
+                        if (await enumerator.MoveNextAsync().ConfigureAwait(false))
+                        {
+                            return enumerator;
+                        }
+                        return null;
+                    }
+                ).ConfigureAwait(false);
+
+                if (ollamaStreamEnumerator is not null)
+                {
+                    using IStreamRender streamingRender = host.NewStreamRender(token);
+
+                    do
+                    {
+                        var currentStream = ollamaStreamEnumerator.Current;
+
+                        // Update the render with stream response
+                        streamingRender.Refresh(currentStream.Response);
+
+                        if (currentStream.Done)
+                        {
+                            // If the stream is complete, update the request context with the last stream context
+                            var ollamaLastStream = (GenerateDoneResponseStream)currentStream;
+                            _request.Context = ollamaLastStream.Context;
+                        }
+                    }
+                    while (await ollamaStreamEnumerator.MoveNextAsync().ConfigureAwait(false));
+                }
+            }
+            else
+            {
+                // Build single response with spinner
+                var ollamaResponse = await host.RunWithSpinnerAsync(
+                    status: "Thinking ...",
+                    func: async () => { return await _client.GenerateAsync(_request, token).StreamToEndAsync(); }
+                ).ConfigureAwait(false);
+
+                // Update request context
+                _request.Context = ollamaResponse.Context;
+
+                // Render the full response
+                host.RenderFullResponse(ollamaResponse.Response);
+            }
         }
-
+        catch (OperationCanceledException)
+        {
+            // Ignore the cancellation exception.
+        }
+        catch (HttpRequestException e)
+        {
+            host.WriteErrorLine($"{e.Message}");
+            host.WriteErrorLine($"Ollama model: \"{_settings.Model}\"");
+            host.WriteErrorLine($"Ollama endpoint: \"{_settings.Endpoint}\"");
+            host.WriteErrorLine($"Ollama settings: \"{SettingFile}\"");
+        }
+
         return true;
     }
+
+    private void ReloadSettings()
+    {
+        if (_reloadSettings)
+        {
+            _reloadSettings = false;
+            var settings = ReadSettings();
+            if (settings is null)
+            {
+                return;
+            }
+
+            _settings = settings;
+
+            // Check if the endpoint has changed
+            bool isEndpointChanged = !string.Equals(_settings.Endpoint, _client.Uri.OriginalString, StringComparison.OrdinalIgnoreCase);
+
+            if (isEndpointChanged)
+            {
+                // Create a new client with updated endpoint
+                _client = new OllamaApiClient(_settings.Endpoint);
+            }
+        }
+    }
+
+    private Settings ReadSettings()
+    {
+        Settings settings = null;
+        FileInfo file = new(SettingFile);
+
+        if (file.Exists)
+        {
+            try
+            {
+                using var stream = file.OpenRead();
+                var data = JsonSerializer.Deserialize(stream, SourceGenerationContext.Default.ConfigData);
+                settings = new Settings(data);
+            }
+            catch (Exception e)
+            {
+                throw new InvalidDataException($"Parsing settings from '{SettingFile}' failed with the following error: {e.Message}", e);
+            }
+        }
+
+        return settings;
+    }
+
+    private void OnSettingFileChange(object sender, FileSystemEventArgs e)
+    {
+        if (e.ChangeType is WatcherChangeTypes.Changed)
+        {
+            _reloadSettings = true;
+        }
+    }
+
+    private void NewExampleSettingFile()
+    {
+        string SampleContent = """
        {
            // To use Ollama API service:
            // 1. Install Ollama: `winget install Ollama.Ollama`
            // 2. Start Ollama API server: `ollama serve`
            // 3. Install Ollama model: `ollama pull phi3`

            // Declare Ollama model
            "Model": "phi3",
            // Declare Ollama endpoint
            "Endpoint": "http://localhost:11434",
            // Enable Ollama streaming
            "Stream": false
        }
        """;
+        File.WriteAllText(SettingFile, SampleContent, Encoding.UTF8);
+    }
+
+    /// <summary>
+    /// Defines a generated regular expression to match localhost addresses
+    /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity.
+    /// </summary>
+    [GeneratedRegex("^(localhost|127\\.0\\.0\\.1|\\[::1\\])$", RegexOptions.IgnoreCase)]
+    internal partial Regex IsLocalHost();
 }
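
`Settings`, `ConfigData`, and `SourceGenerationContext` are defined in other files touched by this commit that are not shown in this excerpt. A hypothetical sketch of what they might look like, inferred only from how `OllamaAgent.cs` uses them and from the example `ollama.config.json` it writes (which contains `//` comments, so the source-generated JSON context presumably enables comment handling):

using System.Text.Json;
using System.Text.Json.Serialization;

// Hypothetical sketch only: the real shapes live in files of this commit that
// are not shown above; these are assumptions, not the committed code.
internal class ConfigData
{
    public string Model { get; set; }
    public string Endpoint { get; set; }
    public bool Stream { get; set; }
}

internal class Settings
{
    public string Model { get; }
    public string Endpoint { get; }
    public bool Stream { get; }

    public Settings(ConfigData data)
    {
        Model = data.Model;
        Endpoint = data.Endpoint;
        Stream = data.Stream;
    }
}

// ReadSettings() deserializes the config with
// JsonSerializer.Deserialize(stream, SourceGenerationContext.Default.ConfigData),
// and the sample file contains // comments, so the context presumably looks
// something like this:
[JsonSourceGenerationOptions(ReadCommentHandling = JsonCommentHandling.Skip, AllowTrailingCommas = true)]
[JsonSerializable(typeof(ConfigData))]
internal partial class SourceGenerationContext : JsonSerializerContext
{
}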
