1
1
using System . Diagnostics ;
2
+ using System . Text ;
3
+ using System . Text . Json ;
4
+ using System . Text . RegularExpressions ;
2
5
using AIShell . Abstraction ;
6
+ using OllamaSharp ;
7
+ using OllamaSharp . Models ;
3
8
4
9
namespace AIShell . Ollama . Agent ;
5
10
6
- public sealed class OllamaAgent : ILLMAgent
11
/// <summary>
/// Agent that chats with a language model served by a local or remote Ollama instance.
/// </summary>
public sealed partial class OllamaAgent : ILLMAgent
{
    // Set by the file watcher when the setting file changes; consumed lazily by ReloadSettings().
    private bool _reloadSettings;

    // Guards Dispose() against running twice.
    private bool _isDisposed;

    // Root directory that holds this agent's configuration files.
    private string _configRoot;

    // Current agent settings (model, endpoint, streaming flag).
    private Settings _settings;

    // Client used to call the Ollama API; re-created when the endpoint changes.
    private OllamaApiClient _client;

    // Request object reused across turns so the conversation context carries over.
    private GenerateRequest _request;

    // Watches the setting file for on-disk changes.
    private FileSystemWatcher _watcher;

    /// <summary>
    /// The name of setting file
    /// </summary>
    private const string SettingFileName = "ollama.config.json";

    /// <summary>
    /// Gets the settings.
    /// </summary>
    internal Settings Settings => _settings;
30
+
8
31
/// <summary>
9
32
/// The name of the agent
10
33
/// </summary>
@@ -13,7 +36,7 @@ public sealed class OllamaAgent : ILLMAgent
13
36
/// <summary>
14
37
/// The description of the agent to be shown at start up
15
38
/// </summary>
16
- public string Description => "This is an AI assistant to interact with a language model running locally by utilizing the Ollama CLI tool . Be sure to follow all prerequisites in aka.ms/aish/ollama " ;
39
+ public string Description => "This is an AI assistant to interact with a language model running locally or remotely by utilizing the Ollama API . Be sure to follow all prerequisites in https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent " ;
17
40
18
41
/// <summary>
19
42
/// This is the company added to /like and /dislike verbiage for who the telemetry helps.
@@ -30,19 +53,25 @@ public sealed class OllamaAgent : ILLMAgent
30
53
/// <summary>
31
54
/// These are any optional legal/additional information links you want to provide at start up
32
55
/// </summary>
33
- public Dictionary < string , string > LegalLinks { private set ; get ; }
34
-
35
- /// <summary>
36
- /// This is the chat service to call the API from
37
- /// </summary>
38
- private OllamaChatService _chatService ;
56
+ public Dictionary < string , string > LegalLinks { private set ; get ; } = new ( StringComparer . OrdinalIgnoreCase )
57
+ {
58
+ [ "Ollama Docs" ] = "https://github.com/ollama/ollama" ,
59
+ [ "Prerequisites" ] = "https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent"
60
+ } ;
39
61
40
62
/// <summary>
41
63
/// Dispose method to clean up the unmanaged resource of the chatService
42
64
/// </summary>
43
65
public void Dispose ( )
44
66
{
45
- _chatService ? . Dispose ( ) ;
67
+ if ( _isDisposed )
68
+ {
69
+ return ;
70
+ }
71
+
72
+ GC . SuppressFinalize ( this ) ;
73
+ _watcher . Dispose ( ) ;
74
+ _isDisposed = true ;
46
75
}
47
76
48
77
/// <summary>
@@ -51,13 +80,31 @@ public void Dispose()
51
80
/// <param name="config">Agent configuration for any configuration file and other settings</param>
52
81
public void Initialize ( AgentConfig config )
53
82
{
54
- _chatService = new OllamaChatService ( ) ;
83
+ _configRoot = config . ConfigurationRoot ;
84
+
85
+ SettingFile = Path . Combine ( _configRoot , SettingFileName ) ;
86
+ _settings = ReadSettings ( ) ;
87
+
88
+ if ( _settings is null )
89
+ {
90
+ // Create the setting file with examples to serve as a template for user to update.
91
+ NewExampleSettingFile ( ) ;
92
+ _settings = ReadSettings ( ) ;
93
+ }
94
+
95
+ // Create Ollama request
96
+ _request = new GenerateRequest ( ) ;
97
+
98
+ // Create Ollama client
99
+ _client = new OllamaApiClient ( _settings . Endpoint ) ;
55
100
56
- LegalLinks = new ( StringComparer . OrdinalIgnoreCase )
101
+ // Watch for changes to the settings file
102
+ _watcher = new FileSystemWatcher ( _configRoot , SettingFileName )
57
103
{
58
- [ "Ollama Docs" ] = "https://github.com/ollama/ollama" ,
59
- [ "Prerequisites" ] = "https://aka.ms/ollama/readme"
104
+ NotifyFilter = NotifyFilters . LastWrite ,
105
+ EnableRaisingEvents = true ,
60
106
} ;
107
+ _watcher . Changed += OnSettingFileChange ;
61
108
}
62
109
63
110
/// <summary>
@@ -68,7 +115,7 @@ public void Initialize(AgentConfig config)
68
115
/// <summary>
69
116
/// Gets the path to the setting file of the agent.
70
117
/// </summary>
71
- public string SettingFile { private set ; get ; } = null ;
118
+ public string SettingFile { private set ; get ; }
72
119
73
120
/// <summary>
74
121
/// Gets a value indicating whether the agent accepts a specific user action feedback.
@@ -87,7 +134,19 @@ public void OnUserAction(UserActionPayload actionPayload) {}
87
134
/// Refresh the current chat by starting a new chat session.
88
135
/// This method allows an agent to reset chat states, interact with user for authentication, print welcome message, and more.
89
136
/// </summary>
90
- public Task RefreshChatAsync ( IShell shell , bool force ) => Task . CompletedTask ;
137
+ public Task RefreshChatAsync ( IShell shell , bool force )
138
+ {
139
+ if ( force )
140
+ {
141
+ // Reload the setting file if needed.
142
+ ReloadSettings ( ) ;
143
+
144
+ // Reset context
145
+ _request . Context = null ;
146
+ }
147
+
148
+ return Task . CompletedTask ;
149
+ }
91
150
92
151
/// <summary>
93
152
/// Main chat function that takes the users input and passes it to the LLM and renders it.
@@ -100,26 +159,171 @@ public async Task<bool> ChatAsync(string input, IShell shell)
100
159
// Get the shell host
101
160
IHost host = shell . Host ;
102
161
103
- // get the cancellation token
162
+ // Get the cancellation token
104
163
CancellationToken token = shell . CancellationToken ;
105
164
106
- if ( Process . GetProcessesByName ( "ollama" ) . Length is 0 )
165
+ // Reload the setting file if needed.
166
+ ReloadSettings ( ) ;
167
+
168
+ if ( IsLocalHost ( ) . IsMatch ( _client . Uri . Host ) && Process . GetProcessesByName ( "ollama" ) . Length is 0 )
107
169
{
108
- host . RenderFullResponse ( "Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met." ) ;
170
+ host . WriteErrorLine ( "Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met." ) ;
109
171
return false ;
110
172
}
111
173
112
- ResponseData ollamaResponse = await host . RunWithSpinnerAsync (
113
- status : "Thinking ..." ,
114
- func : async context => await _chatService . GetChatResponseAsync ( context , input , token )
115
- ) . ConfigureAwait ( false ) ;
174
+ // Prepare request
175
+ _request . Prompt = input ;
176
+ _request . Model = _settings . Model ;
177
+ _request . Stream = _settings . Stream ;
116
178
117
- if ( ollamaResponse is not null )
179
+ try
118
180
{
119
- // render the content
120
- host . RenderFullResponse ( ollamaResponse . response ) ;
181
+ if ( _request . Stream )
182
+ {
183
+ // Wait for the stream with the spinner running
184
+ var ollamaStreamEnumerator = await host . RunWithSpinnerAsync (
185
+ status : "Thinking ..." ,
186
+ func : async ( ) =>
187
+ {
188
+ // Start generating the stream asynchronously and return an enumerator
189
+ var enumerator = _client . GenerateAsync ( _request , token ) . GetAsyncEnumerator ( token ) ;
190
+ if ( await enumerator . MoveNextAsync ( ) . ConfigureAwait ( false ) )
191
+ {
192
+ return enumerator ;
193
+ }
194
+ return null ;
195
+ }
196
+ ) . ConfigureAwait ( false ) ;
197
+
198
+ if ( ollamaStreamEnumerator is not null )
199
+ {
200
+ using IStreamRender streamingRender = host . NewStreamRender ( token ) ;
201
+
202
+ do
203
+ {
204
+ var currentStream = ollamaStreamEnumerator . Current ;
205
+
206
+ // Update the render with stream response
207
+ streamingRender . Refresh ( currentStream . Response ) ;
208
+
209
+ if ( currentStream . Done )
210
+ {
211
+ // If the stream is complete, update the request context with the last stream context
212
+ var ollamaLastStream = ( GenerateDoneResponseStream ) currentStream ;
213
+ _request . Context = ollamaLastStream . Context ;
214
+ }
215
+ }
216
+ while ( await ollamaStreamEnumerator . MoveNextAsync ( ) . ConfigureAwait ( false ) ) ;
217
+ }
218
+ }
219
+ else
220
+ {
221
+ // Build single response with spinner
222
+ var ollamaResponse = await host . RunWithSpinnerAsync (
223
+ status : "Thinking ..." ,
224
+ func : async ( ) => { return await _client . GenerateAsync ( _request , token ) . StreamToEndAsync ( ) ; }
225
+ ) . ConfigureAwait ( false ) ;
226
+
227
+ // Update request context
228
+ _request . Context = ollamaResponse . Context ;
229
+
230
+ // Render the full response
231
+ host . RenderFullResponse ( ollamaResponse . Response ) ;
232
+ }
121
233
}
122
-
234
+ catch ( OperationCanceledException )
235
+ {
236
+ // Ignore the cancellation exception.
237
+ }
238
+ catch ( HttpRequestException e )
239
+ {
240
+ host . WriteErrorLine ( $ "{ e . Message } ") ;
241
+ host . WriteErrorLine ( $ "Ollama model: \" { _settings . Model } \" ") ;
242
+ host . WriteErrorLine ( $ "Ollama endpoint: \" { _settings . Endpoint } \" ") ;
243
+ host . WriteErrorLine ( $ "Ollama settings: \" { SettingFile } \" ") ;
244
+ }
245
+
123
246
return true ;
124
247
}
248
+
249
+ private void ReloadSettings ( )
250
+ {
251
+ if ( _reloadSettings )
252
+ {
253
+ _reloadSettings = false ;
254
+ var settings = ReadSettings ( ) ;
255
+ if ( settings is null )
256
+ {
257
+ return ;
258
+ }
259
+
260
+ _settings = settings ;
261
+
262
+ // Check if the endpoint has changed
263
+ bool isEndpointChanged = ! string . Equals ( _settings . Endpoint , _client . Uri . OriginalString , StringComparison . OrdinalIgnoreCase ) ;
264
+
265
+ if ( isEndpointChanged )
266
+ {
267
+ // Create a new client with updated endpoint
268
+ _client = new OllamaApiClient ( _settings . Endpoint ) ;
269
+ }
270
+ }
271
+ }
272
+
273
+ private Settings ReadSettings ( )
274
+ {
275
+ Settings settings = null ;
276
+ FileInfo file = new ( SettingFile ) ;
277
+
278
+ if ( file . Exists )
279
+ {
280
+ try
281
+ {
282
+ using var stream = file . OpenRead ( ) ;
283
+ var data = JsonSerializer . Deserialize ( stream , SourceGenerationContext . Default . ConfigData ) ;
284
+ settings = new Settings ( data ) ;
285
+ }
286
+ catch ( Exception e )
287
+ {
288
+ throw new InvalidDataException ( $ "Parsing settings from '{ SettingFile } ' failed with the following error: { e . Message } ", e ) ;
289
+ }
290
+ }
291
+
292
+ return settings ;
293
+ }
294
+
295
+ private void OnSettingFileChange ( object sender , FileSystemEventArgs e )
296
+ {
297
+ if ( e . ChangeType is WatcherChangeTypes . Changed )
298
+ {
299
+ _reloadSettings = true ;
300
+ }
301
+ }
302
+
303
+ private void NewExampleSettingFile ( )
304
+ {
305
+ string SampleContent = """
306
+ {
307
+ // To use Ollama API service:
308
+ // 1. Install Ollama: `winget install Ollama.Ollama`
309
+ // 2. Start Ollama API server: `ollama serve`
310
+ // 3. Install Ollama model: `ollama pull phi3`
311
+
312
+ // Declare Ollama model
313
+ "Model": "phi3",
314
+ // Declare Ollama endpoint
315
+ "Endpoint": "http://localhost:11434",
316
+ // Enable Ollama streaming
317
+ "Stream": false
318
+ }
319
+ """ ;
320
+ File . WriteAllText ( SettingFile , SampleContent , Encoding . UTF8 ) ;
321
+ }
322
+
323
+ /// <summary>
324
+ /// Defines a generated regular expression to match localhost addresses
325
+ /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity.
326
+ /// </summary>
327
+ [ GeneratedRegex ( "^(localhost|127\\ .0\\ .0\\ .1|\\ [::1\\ ])$" , RegexOptions . IgnoreCase ) ]
328
+ internal partial Regex IsLocalHost ( ) ;
125
329
}
0 commit comments