
Commit 3281507

brichet and trungleduc authored
Do not expose providers api (#84)
* Do not explicitly expose the model in completer, it is useless
* Do not expose the models in the provider registry, only a wrapper containing required methods
* Add a flag allowing to expose the whole chat model instead of a wrapper
* Do not expose the providers list and current provider to avoid being able to modify the 'exposeChatModel' flag
* Docstring
* Update src/provider.ts

Co-authored-by: Duc Trung Le <leductrungxf@gmail.com>

---------

Co-authored-by: Duc Trung Le <leductrungxf@gmail.com>
1 parent 98a10de commit 3281507
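The commit message above describes replacing direct access to the LangChain models with wrapper objects that expose only the methods the extension actually needs. The wrapper types AIChatModel and AICompleter are imported from src/types/ai-model.ts in the diffs below, but that file is not part of this excerpt, so the following TypeScript sketch of what such wrappers could look like is an assumption; the member names and signatures are illustrative only.

// Hedged sketch only: the real definitions live in src/types/ai-model.ts,
// which is not shown in this commit excerpt. Member names and signatures
// below are assumptions, not the actual API.
import {
  CompletionHandler,
  IInlineCompletionContext
} from '@jupyterlab/completer';
import type { BaseMessage } from '@langchain/core/messages';

/**
 * A thin wrapper around the chat model: consumers can stream a reply,
 * but cannot reach or reconfigure the underlying LangChain model.
 */
export interface AIChatModel {
  stream(messages: BaseMessage[]): Promise<AsyncIterable<unknown>>;
}

/**
 * A thin wrapper around a completer: only the fetch call used by the
 * inline completion provider is exposed.
 */
export interface AICompleter {
  fetch(
    request: CompletionHandler.IRequest,
    context: IInlineCompletionContext
  ): Promise<unknown>;
}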

File tree

13 files changed (+168, -90 lines)


src/base-completer.ts

Lines changed: 0 additions & 6 deletions
@@ -2,15 +2,9 @@ import {
   CompletionHandler,
   IInlineCompletionContext
 } from '@jupyterlab/completer';
-import { BaseLanguageModel } from '@langchain/core/language_models/base';
 import { ReadonlyPartialJSONObject } from '@lumino/coreutils';

 export interface IBaseCompleter {
-  /**
-   * The LLM completer.
-   */
-  completer: BaseLanguageModel;
-
   /**
    * The completion prompt.
    */
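After this change, IBaseCompleter no longer requires implementations to expose their model. As a rough illustration (a sketch only; anything not visible in the surrounding diffs, such as the exact prompt accessor and return types, is an assumption), a provider completer can keep its model entirely private:

// Hedged sketch of a completer after this change. The prompt accessor and
// fetch signature mirror what the diffs below show; ChatOpenAI is used only
// as a stand-in model and this class does not claim to be the real API.
import {
  CompletionHandler,
  IInlineCompletionContext
} from '@jupyterlab/completer';
import { AIMessage, SystemMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';

export class ExampleCompleter {
  constructor(settings: ConstructorParameters<typeof ChatOpenAI>[0]) {
    // The model stays private: there is no public `completer` getter anymore.
    this._completer = new ChatOpenAI(settings);
  }

  /**
   * Getter and setter for the initial prompt.
   */
  get prompt(): string {
    return this._prompt;
  }
  set prompt(value: string) {
    this._prompt = value;
  }

  async fetch(
    request: CompletionHandler.IRequest,
    context: IInlineCompletionContext
  ): Promise<string | undefined> {
    // Only the text up to the cursor is sent as the continuation prefix.
    const prefix = request.text.slice(0, request.offset);
    const messages = [new SystemMessage(this._prompt), new AIMessage(prefix)];
    const response = await this._completer.invoke(messages);
    return typeof response.content === 'string' ? response.content : undefined;
  }

  private _completer: ChatOpenAI;
  private _prompt = 'Complete the following code:';
}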

src/chat-handler.ts

Lines changed: 10 additions & 3 deletions
@@ -12,17 +12,18 @@ import {
   IInputModel,
   INewMessage
 } from '@jupyter/chat';
-import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import {
   AIMessage,
   HumanMessage,
   mergeMessageRuns,
   SystemMessage
 } from '@langchain/core/messages';
 import { UUID } from '@lumino/coreutils';
+
+import { jupyternautLiteIcon } from './icons';
 import { chatSystemPrompt } from './provider';
+import { AIChatModel } from './types/ai-model';
 import { IAIProviderRegistry } from './tokens';
-import { jupyternautLiteIcon } from './icons';

 /**
  * The base64 encoded SVG string of the jupyternaut lite icon.
@@ -52,7 +53,7 @@ export class ChatHandler extends ChatModel {
     });
   }

-  get provider(): BaseChatModel | null {
+  get provider(): AIChatModel | null {
     return this._providerRegistry.currentChatModel;
   }

@@ -194,10 +195,16 @@ export class ChatHandler extends ChatModel {
 }

 export namespace ChatHandler {
+  /**
+   * The options used to create a chat handler.
+   */
   export interface IOptions extends ChatModel.IOptions {
     providerRegistry: IAIProviderRegistry;
   }

+  /**
+   * The chat command provider for the chat.
+   */
   export class ClearCommandProvider implements IChatCommandProvider {
     public id: string = '@jupyterlite/ai:clear-commands';
     private _slash_commands: ChatCommand[] = [
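The provider getter now returns the AIChatModel wrapper instead of a LangChain BaseChatModel, so callers can still stream a reply but can no longer mutate the model's configuration. A hedged usage sketch follows, assuming the wrapper keeps a LangChain-style stream method (the actual AIChatModel surface is defined in src/types/ai-model.ts, which is not shown here):

// Hedged sketch of downstream usage. `stream` on AIChatModel is an
// assumption based on how the chat handler streams replies; it is not a
// documented API surface in this excerpt.
import { HumanMessage, SystemMessage } from '@langchain/core/messages';

import { ChatHandler } from './chat-handler';

async function askModel(handler: ChatHandler, question: string): Promise<string> {
  const model = handler.provider; // AIChatModel | null, no longer BaseChatModel
  if (model === null) {
    return 'No provider configured.';
  }
  let reply = '';
  const chunks = await model.stream([
    new SystemMessage('You are Jupyternaut, a helpful coding assistant.'),
    new HumanMessage(question)
  ]);
  for await (const chunk of chunks) {
    // Chunk shape depends on the underlying model; assume text content here.
    reply += String((chunk as { content?: unknown }).content ?? '');
  }
  return reply;
}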

src/completion-provider.ts

Lines changed: 2 additions & 2 deletions
@@ -4,8 +4,8 @@ import {
   IInlineCompletionProvider
 } from '@jupyterlab/completer';

-import { IBaseCompleter } from './base-completer';
 import { IAIProviderRegistry } from './tokens';
+import { AICompleter } from './types/ai-model';

 /**
  * The generic completion provider to register to the completion provider manager.
@@ -34,7 +34,7 @@ export class CompletionProvider implements IInlineCompletionProvider {
   /**
    * Get the current completer.
    */
-  get completer(): IBaseCompleter | null {
+  get completer(): AICompleter | null {
     return this._providerRegistry.currentCompleter;
   }

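The registry now hands out an AICompleter wrapper rather than the completer class itself. A hedged sketch of how a caller might use it (the provider's real fetch implementation is not shown in this excerpt, and the wrapper's fetch method is inferred from the completer diffs below):

// Hedged sketch: how a caller might use the AICompleter wrapper returned by
// CompletionProvider.completer.
import {
  CompletionHandler,
  IInlineCompletionContext
} from '@jupyterlab/completer';

import { CompletionProvider } from './completion-provider';

async function fetchInline(
  provider: CompletionProvider,
  request: CompletionHandler.IRequest,
  context: IInlineCompletionContext
) {
  // `completer` is now the AICompleter wrapper, not an IBaseCompleter with a
  // public model attached to it.
  const completer = provider.completer;
  if (completer === null) {
    return undefined;
  }
  return completer.fetch(request, context);
}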

src/default-providers/Anthropic/completer.ts

Lines changed: 0 additions & 5 deletions
@@ -3,7 +3,6 @@ import {
   IInlineCompletionContext
 } from '@jupyterlab/completer';
 import { ChatAnthropic } from '@langchain/anthropic';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, SystemMessage } from '@langchain/core/messages';

 import { BaseCompleter, IBaseCompleter } from '../../base-completer';
@@ -14,10 +13,6 @@ export class AnthropicCompleter implements IBaseCompleter {
     this._completer = new ChatAnthropic({ ...options.settings });
   }

-  get completer(): BaseChatModel {
-    return this._completer;
-  }
-
   /**
    * Getter and setter for the initial prompt.
    */

src/default-providers/ChromeAI/completer.ts

Lines changed: 0 additions & 5 deletions
@@ -3,7 +3,6 @@ import {
   IInlineCompletionContext
 } from '@jupyterlab/completer';
 import { ChromeAI } from '@langchain/community/experimental/llms/chrome_ai';
-import { LLM } from '@langchain/core/language_models/llms';
 import { HumanMessage, SystemMessage } from '@langchain/core/messages';

 import { BaseCompleter, IBaseCompleter } from '../../base-completer';
@@ -45,10 +44,6 @@ export class ChromeCompleter implements IBaseCompleter {
     this._prompt = value;
   }

-  get completer(): LLM {
-    return this._completer;
-  }
-
   async fetch(
     request: CompletionHandler.IRequest,
     context: IInlineCompletionContext

src/default-providers/MistralAI/completer.ts

Lines changed: 0 additions & 5 deletions
@@ -2,7 +2,6 @@ import {
   CompletionHandler,
   IInlineCompletionContext
 } from '@jupyterlab/completer';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import {
   BaseMessage,
   HumanMessage,
@@ -47,10 +46,6 @@ export class CodestralCompleter implements IBaseCompleter {
     );
   }

-  get completer(): BaseChatModel {
-    return this._completer;
-  }
-
   /**
    * Getter and setter for the initial prompt.
    */

src/default-providers/Ollama/completer.ts

Lines changed: 0 additions & 6 deletions
@@ -2,7 +2,6 @@ import {
   CompletionHandler,
   IInlineCompletionContext
 } from '@jupyterlab/completer';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, SystemMessage } from '@langchain/core/messages';
 import { ChatOllama } from '@langchain/ollama';

@@ -12,11 +11,6 @@ import { COMPLETION_SYSTEM_PROMPT } from '../../provider';
 export class OllamaCompleter implements IBaseCompleter {
   constructor(options: BaseCompleter.IOptions) {
     this._completer = new ChatOllama({ ...options.settings });
-    console.log('Settings', options.settings);
-  }
-
-  get completer(): BaseChatModel {
-    return this._completer;
   }

   /**

src/default-providers/OpenAI/completer.ts

Lines changed: 0 additions & 5 deletions
@@ -2,7 +2,6 @@ import {
   CompletionHandler,
   IInlineCompletionContext
 } from '@jupyterlab/completer';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, SystemMessage } from '@langchain/core/messages';
 import { ChatOpenAI } from '@langchain/openai';

@@ -14,10 +13,6 @@ export class OpenAICompleter implements IBaseCompleter {
     this._completer = new ChatOpenAI({ ...options.settings });
   }

-  get completer(): BaseChatModel {
-    return this._completer;
-  }
-
   /**
    * Getter and setter for the initial prompt.
    */

src/default-providers/WebLLM/completer.ts

Lines changed: 1 addition & 5 deletions
@@ -4,9 +4,9 @@ import {
 } from '@jupyterlab/completer';
 import { HumanMessage, SystemMessage } from '@langchain/core/messages';
 import { ChatWebLLM } from '@langchain/community/chat_models/webllm';
+
 import { BaseCompleter, IBaseCompleter } from '../../base-completer';
 import { COMPLETION_SYSTEM_PROMPT } from '../../provider';
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';

 /**
  * Regular expression to match the '```' string at the start of a string.
@@ -69,10 +69,6 @@ export class WebLLMCompleter implements IBaseCompleter {
     }
   }

-  get completer(): BaseChatModel {
-    return this._completer;
-  }
-
   /**
    * Getter and setter for the initial prompt.
    */

src/default-providers/index.ts

Lines changed: 2 additions & 1 deletion
@@ -98,7 +98,8 @@ const webLLMProviderPlugin: JupyterFrontEndPlugin<void> = {
     completer: WebLLMCompleter,
     settingsSchema: WebLLMSettings,
     instructions: WebLLMInstructions,
-    compatibilityCheck: webLLMCompatibilityCheck
+    compatibilityCheck: webLLMCompatibilityCheck,
+    exposeChatModel: true
   });

   registry.providerChanged.connect(async (sender, args) => {
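This hunk shows the WebLLM provider opting into exposing its full chat model via the new exposeChatModel flag. As a hedged sketch of what a provider registration could look like: only the option names visible in the hunk come from the source; the registry call shape, its method name, and the placeholder values are assumptions for illustration.

// Hedged sketch only. The option names mirror the hunk above; the registry
// shape, method name, and placeholder values below are assumptions.
declare const registry: { add(options: Record<string, unknown>): void };
declare const MyCompleter: unknown;
declare const MySettings: unknown;
declare const MyInstructions: unknown;

registry.add({
  completer: MyCompleter,
  settingsSchema: MySettings,
  instructions: MyInstructions,
  compatibilityCheck: async () => null,
  // Omitted or false: consumers only ever see the AIChatModel wrapper.
  // true (as WebLLM does above): the underlying LangChain chat model is exposed.
  exposeChatModel: false
});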
