From 3dd9c20374cb42c0bbcac988747df1b192073eb9 Mon Sep 17 00:00:00 2001
From: philipAthanasopoulos
Date: Wed, 30 Jul 2025 10:07:23 +0300
Subject: [PATCH 1/8] added ollama provider

---
 src/providers/OllamaProvider.ts | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 src/providers/OllamaProvider.ts

diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts
new file mode 100644
index 0000000..e69de29

From 8bc1c98fce1bea12b63c3de3a369001433aded43 Mon Sep 17 00:00:00 2001
From: philipAthanasopoulos
Date: Wed, 30 Jul 2025 10:20:46 +0300
Subject: [PATCH 2/8] added ollama provider implementation and exports

---
 src/App.tsx                     | 13 +++++
 src/index.tsx                   |  3 +-
 src/providers/OllamaProvider.ts | 88 +++++++++++++++++++++++++++++++++
 3 files changed, 103 insertions(+), 1 deletion(-)

diff --git a/src/App.tsx b/src/App.tsx
index 14f2641..de5c3d2 100644
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -5,6 +5,7 @@ import { LlmConnectorBlock } from './types/LlmConnectorBlock';
 import GeminiProvider from './providers/GeminiProvider';
 import OpenaiProvider from './providers/OpenaiProvider';
 import WebLlmProvider from './providers/WebLlmProvider';
+import OllamaProvider from './providers/OllamaProvider';
 
 // fill in your api keys below if you wish to explore/develop
 const geminiApiKey = '';
@@ -110,6 +111,18 @@ const App = () => {
 			},
 		},
 	} as LlmConnectorBlock,
+	ollama: {
+		llmConnector: {
+			provider: new OllamaProvider({
+				model: 'gemma3',
+			}),
+			outputType: 'character',
+			stopConditions: {
+				onUserMessage: onUserMessageCheck,
+				onKeyDown: onKeyDownCheck,
+			},
+		},
+	} as LlmConnectorBlock,
 	};
 
 	return ;

diff --git a/src/index.tsx b/src/index.tsx
index 83fb37d..6659aa7 100644
--- a/src/index.tsx
+++ b/src/index.tsx
@@ -5,6 +5,7 @@ import LlmConnector from './factory/RcbPluginFactory';
 import GeminiProvider from './providers/GeminiProvider';
 import OpenaiProvider from './providers/OpenaiProvider';
 import WebLlmProvider from './providers/WebLlmProvider';
+import OllamaProvider from './providers/OllamaProvider';
 
 // type imports
 import { LlmConnectorBlock } from './types/LlmConnectorBlock';
@@ -12,7 +13,7 @@ import { PluginConfig } from './types/PluginConfig';
 import { Provider } from './types/Provider';
 
 // default provider exports
-export { GeminiProvider, OpenaiProvider, WebLlmProvider };
+export { GeminiProvider, OpenaiProvider, WebLlmProvider, OllamaProvider };
 
 // type exports
 export type { LlmConnectorBlock, PluginConfig, Provider };

diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts
index e69de29..c00ca2c 100644
--- a/src/providers/OllamaProvider.ts
+++ b/src/providers/OllamaProvider.ts
@@ -0,0 +1,88 @@
+import { Provider } from '../types/Provider';
+import { Message } from 'react-chatbotify';
+
+interface OllamaProviderConfig {
+	model: string;
+	baseUrl?: string;
+	stream?: boolean;
+	debug?: boolean;
+	headers?: Record<string, string>;
+}
+
+class OllamaProvider implements Provider {
+	private endpoint: string;
+	private model: string;
+	private stream: boolean;
+	private debug: boolean;
+	private headers: Record<string, string>;
+
+	public constructor(config: OllamaProviderConfig) {
+		this.model = config.model;
+		this.stream = config.stream ?? true;
+		this.debug = config.debug ?? false;
+		this.headers = {
+			'Content-Type': 'application/json',
+			...config.headers,
+		};
+		this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/generate';
+	}
+
+	public async *sendMessages(messages: Message[]): AsyncGenerator<string> {
+		const prompt = messages
+			.filter((m) => typeof m.content === 'string')
+			.map((m) => m.content)
+			.join('\n');
+
+		const body = {
+			model: this.model,
+			prompt,
+			stream: this.stream,
+		};
+
+		if (this.debug) {
+			console.log('[OllamaProvider] Request:', {
+				endpoint: this.endpoint,
+				headers: this.headers,
+				body,
+			});
+		}
+
+		const res = await fetch(this.endpoint, {
+			method: 'POST',
+			headers: this.headers,
+			body: JSON.stringify(body),
+		});
+
+		if (!res.ok) {
+			throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
+		}
+
+		if (this.stream) {
+			if (!res.body) throw new Error('No response body for streaming');
+			const reader = res.body.getReader();
+			const decoder = new TextDecoder();
+			let buffer = '';
+			while (true) {
+				const { value, done } = await reader.read();
+				if (done) break;
+				buffer += decoder.decode(value, { stream: true });
+				const lines = buffer.split('\n');
+				buffer = lines.pop()!;
+				for (const line of lines) {
+					if (!line.trim()) continue;
+					try {
+						const data = JSON.parse(line);
+						if (data.response) yield data.response;
+					} catch (e) {
+						if (this.debug) console.error('Ollama stream parse error:', line, e);
+					}
+				}
+			}
+		} else {
+			const data = await res.json();
+			if (data.response) yield data.response;
+		}
+	}
+}
+
+export default OllamaProvider;
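Reviewer note on the stream handling above: sendMessages assumes Ollama's NDJSON output for POST /api/generate — one JSON object per line, each carrying a `response` fragment, terminated by an object with `done: true`. A minimal smoke-test sketch for the new provider outside the chatbot (assumptions: a local Ollama daemon on the default port, the gemma3 model already pulled, and a Node runner such as tsx; the cast is only to keep the sketch short):

import { Message } from 'react-chatbotify';
import OllamaProvider from './src/providers/OllamaProvider';

// Each streamed line looks roughly like:
//   {"model":"gemma3","response":"Because","done":false}
// and the final line reports "done": true.
const provider = new OllamaProvider({ model: 'gemma3', debug: true });

// Only `content` is read by sendMessages, so the other Message fields are omitted here.
const messages = [{ sender: 'USER', content: 'Why is the sky blue?' }] as unknown as Message[];

(async () => {
	for await (const chunk of provider.sendMessages(messages)) {
		process.stdout.write(chunk); // chunks print as they are generated
	}
})();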
From 69189a024f50bc4e4cd49e5767bc485b241ffe55 Mon Sep 17 00:00:00 2001
From: philipAthanasopoulos
Date: Wed, 30 Jul 2025 10:24:32 +0300
Subject: [PATCH 3/8] added dist

---
 dist/App.d.ts                                 |   3 +
 dist/App.d.ts.map                             |   1 +
 dist/constants/DefaultPluginConfig.d.ts       |   6 +
 dist/constants/DefaultPluginConfig.d.ts.map   |   1 +
 dist/core/useRcbPlugin.d.ts                   |  10 +
 dist/core/useRcbPlugin.d.ts.map               |   1 +
 dist/development.d.ts                         |   2 +
 dist/development.d.ts.map                     |   1 +
 dist/factory/RcbPluginFactory.d.ts            |  13 +
 dist/factory/RcbPluginFactory.d.ts.map        |   1 +
 dist/hooks/useChangePath.d.ts                 |  11 +
 dist/hooks/useChangePath.d.ts.map             |   1 +
 dist/hooks/useMessageHandler.d.ts             |  32 ++
 dist/hooks/useMessageHandler.d.ts.map         |   1 +
 dist/hooks/useProcessBlock.d.ts               |  32 ++
 dist/hooks/useProcessBlock.d.ts.map           |   1 +
 dist/index.cjs                                |   4 +
 dist/index.d.ts                               |  12 +
 dist/index.d.ts.map                           |   1 +
 dist/index.js                                 | 509 ++++++++++++++++++
 dist/providers/GeminiProvider.d.ts            |  49 ++
 dist/providers/GeminiProvider.d.ts.map        |   1 +
 dist/providers/OllamaProvider.d.ts            |  20 +
 dist/providers/OllamaProvider.d.ts.map        |   1 +
 dist/providers/OpenaiProvider.d.ts            |  49 ++
 dist/providers/OpenaiProvider.d.ts.map        |   1 +
 dist/providers/WebLlmProvider.d.ts            |  47 ++
 dist/providers/WebLlmProvider.d.ts.map        |   1 +
 dist/tsconfig.tsbuildinfo                     |   1 +
 dist/types/LlmConnectorBlock.d.ts             |  20 +
 dist/types/LlmConnectorBlock.d.ts.map         |   1 +
 dist/types/PluginConfig.d.ts                  |   8 +
 dist/types/PluginConfig.d.ts.map              |   1 +
 dist/types/Provider.d.ts                      |  13 +
 dist/types/Provider.d.ts.map                  |   1 +
 .../provider-config/GeminiProviderConfig.d.ts |  39 ++
 .../GeminiProviderConfig.d.ts.map             |   1 +
 .../provider-config/OpenaiProviderConfig.d.ts |  39 ++
 .../OpenaiProviderConfig.d.ts.map             |   1 +
 .../provider-config/WebLlmProviderConfig.d.ts |  17 +
 .../WebLlmProviderConfig.d.ts.map             |   1 +
 .../GeminiProviderMessage.d.ts                |   9 +
 .../GeminiProviderMessage.d.ts.map            |   1 +
 .../OpenaiProviderMessage.d.ts                |   9 +
 .../OpenaiProviderMessage.d.ts.map            |   1 +
 .../WebLlmProviderMessage.d.ts                |   9 +
 .../WebLlmProviderMessage.d.ts.map            |   1 +
 dist/utils/promptHandler.d.ts                 |  36 ++
 dist/utils/promptHandler.d.ts.map             |   1 +
 dist/utils/streamController.d.ts              |  10 +
dist/utils/streamController.d.ts.map | 1 + 51 files changed, 1033 insertions(+) create mode 100644 dist/App.d.ts create mode 100644 dist/App.d.ts.map create mode 100644 dist/constants/DefaultPluginConfig.d.ts create mode 100644 dist/constants/DefaultPluginConfig.d.ts.map create mode 100644 dist/core/useRcbPlugin.d.ts create mode 100644 dist/core/useRcbPlugin.d.ts.map create mode 100644 dist/development.d.ts create mode 100644 dist/development.d.ts.map create mode 100644 dist/factory/RcbPluginFactory.d.ts create mode 100644 dist/factory/RcbPluginFactory.d.ts.map create mode 100644 dist/hooks/useChangePath.d.ts create mode 100644 dist/hooks/useChangePath.d.ts.map create mode 100644 dist/hooks/useMessageHandler.d.ts create mode 100644 dist/hooks/useMessageHandler.d.ts.map create mode 100644 dist/hooks/useProcessBlock.d.ts create mode 100644 dist/hooks/useProcessBlock.d.ts.map create mode 100644 dist/index.cjs create mode 100644 dist/index.d.ts create mode 100644 dist/index.d.ts.map create mode 100644 dist/index.js create mode 100644 dist/providers/GeminiProvider.d.ts create mode 100644 dist/providers/GeminiProvider.d.ts.map create mode 100644 dist/providers/OllamaProvider.d.ts create mode 100644 dist/providers/OllamaProvider.d.ts.map create mode 100644 dist/providers/OpenaiProvider.d.ts create mode 100644 dist/providers/OpenaiProvider.d.ts.map create mode 100644 dist/providers/WebLlmProvider.d.ts create mode 100644 dist/providers/WebLlmProvider.d.ts.map create mode 100644 dist/tsconfig.tsbuildinfo create mode 100644 dist/types/LlmConnectorBlock.d.ts create mode 100644 dist/types/LlmConnectorBlock.d.ts.map create mode 100644 dist/types/PluginConfig.d.ts create mode 100644 dist/types/PluginConfig.d.ts.map create mode 100644 dist/types/Provider.d.ts create mode 100644 dist/types/Provider.d.ts.map create mode 100644 dist/types/provider-config/GeminiProviderConfig.d.ts create mode 100644 dist/types/provider-config/GeminiProviderConfig.d.ts.map create mode 100644 dist/types/provider-config/OpenaiProviderConfig.d.ts create mode 100644 dist/types/provider-config/OpenaiProviderConfig.d.ts.map create mode 100644 dist/types/provider-config/WebLlmProviderConfig.d.ts create mode 100644 dist/types/provider-config/WebLlmProviderConfig.d.ts.map create mode 100644 dist/types/provider-message/GeminiProviderMessage.d.ts create mode 100644 dist/types/provider-message/GeminiProviderMessage.d.ts.map create mode 100644 dist/types/provider-message/OpenaiProviderMessage.d.ts create mode 100644 dist/types/provider-message/OpenaiProviderMessage.d.ts.map create mode 100644 dist/types/provider-message/WebLlmProviderMessage.d.ts create mode 100644 dist/types/provider-message/WebLlmProviderMessage.d.ts.map create mode 100644 dist/utils/promptHandler.d.ts create mode 100644 dist/utils/promptHandler.d.ts.map create mode 100644 dist/utils/streamController.d.ts create mode 100644 dist/utils/streamController.d.ts.map diff --git a/dist/App.d.ts b/dist/App.d.ts new file mode 100644 index 0000000..c28a238 --- /dev/null +++ b/dist/App.d.ts @@ -0,0 +1,3 @@ +declare const App: () => import("react/jsx-runtime").JSX.Element; +export default App; +//# sourceMappingURL=App.d.ts.map \ No newline at end of file diff --git a/dist/App.d.ts.map b/dist/App.d.ts.map new file mode 100644 index 0000000..5357fef --- /dev/null +++ b/dist/App.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"App.d.ts","sourceRoot":"","sources":["../src/App.tsx"],"names":[],"mappings":"AAaA,QAAA,MAAM,GAAG,+CAmHR,CAAC;AAEF,eAAe,GAAG,CAAC"} \ No newline at end of file diff 
--git a/dist/constants/DefaultPluginConfig.d.ts b/dist/constants/DefaultPluginConfig.d.ts
new file mode 100644
index 0000000..a7a4ed8
--- /dev/null
+++ b/dist/constants/DefaultPluginConfig.d.ts
@@ -0,0 +1,6 @@
+import { PluginConfig } from '../types/PluginConfig';
+/**
+ * Default values for plugin config.
+ */
+export declare const DefaultPluginConfig: PluginConfig;
+//# sourceMappingURL=DefaultPluginConfig.d.ts.map
\ No newline at end of file
diff --git a/dist/constants/DefaultPluginConfig.d.ts.map b/dist/constants/DefaultPluginConfig.d.ts.map
new file mode 100644
index 0000000..337e4ef
--- /dev/null
+++ b/dist/constants/DefaultPluginConfig.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"DefaultPluginConfig.d.ts","sourceRoot":"","sources":["../../src/constants/DefaultPluginConfig.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD;;GAEG;AACH,eAAO,MAAM,mBAAmB,EAAE,YAEjC,CAAC"}
\ No newline at end of file
diff --git a/dist/core/useRcbPlugin.d.ts b/dist/core/useRcbPlugin.d.ts
new file mode 100644
index 0000000..8452738
--- /dev/null
+++ b/dist/core/useRcbPlugin.d.ts
@@ -0,0 +1,10 @@
+import { Plugin } from 'react-chatbotify';
+import { PluginConfig } from '../types/PluginConfig';
+/**
+ * Plugin hook that handles all the core logic.
+ *
+ * @param pluginConfig configurations for the plugin
+ */
+declare const useRcbPlugin: (pluginConfig?: PluginConfig) => ReturnType<Plugin>;
+export default useRcbPlugin;
+//# sourceMappingURL=useRcbPlugin.d.ts.map
\ No newline at end of file
diff --git a/dist/core/useRcbPlugin.d.ts.map b/dist/core/useRcbPlugin.d.ts.map
new file mode 100644
index 0000000..4aa0bc1
--- /dev/null
+++ b/dist/core/useRcbPlugin.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"useRcbPlugin.d.ts","sourceRoot":"","sources":["../../src/core/useRcbPlugin.tsx"],"names":[],"mappings":"AACA,OAAO,EAEN,MAAM,EAON,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAOrD;;;;GAIG;AACH,QAAA,MAAM,YAAY,GAAI,eAAe,YAAY,KAAG,UAAU,CAAC,MAAM,CAsFpE,CAAC;AAEF,eAAe,YAAY,CAAC"}
\ No newline at end of file
diff --git a/dist/development.d.ts b/dist/development.d.ts
new file mode 100644
index 0000000..de2c7d6
--- /dev/null
+++ b/dist/development.d.ts
@@ -0,0 +1,2 @@
+export {};
+//# sourceMappingURL=development.d.ts.map
\ No newline at end of file
diff --git a/dist/development.d.ts.map b/dist/development.d.ts.map
new file mode 100644
index 0000000..dd3b27b
--- /dev/null
+++ b/dist/development.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"development.d.ts","sourceRoot":"","sources":["../src/development.tsx"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/dist/factory/RcbPluginFactory.d.ts b/dist/factory/RcbPluginFactory.d.ts
new file mode 100644
index 0000000..0887cb6
--- /dev/null
+++ b/dist/factory/RcbPluginFactory.d.ts
@@ -0,0 +1,13 @@
+import { PluginConfig } from '../types/PluginConfig';
+/**
+ * Factory that prepares the plugin hook to be consumed by the core library.
+ * + * @param pluginConfig configurations for the plugin + */ +declare const RcbPluginFactory: (pluginConfig?: PluginConfig) => () => { + name: string; + settings?: import("react-chatbotify").Settings; + styles?: import("react-chatbotify").Styles; +}; +export default RcbPluginFactory; +//# sourceMappingURL=RcbPluginFactory.d.ts.map \ No newline at end of file diff --git a/dist/factory/RcbPluginFactory.d.ts.map b/dist/factory/RcbPluginFactory.d.ts.map new file mode 100644 index 0000000..530d07f --- /dev/null +++ b/dist/factory/RcbPluginFactory.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"RcbPluginFactory.d.ts","sourceRoot":"","sources":["../../src/factory/RcbPluginFactory.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD;;;;GAIG;AACH,QAAA,MAAM,gBAAgB,GAAI,eAAe,YAAY;;;;CAOpD,CAAC;AAEF,eAAe,gBAAgB,CAAC"} \ No newline at end of file diff --git a/dist/hooks/useChangePath.d.ts b/dist/hooks/useChangePath.d.ts new file mode 100644 index 0000000..6c4e47d --- /dev/null +++ b/dist/hooks/useChangePath.d.ts @@ -0,0 +1,11 @@ +import { Flow } from 'react-chatbotify'; +import { LlmConnectorBlock } from '../types/LlmConnectorBlock'; +/** + * Handles changing of conversation path (block). + * + * @param getFlow flow of the chatbot + * @param setConnectorBlockFields sets all fields required for llm connector block + */ +declare const useChangePath: (getFlow: () => Flow, setConnectorBlockFields: (block: LlmConnectorBlock) => void) => void; +export { useChangePath }; +//# sourceMappingURL=useChangePath.d.ts.map \ No newline at end of file diff --git a/dist/hooks/useChangePath.d.ts.map b/dist/hooks/useChangePath.d.ts.map new file mode 100644 index 0000000..b38f5d5 --- /dev/null +++ b/dist/hooks/useChangePath.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"useChangePath.d.ts","sourceRoot":"","sources":["../../src/hooks/useChangePath.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,IAAI,EAA+C,MAAM,kBAAkB,CAAC;AAErF,OAAO,EAAE,iBAAiB,EAAE,MAAM,4BAA4B,CAAC;AAE/D;;;;;GAKG;AACH,QAAA,MAAM,aAAa,GAAI,SAAS,MAAM,IAAI,EAAE,yBAAyB,CAAC,KAAK,EAAE,iBAAiB,KAAK,IAAI,SAoBtG,CAAC;AAEF,OAAO,EAAE,aAAa,EAAE,CAAC"} \ No newline at end of file diff --git a/dist/hooks/useMessageHandler.d.ts b/dist/hooks/useMessageHandler.d.ts new file mode 100644 index 0000000..051bffc --- /dev/null +++ b/dist/hooks/useMessageHandler.d.ts @@ -0,0 +1,32 @@ +import { Message } from 'react-chatbotify'; +import { Provider } from '../types/Provider'; +/** + * Handles message events. 
+ *
+ * @param refs object containing relevant refs
+ * @param actions object containing relevant actions
+ */
+declare const useMessageHandler: (refs: {
+    providerRef: React.MutableRefObject<Provider | null>;
+    messagesRef: React.MutableRefObject<Message[]>;
+    outputTypeRef: React.MutableRefObject<"character" | "chunk" | "full">;
+    outputSpeedRef: React.MutableRefObject<number>;
+    historySizeRef: React.MutableRefObject<number>;
+    initialMessageRef: React.MutableRefObject<string>;
+    errorMessageRef: React.MutableRefObject<string>;
+    onUserMessageRef: React.MutableRefObject<((msg: Message) => Promise<string | null>) | null>;
+    onKeyDownRef: React.MutableRefObject<((e: KeyboardEvent) => Promise<string | null>) | null>;
+}, actions: {
+    speakAudio: (text: string) => void;
+    injectMessage: (content: string | JSX.Element, sender?: string) => Promise<Message | null>;
+    simulateStreamMessage: (content: string, sender?: string) => Promise<Message | null>;
+    streamMessage: (msg: string) => void;
+    endStreamMessage: () => void;
+    toggleTextAreaDisabled: (active?: boolean) => void;
+    toggleIsBotTyping: (active?: boolean) => void;
+    focusTextArea: () => void;
+    goToPath: (path: string) => void;
+    getIsChatBotVisible: () => boolean;
+}) => void;
+export { useMessageHandler };
+//# sourceMappingURL=useMessageHandler.d.ts.map
\ No newline at end of file
diff --git a/dist/hooks/useMessageHandler.d.ts.map b/dist/hooks/useMessageHandler.d.ts.map
new file mode 100644
index 0000000..1166604
--- /dev/null
+++ b/dist/hooks/useMessageHandler.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"useMessageHandler.d.ts","sourceRoot":"","sources":["../../src/hooks/useMessageHandler.ts"],"names":[],"mappings":"AACA,OAAO,EAIN,OAAO,EAGP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAM7C;;;;;GAKG;AACH,QAAA,MAAM,iBAAiB,GACtB,MAAM;IACL,WAAW,EAAE,KAAK,CAAC,gBAAgB,CAAC,QAAQ,GAAG,IAAI,CAAC,CAAC;IACrD,WAAW,EAAE,KAAK,CAAC,gBAAgB,CAAC,OAAO,EAAE,CAAC,CAAC;IAC/C,aAAa,EAAE,KAAK,CAAC,gBAAgB,CAAC,WAAW,GAAG,OAAO,GAAG,MAAM,CAAC,CAAC;IACtE,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAC/C,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAC/C,iBAAiB,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAClD,eAAe,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAChD,gBAAgB,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC5F,YAAY,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAAE,aAAa,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;CAC5F,EACD,SAAS;IACR,UAAU,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC3F,qBAAqB,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IACrF,aAAa,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,sBAAsB,EAAE,CAAC,MAAM,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;IACnD,iBAAiB,EAAE,CAAC,MAAM,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;IAC9C,aAAa,EAAE,MAAM,IAAI,CAAC;IAC1B,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACjC,mBAAmB,EAAE,MAAM,OAAO,CAAC;CACnC,SA8FD,CAAC;AAEF,OAAO,EAAE,iBAAiB,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/hooks/useProcessBlock.d.ts b/dist/hooks/useProcessBlock.d.ts
new file mode 100644
index 0000000..be109e1
--- /dev/null
+++ b/dist/hooks/useProcessBlock.d.ts
@@ -0,0 +1,32 @@
+import { Message } from 'react-chatbotify';
+import { Provider } from '../types/Provider';
+/**
+ * Handles pre-processing and post-processing of blocks.
+ *
+ * @param refs object containing relevant refs
+ * @param actions object containing relevant actions
+ */
+declare const useProcessBlock: (refs: {
+    providerRef: React.MutableRefObject<Provider | null>;
+    messagesRef: React.MutableRefObject<Message[]>;
+    outputTypeRef: React.MutableRefObject<"character" | "chunk" | "full">;
+    outputSpeedRef: React.MutableRefObject<number>;
+    historySizeRef: React.MutableRefObject<number>;
+    initialMessageRef: React.MutableRefObject<string>;
+    errorMessageRef: React.MutableRefObject<string>;
+    onUserMessageRef: React.MutableRefObject<((msg: Message) => Promise<string | null>) | null>;
+    onKeyDownRef: React.MutableRefObject<((e: KeyboardEvent) => Promise<string | null>) | null>;
+}, actions: {
+    speakAudio: (text: string) => void;
+    injectMessage: (content: string | JSX.Element, sender?: string) => Promise<Message | null>;
+    simulateStreamMessage: (content: string, sender?: string) => Promise<Message | null>;
+    streamMessage: (msg: string) => void;
+    endStreamMessage: () => void;
+    toggleTextAreaDisabled: (active?: boolean) => void;
+    toggleIsBotTyping: (active?: boolean) => void;
+    focusTextArea: () => void;
+    goToPath: (path: string) => void;
+    getIsChatBotVisible: () => boolean;
+}) => void;
+export { useProcessBlock };
+//# sourceMappingURL=useProcessBlock.d.ts.map
\ No newline at end of file
diff --git a/dist/hooks/useProcessBlock.d.ts.map b/dist/hooks/useProcessBlock.d.ts.map
new file mode 100644
index 0000000..b06eb7b
--- /dev/null
+++ b/dist/hooks/useProcessBlock.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"useProcessBlock.d.ts","sourceRoot":"","sources":["../../src/hooks/useProcessBlock.ts"],"names":[],"mappings":"AACA,OAAO,EAAqD,OAAO,EAA2B,MAAM,kBAAkB,CAAC;AAEvH,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAE7C;;;;;GAKG;AACH,QAAA,MAAM,eAAe,GACpB,MAAM;IACL,WAAW,EAAE,KAAK,CAAC,gBAAgB,CAAC,QAAQ,GAAG,IAAI,CAAC,CAAC;IACrD,WAAW,EAAE,KAAK,CAAC,gBAAgB,CAAC,OAAO,EAAE,CAAC,CAAC;IAC/C,aAAa,EAAE,KAAK,CAAC,gBAAgB,CAAC,WAAW,GAAG,OAAO,GAAG,MAAM,CAAC,CAAC;IACtE,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAC/C,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAC/C,iBAAiB,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAClD,eAAe,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAChD,gBAAgB,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC5F,YAAY,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAAE,aAAa,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;CAC5F,EACD,SAAS;IACR,UAAU,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC3F,qBAAqB,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IACrF,aAAa,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,sBAAsB,EAAE,CAAC,MAAM,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;IACnD,iBAAiB,EAAE,CAAC,MAAM,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;IAC9C,aAAa,EAAE,MAAM,IAAI,CAAC;IAC1B,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACjC,mBAAmB,EAAE,MAAM,OAAO,CAAC;CACnC,SAuDD,CAAC;AACF,OAAO,EAAE,eAAe,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/index.cjs b/dist/index.cjs
new file mode 100644
index 0000000..941caef
--- /dev/null
+++ b/dist/index.cjs
@@ -0,0 +1,4 @@
+"use strict";var $=Object.create;var _=Object.defineProperty;var G=Object.getOwnPropertyDescriptor;var L=Object.getOwnPropertyNames;var N=Object.getPrototypeOf,z=Object.prototype.hasOwnProperty;var K=(n,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let o of L(e))!z.call(n,o)&&o!==t&&_(n,o,{get:()=>e[o],enumerable:!(s=G(e,o))||s.enumerable});return n};var
J=(n,e,t)=>(t=n!=null?$(N(n)):{},K(e||!n||!n.__esModule?_(t,"default",{value:n,enumerable:!0}):t,n));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),m=require("react-chatbotify"),q={autoConfig:!0},H=(n,e)=>{const t=g.useCallback(s=>{const r=n()[s.data.nextPath];e(r)},[n,e]);m.useOnRcbEvent(m.RcbEvent.CHANGE_PATH,t)},Y=(n,e)=>{const{outputTypeRef:t}=n,{toggleTextAreaDisabled:s,toggleIsBotTyping:o,focusTextArea:r,injectMessage:a,simulateStreamMessage:i,getIsChatBotVisible:c}=e,l=g.useCallback(d=>{var u;const h=d.data.block;h.llmConnector&&(d.preventDefault(),d.type==="rcb-pre-process-block"&&((u=h.llmConnector)!=null&&u.initialMessage&&(t.current==="full"?a(n.initialMessageRef.current):i(n.initialMessageRef.current)),o(!1),s(!1),setTimeout(()=>{c()&&r()})))},[o,s,r,c]);m.useOnRcbEvent(m.RcbEvent.PRE_PROCESS_BLOCK,l),m.useOnRcbEvent(m.RcbEvent.POST_PROCESS_BLOCK,l)},V=async function*(n,e){for await(const t of n)for(const s of t)yield s,await new Promise(o=>setTimeout(o,e))},Q=async function*(n,e){for await(const t of n)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(n,e,t){e==="character"?yield*V(n,t):yield*Q(n,t)},Z=async function*(n,e){for await(const t of n)e(t),yield t},ee=async(n,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:o,toggleIsBotTyping:r,toggleTextAreaDisabled:a,focusTextArea:i,injectMessage:c,streamMessage:l,endStreamMessage:d,getIsChatBotVisible:h}=t,u=e.providerRef.current.sendMessages(n),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let p="";for await(const f of u){if((R=s.signal)!=null&&R.aborted)break;p+=f}r(!1),c(p),setTimeout(()=>{a(!1),h()&&i()})}else{const p=X(Z(u,o),b,y);let f="",S=!1;for await(const E of p){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=E,l(f)}d(),setTimeout(()=>{a(!1),h()&&i()})}},te=500,se=(n,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:o,onKeyDownRef:r,errorMessageRef:a}=n,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:l,toggleIsBotTyping:d,goToPath:h,focusTextArea:u,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!n.providerRef.current)return;const p=M.data.message,f=p.sender.toUpperCase();p.tags=p.tags??[],p.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(d(!0),l(!0),setTimeout(async()=>{var T;if(o.current){const P=await o.current(p);if(P)return(T=y.current)==null||T.abort(),y.current=null,h(P)}const S=n.historySizeRef.current,E=t.current,v=S?[...E.slice(-(S-1)),p]:[p],C=new AbortController;y.current=C,ee(v,n,e,{signal:C.signal}).catch(P=>{d(!1),l(!1),setTimeout(()=>{b()&&u()}),console.error("LLM prompt failed",P),s.current==="full"?i(a.current):c(a.current)})},te))},[n,e]);m.useOnRcbEvent(m.RcbEvent.POST_INJECT_MESSAGE,R),m.useOnRcbEvent(m.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),m.useOnRcbEvent(m.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async p=>{var f;if(r.current){const S=await r.current(p);S&&((f=y.current)==null||f.abort(),y.current=null,h(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=n=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),o=g.useRef(30),r=g.useRef(0),a=g.useRef(""),i=g.useRef("Unable to get response, please try 
again."),c=g.useRef(null),l=g.useRef(null),{getFlow:d}=m.useFlow(),{speakAudio:h}=m.useAudio(),{messages:u,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=m.useMessages(),{goToPath:p}=m.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=m.useTextArea(),{toggleIsBotTyping:E,getIsChatBotVisible:v}=m.useChatWindow(),C={...q,...n??{}};g.useEffect(()=>{e.current=u},[u]),H(d,w=>{var x,A,k,B,U,I,j,D,F,W;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",o.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,a.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((I=w.llmConnector)==null?void 0:I.errorMessage)??"Unable to get response, please try again.",c.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onUserMessage)??null,l.current=((W=(F=w.llmConnector)==null?void 0:F.stopConditions)==null?void 0:W.onKeyDown)??null});const T={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:o,historySizeRef:r,initialMessageRef:a,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:l},P={speakAudio:h,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:E,focusTextArea:S,goToPath:p,getIsChatBotVisible:v};Y(T,P),se(T,P);const O={name:"@rcb-plugins/llm-connector"};return C!=null&&C.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=n=>()=>re(n);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let o;return this.messageParser?o=this.messageParser(s):o=s.filter(a=>typeof a.content=="string"&&a.sender.toUpperCase()!=="SYSTEM").map(a=>{const i=this.roleMap(a.sender.toUpperCase()),c=a.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(o=[{role:"user",parts:[{text:this.systemMessage}]},...o]),{contents:o,...this.body}},this.handleStreamResponse=async function*(s){var a,i,c,l,d;const o=new TextDecoder("utf-8");let r="";for(;;){const{value:h,done:u}=await s.read();if(u)break;r+=o.decode(h,{stream:!0});const b=r.split(` +`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(d=(l=(c=(i=(a=JSON.parse(M).candidates)==null?void 0:a[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:l[0])==null?void 0:d.text;f&&(yield f)}catch(p){console.error("SSE JSON parse error:",M,p)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,o,r,a,i;if(this.debug){const 
c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),l={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:l,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const c=t.body.getReader();for await(const l of this.handleStreamResponse(c))yield l}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const l=(i=(a=(r=(o=(s=c.candidates)==null?void 0:s[0])==null?void 0:o.content)==null?void 0:r.parts)==null?void 0:a[0])==null?void 0:i.text;if(typeof l=="string")yield l;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const a=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:a,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,a,i;const s=new TextDecoder("utf-8");let o="";for(;;){const{value:c,done:l}=await t.read();if(l)break;o+=s.decode(c,{stream:!0});const d=o.split(/\r?\n/);o=d.pop();for(const h of d){if(!h.startsWith("data: "))continue;const u=h.slice(6).trim();if(u==="[DONE]")return;try{const y=(i=(a=(r=JSON.parse(u).choices)==null?void 0:r[0])==null?void 0:a.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,o,r;if(this.debug){const a={...this.headers};delete a.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:a,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const a=t.body.getReader();for await(const i of this.handleStreamResponse(a))yield i}else{const a=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",a);const i=(r=(o=(s=a.choices)==null?void 0:s[0])==null?void 0:o.message)==null?void 0:r.content;if(typeof i=="string")yield 
i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const a=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:a,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,o,r,a,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const l of t){const d=(r=(o=l.choices[0])==null?void 0:o.delta)==null?void 0:r.content;d&&(yield d)}else(c=(i=(a=t==null?void 0:t.choices)==null?void 0:a[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){this.model=e.model,this.stream=e.stream??!0,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",...e.headers},this.endpoint=e.baseUrl??"http://localhost:11434/api/generate"}async*sendMessages(e){const t=e.filter(r=>typeof r.content=="string").map(r=>r.content).join(` +`),s={model:this.model,prompt:t,stream:this.stream};this.debug&&console.log("[OllamaProvider] Request:",{endpoint:this.endpoint,headers:this.headers,body:s});const o=await fetch(this.endpoint,{method:"POST",headers:this.headers,body:JSON.stringify(s)});if(!o.ok)throw new Error(`Ollama API error ${o.status}: ${await o.text()}`);if(this.stream){if(!o.body)throw new Error("No response body for streaming");const r=o.body.getReader(),a=new TextDecoder;let i="";for(;;){const{value:c,done:l}=await r.read();if(l)break;i+=a.decode(c,{stream:!0});const d=i.split(` +`);i=d.pop();for(const h of d)if(h.trim())try{const u=JSON.parse(h);u.response&&(yield u.response)}catch(u){this.debug&&console.error("Ollama stream parse error:",h,u)}}}else{const r=await o.json();r.response&&(yield r.response)}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; diff --git a/dist/index.d.ts b/dist/index.d.ts new file mode 100644 index 0000000..e0dde96 --- /dev/null +++ b/dist/index.d.ts @@ -0,0 +1,12 @@ +import LlmConnector from './factory/RcbPluginFactory'; +import GeminiProvider from './providers/GeminiProvider'; +import OpenaiProvider from './providers/OpenaiProvider'; +import WebLlmProvider from './providers/WebLlmProvider'; +import OllamaProvider from './providers/OllamaProvider'; +import { LlmConnectorBlock } from './types/LlmConnectorBlock'; 
+import { PluginConfig } from './types/PluginConfig'; +import { Provider } from './types/Provider'; +export { GeminiProvider, OpenaiProvider, WebLlmProvider, OllamaProvider }; +export type { LlmConnectorBlock, PluginConfig, Provider }; +export default LlmConnector; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/dist/index.d.ts.map b/dist/index.d.ts.map new file mode 100644 index 0000000..ea86f11 --- /dev/null +++ b/dist/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.tsx"],"names":[],"mappings":"AACA,OAAO,YAAY,MAAM,4BAA4B,CAAC;AAGtD,OAAO,cAAc,MAAM,4BAA4B,CAAC;AACxD,OAAO,cAAc,MAAM,4BAA4B,CAAC;AACxD,OAAO,cAAc,MAAM,4BAA4B,CAAC;AACxD,OAAO,cAAc,MAAM,4BAA4B,CAAC;AAGxD,OAAO,EAAE,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AAC9D,OAAO,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AACpD,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAG5C,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,cAAc,EAAE,cAAc,EAAE,CAAC;AAG1E,YAAY,EAAE,iBAAiB,EAAE,YAAY,EAAE,QAAQ,EAAE,CAAC;AAG1D,eAAe,YAAY,CAAC"} \ No newline at end of file diff --git a/dist/index.js b/dist/index.js new file mode 100644 index 0000000..f4b75c1 --- /dev/null +++ b/dist/index.js @@ -0,0 +1,509 @@ +import { useCallback as O, useRef as w, useEffect as N } from "react"; +import { useOnRcbEvent as C, RcbEvent as T, useFlow as G, useAudio as L, useMessages as z, usePaths as K, useTextArea as J, useChatWindow as H } from "react-chatbotify"; +const Y = { + autoConfig: !0 +}, q = (i, e) => { + const t = O( + (o) => { + const s = i()[o.data.nextPath]; + e(s); + }, + [i, e] + ); + C(T.CHANGE_PATH, t); +}, V = (i, e) => { + const { outputTypeRef: t } = i, { + toggleTextAreaDisabled: o, + toggleIsBotTyping: r, + focusTextArea: s, + injectMessage: n, + simulateStreamMessage: a, + getIsChatBotVisible: c + } = e, l = O( + (d) => { + var h; + const u = d.data.block; + u.llmConnector && (d.preventDefault(), d.type === "rcb-pre-process-block" && ((h = u.llmConnector) != null && h.initialMessage && (t.current === "full" ? n(i.initialMessageRef.current) : a(i.initialMessageRef.current)), r(!1), o(!1), setTimeout(() => { + c() && s(); + }))); + }, + [r, o, s, c] + ); + C(T.PRE_PROCESS_BLOCK, l), C(T.POST_PROCESS_BLOCK, l); +}, Q = async function* (i, e) { + for await (const t of i) + for (const o of t) + yield o, await new Promise((r) => setTimeout(r, e)); +}, X = async function* (i, e) { + for await (const t of i) + yield t, await new Promise((o) => setTimeout(o, e)); +}, Z = async function* (i, e, t) { + e === "character" ? 
yield* Q(i, t) : yield* X(i, t); +}, ee = async function* (i, e) { + for await (const t of i) + e(t), yield t; +}, te = async (i, e, t, o = {}) => { + var b, y; + if (!e.providerRef.current) + return; + const { + speakAudio: r, + toggleIsBotTyping: s, + toggleTextAreaDisabled: n, + focusTextArea: a, + injectMessage: c, + streamMessage: l, + endStreamMessage: d, + getIsChatBotVisible: u + } = t, h = e.providerRef.current.sendMessages(i), g = e.outputTypeRef.current, f = e.outputSpeedRef.current; + if (g === "full") { + let p = ""; + for await (const m of h) { + if ((b = o.signal) != null && b.aborted) break; + p += m; + } + s(!1), c(p), setTimeout(() => { + n(!1), u() && a(); + }); + } else { + const p = Z(ee(h, r), g, f); + let m = "", M = !1; + for await (const E of p) { + if ((y = o.signal) != null && y.aborted) + break; + M || (s(!1), M = !0), m += E, l(m); + } + d(), setTimeout(() => { + n(!1), u() && a(); + }); + } +}, se = 500, oe = (i, e) => { + const { messagesRef: t, outputTypeRef: o, onUserMessageRef: r, onKeyDownRef: s, errorMessageRef: n } = i, { + injectMessage: a, + simulateStreamMessage: c, + toggleTextAreaDisabled: l, + toggleIsBotTyping: d, + goToPath: u, + focusTextArea: h, + getIsChatBotVisible: g + } = e, f = w(null), b = O( + (y) => { + if (!i.providerRef.current) + return; + const p = y.data.message, m = p.sender.toUpperCase(); + p.tags = p.tags ?? [], p.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (d(!0), l(!0), setTimeout(async () => { + var v; + if (r.current) { + const R = await r.current(p); + if (R) + return (v = f.current) == null || v.abort(), f.current = null, u(R); + } + const M = i.historySizeRef.current, E = t.current, x = M ? [...E.slice(-(M - 1)), p] : [p], P = new AbortController(); + f.current = P, te(x, i, e, { signal: P.signal }).catch((R) => { + d(!1), l(!1), setTimeout(() => { + g() && h(); + }), console.error("LLM prompt failed", R), o.current === "full" ? a(n.current) : c(n.current); + }); + }, se)); + }, + [i, e] + ); + C(T.POST_INJECT_MESSAGE, b), C(T.STOP_SIMULATE_STREAM_MESSAGE, b), C(T.STOP_STREAM_MESSAGE, b), N(() => { + const y = async (p) => { + var m; + if (s.current) { + const M = await s.current(p); + M && ((m = f.current) == null || m.abort(), f.current = null, u(M)); + } + }; + return window.addEventListener("keydown", y), () => window.removeEventListener("keydown", y); + }, []); +}, re = (i) => { + const e = w([]), t = w(null), o = w("chunk"), r = w(30), s = w(0), n = w(""), a = w("Unable to get response, please try again."), c = w(null), l = w(null), { getFlow: d } = G(), { speakAudio: u } = L(), { messages: h, injectMessage: g, simulateStreamMessage: f, streamMessage: b, endStreamMessage: y } = z(), { goToPath: p } = K(), { toggleTextAreaDisabled: m, focusTextArea: M } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = H(), P = { ...Y, ...i ?? {} }; + N(() => { + e.current = h; + }, [h]), q(d, (S) => { + var k, B, U, I, D, F, j, $, W, _; + t.current = ((k = S.llmConnector) == null ? void 0 : k.provider) ?? null, o.current = ((B = S.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", r.current = ((U = S.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, s.current = ((I = S.llmConnector) == null ? void 0 : I.historySize) ?? 0, n.current = ((D = S.llmConnector) == null ? void 0 : D.initialMessage) ?? "", a.current = ((F = S.llmConnector) == null ? void 0 : F.errorMessage) ?? "Unable to get response, please try again.", c.current = (($ = (j = S.llmConnector) == null ? 
void 0 : j.stopConditions) == null ? void 0 : $.onUserMessage) ?? null, l.current = ((_ = (W = S.llmConnector) == null ? void 0 : W.stopConditions) == null ? void 0 : _.onKeyDown) ?? null; + }); + const v = { + providerRef: t, + messagesRef: e, + outputTypeRef: o, + outputSpeedRef: r, + historySizeRef: s, + initialMessageRef: n, + errorMessageRef: a, + onUserMessageRef: c, + onKeyDownRef: l + }, R = { + speakAudio: u, + injectMessage: g, + simulateStreamMessage: f, + streamMessage: b, + endStreamMessage: y, + toggleTextAreaDisabled: m, + toggleIsBotTyping: E, + focusTextArea: M, + goToPath: p, + getIsChatBotVisible: x + }; + V(v, R), oe(v, R); + const A = { name: "@rcb-plugins/llm-connector" }; + return P != null && P.autoConfig && (A.settings = { + event: { + rcbChangePath: !0, + rcbPostInjectMessage: !0, + rcbStopSimulateStreamMessage: !0, + rcbStopStreamMessage: !0, + rcbPreProcessBlock: !0, + rcbPostProcessBlock: !0 + } + }), A; +}, ie = (i) => () => re(i); +class ce { + /** + * Sets default values for the provider based on given configuration. Configuration guide here: + * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/Gemini.md + * + * @param config configuration for setup + */ + constructor(e) { + this.debug = !1, this.roleMap = (o) => { + switch (o) { + case "USER": + return "user"; + default: + return "model"; + } + }, this.constructBodyWithMessages = (o) => { + let r; + return this.messageParser ? r = this.messageParser(o) : r = o.filter( + (n) => typeof n.content == "string" && n.sender.toUpperCase() !== "SYSTEM" + ).map((n) => { + const a = this.roleMap(n.sender.toUpperCase()), c = n.content; + return { + role: a, + parts: [{ text: c }] + }; + }), this.systemMessage && (r = [{ role: "user", parts: [{ text: this.systemMessage }] }, ...r]), { + contents: r, + ...this.body + }; + }, this.handleStreamResponse = async function* (o) { + var n, a, c, l, d; + const r = new TextDecoder("utf-8"); + let s = ""; + for (; ; ) { + const { value: u, done: h } = await o.read(); + if (h) break; + s += r.decode(u, { stream: !0 }); + const g = s.split(` +`); + s = g.pop(); + for (const f of g) { + const b = f.trim(); + if (!b.startsWith("data: ")) continue; + const y = b.slice(6); + try { + const m = (d = (l = (c = (a = (n = JSON.parse(y).candidates) == null ? void 0 : n[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : l[0]) == null ? void 0 : d.text; + m && (yield m); + } catch (p) { + console.error("SSE JSON parse error:", y, p); + } + } + } + }, this.method = e.method ?? "POST", this.body = e.body ?? {}, this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.debug = e.debug ?? !1, this.headers = { + "Content-Type": "application/json", + Accept: this.responseFormat === "stream" ? "text/event-stream" : "application/json", + ...e.headers + }; + const t = e.baseUrl ?? "https://generativelanguage.googleapis.com/v1beta"; + if (e.mode === "direct") + this.endpoint = this.responseFormat === "stream" ? `${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey || ""}` : `${t}/models/${e.model}:generateContent?key=${e.apiKey || ""}`; + else if (e.mode === "proxy") + this.endpoint = `${t}/${e.model}`; + else + throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy')."); + } + /** + * Calls Gemini and yields each chunk (or the full text). 
+ * + * @param messages messages to include in the request + */ + async *sendMessages(e) { + var o, r, s, n, a; + if (this.debug) { + const c = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), l = { ...this.headers }; + console.log("[GeminiProvider] Request:", { + method: this.method, + endpoint: c, + headers: l, + body: this.constructBodyWithMessages(e) + }); + } + const t = await fetch(this.endpoint, { + method: this.method, + headers: this.headers, + body: JSON.stringify(this.constructBodyWithMessages(e)) + }); + if (this.debug && console.log("[GeminiProvider] Response status:", t.status), !t.ok) + throw new Error(`Gemini API error ${t.status}: ${await t.text()}`); + if (this.responseFormat === "stream") { + if (!t.body) + throw new Error("Response body is empty – cannot stream"); + const c = t.body.getReader(); + for await (const l of this.handleStreamResponse(c)) + yield l; + } else { + const c = await t.json(); + this.debug && console.log("[GeminiProvider] Response body:", c); + const l = (a = (n = (s = (r = (o = c.candidates) == null ? void 0 : o[0]) == null ? void 0 : r.content) == null ? void 0 : s.parts) == null ? void 0 : n[0]) == null ? void 0 : a.text; + if (typeof l == "string") + yield l; + else + throw new Error("Unexpected response shape – no text candidate"); + } + } +} +class le { + /** + * Sets default values for the provider based on given configuration. Configuration guide here: + * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md + * + * @param config configuration for setup + */ + constructor(e) { + if (this.debug = !1, this.roleMap = (t) => { + switch (t) { + case "USER": + return "user"; + case "SYSTEM": + return "system"; + default: + return "assistant"; + } + }, this.constructBodyWithMessages = (t) => { + let o; + return this.messageParser ? o = this.messageParser(t) : o = t.filter( + (s) => typeof s.content == "string" && s.sender.toUpperCase() !== "SYSTEM" + ).map((s) => { + const n = this.roleMap(s.sender.toUpperCase()), a = s.content; + return { + role: n, + content: a + }; + }), this.systemMessage && (o = [{ role: "system", content: this.systemMessage }, ...o]), { + messages: o, + ...this.body + }; + }, this.handleStreamResponse = async function* (t) { + var s, n, a; + const o = new TextDecoder("utf-8"); + let r = ""; + for (; ; ) { + const { value: c, done: l } = await t.read(); + if (l) break; + r += o.decode(c, { stream: !0 }); + const d = r.split(/\r?\n/); + r = d.pop(); + for (const u of d) { + if (!u.startsWith("data: ")) continue; + const h = u.slice(6).trim(); + if (h === "[DONE]") return; + try { + const f = (a = (n = (s = JSON.parse(h).choices) == null ? void 0 : s[0]) == null ? void 0 : n.delta) == null ? void 0 : a.content; + f && (yield f); + } catch (g) { + console.error("Stream parse error", g); + } + } + } + }, this.method = e.method ?? "POST", this.endpoint = e.baseUrl ?? "https://api.openai.com/v1/chat/completions", this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.debug = e.debug ?? !1, this.headers = { + "Content-Type": "application/json", + Accept: this.responseFormat === "stream" ? 
"text/event-stream" : "application/json", + ...e.headers + }, this.body = { + model: e.model, + stream: this.responseFormat === "stream", + ...e.body + }, e.mode === "direct") { + this.headers = { ...this.headers, Authorization: `Bearer ${e.apiKey}` }; + return; + } + if (e.mode !== "proxy") + throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy')."); + } + /** + * Calls Openai and yields each chunk (or the full text). + * + * @param messages messages to include in the request + */ + async *sendMessages(e) { + var o, r, s; + if (this.debug) { + const n = { ...this.headers }; + delete n.Authorization, console.log("[OpenaiProvider] Request:", { + method: this.method, + endpoint: this.endpoint, + headers: n, + body: this.constructBodyWithMessages(e) + }); + } + const t = await fetch(this.endpoint, { + method: this.method, + headers: this.headers, + body: JSON.stringify(this.constructBodyWithMessages(e)) + }); + if (this.debug && console.log("[OpenaiProvider] Response status:", t.status), !t.ok) + throw new Error(`Openai API error ${t.status}: ${await t.text()}`); + if (this.responseFormat === "stream") { + if (!t.body) + throw new Error("Response body is empty – cannot stream"); + const n = t.body.getReader(); + for await (const a of this.handleStreamResponse(n)) + yield a; + } else { + const n = await t.json(); + this.debug && console.log("[OpenaiProvider] Response body:", n); + const a = (s = (r = (o = n.choices) == null ? void 0 : o[0]) == null ? void 0 : r.message) == null ? void 0 : s.content; + if (typeof a == "string") + yield a; + else + throw new Error("Unexpected response shape – no text candidate"); + } + } +} +class de { + /** + * Sets default values for the provider based on given configuration. Configuration guide here: + * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/WebLlm.md + * + * @param config configuration for setup + */ + constructor(e) { + this.debug = !1, this.roleMap = (t) => { + switch (t) { + case "USER": + return "user"; + case "SYSTEM": + return "system"; + default: + return "assistant"; + } + }, this.constructBodyWithMessages = (t) => { + let o; + return this.messageParser ? o = this.messageParser(t) : o = t.filter( + (s) => typeof s.content == "string" && s.sender.toUpperCase() !== "SYSTEM" + ).map((s) => { + const n = this.roleMap(s.sender.toUpperCase()), a = s.content; + return { + role: n, + content: a + }; + }), this.systemMessage && (o = [ + { + role: "system", + content: this.systemMessage + }, + ...o + ]), { + messages: o, + stream: this.responseFormat === "stream", + ...this.chatCompletionOptions + }; + }, this.model = e.model, this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.engineConfig = e.engineConfig ?? {}, this.chatCompletionOptions = e.chatCompletionOptions ?? {}, this.debug = e.debug ?? !1, this.createEngine(); + } + /** + * Creates MLC Engine for inferencing. + */ + async createEngine() { + const { CreateMLCEngine: e } = await import("@mlc-ai/web-llm"); + this.engine = await e(this.model, { + ...this.engineConfig + }); + } + /** + * Calls WebLlm and yields each chunk (or the full text). 
+ * + * @param messages messages to include in the request + */ + async *sendMessages(e) { + var o, r, s, n, a, c; + this.engine || await this.createEngine(), this.debug && console.log("[WebLlmProvider] Request:", { + model: this.model, + systemMessage: this.systemMessage, + responseFormat: this.responseFormat, + engineConfig: this.engineConfig, + chatCompletionOptions: this.chatCompletionOptions, + messages: this.constructBodyWithMessages(e).messages + // Log messages being sent + }); + const t = await ((o = this.engine) == null ? void 0 : o.chat.completions.create(this.constructBodyWithMessages(e))); + if (this.debug && console.log("[WebLlmProvider] Response:", t), t && Symbol.asyncIterator in t) + for await (const l of t) { + const d = (s = (r = l.choices[0]) == null ? void 0 : r.delta) == null ? void 0 : s.content; + d && (yield d); + } + else (c = (a = (n = t == null ? void 0 : t.choices) == null ? void 0 : n[0]) == null ? void 0 : a.message) != null && c.content && (yield t.choices[0].message.content); + } +} +class he { + constructor(e) { + this.model = e.model, this.stream = e.stream ?? !0, this.debug = e.debug ?? !1, this.headers = { + "Content-Type": "application/json", + ...e.headers + }, this.endpoint = e.baseUrl ?? "http://localhost:11434/api/generate"; + } + async *sendMessages(e) { + const t = e.filter((s) => typeof s.content == "string").map((s) => s.content).join(` +`), o = { + model: this.model, + prompt: t, + stream: this.stream + }; + this.debug && console.log("[OllamaProvider] Request:", { + endpoint: this.endpoint, + headers: this.headers, + body: o + }); + const r = await fetch(this.endpoint, { + method: "POST", + headers: this.headers, + body: JSON.stringify(o) + }); + if (!r.ok) + throw new Error(`Ollama API error ${r.status}: ${await r.text()}`); + if (this.stream) { + if (!r.body) throw new Error("No response body for streaming"); + const s = r.body.getReader(), n = new TextDecoder(); + let a = ""; + for (; ; ) { + const { value: c, done: l } = await s.read(); + if (l) break; + a += n.decode(c, { stream: !0 }); + const d = a.split(` +`); + a = d.pop(); + for (const u of d) + if (u.trim()) + try { + const h = JSON.parse(u); + h.response && (yield h.response); + } catch (h) { + this.debug && console.error("Ollama stream parse error:", u, h); + } + } + } else { + const s = await r.json(); + s.response && (yield s.response); + } + } +} +export { + ce as GeminiProvider, + he as OllamaProvider, + le as OpenaiProvider, + de as WebLlmProvider, + ie as default +}; diff --git a/dist/providers/GeminiProvider.d.ts b/dist/providers/GeminiProvider.d.ts new file mode 100644 index 0000000..f48f102 --- /dev/null +++ b/dist/providers/GeminiProvider.d.ts @@ -0,0 +1,49 @@ +import { GeminiProviderConfig } from '../types/provider-config/GeminiProviderConfig'; +import { Provider } from '../types/Provider'; +import { Message } from 'react-chatbotify'; +/** + * Provider for Gemini’s API, supports both direct and proxy modes. + */ +declare class GeminiProvider implements Provider { + private method; + private endpoint; + private headers; + private body; + private systemMessage?; + private responseFormat; + private messageParser?; + private debug; + /** + * Sets default values for the provider based on given configuration. 
Configuration guide here:
+     * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/Gemini.md
+     *
+     * @param config configuration for setup
+     */
+    constructor(config: GeminiProviderConfig);
+    /**
+     * Calls Gemini and yields each chunk (or the full text).
+     *
+     * @param messages messages to include in the request
+     */
+    sendMessages(messages: Message[]): AsyncGenerator<string>;
+    /**
+     * Maps the chatbot message sender to the provider message sender.
+     *
+     * @param sender sender from the chatbot
+     */
+    private roleMap;
+    /**
+     * Builds the full request body.
+     *
+     * @param messages messages to parse
+     */
+    private constructBodyWithMessages;
+    /**
+     * Consumes an SSE/text stream Response and yields each text chunk.
+     *
+     * @reader request body reader
+     */
+    private handleStreamResponse;
+}
+export default GeminiProvider;
+//# sourceMappingURL=GeminiProvider.d.ts.map
\ No newline at end of file
diff --git a/dist/providers/GeminiProvider.d.ts.map b/dist/providers/GeminiProvider.d.ts.map
new file mode 100644
index 0000000..ee32d6d
--- /dev/null
+++ b/dist/providers/GeminiProvider.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../src/providers/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AACrF,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAG3C;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA0B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IAmDvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CAOb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA6B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA4B1B;CACF;AAED,eAAe,cAAc,CAAC"}
\ No newline at end of file
diff --git a/dist/providers/OllamaProvider.d.ts b/dist/providers/OllamaProvider.d.ts
new file mode 100644
index 0000000..8ff10b3
--- /dev/null
+++ b/dist/providers/OllamaProvider.d.ts
@@ -0,0 +1,20 @@
+import { Provider } from '../types/Provider';
+import { Message } from 'react-chatbotify';
+interface OllamaProviderConfig {
+    model: string;
+    baseUrl?: string;
+    stream?: boolean;
+    debug?: boolean;
+    headers?: Record<string, string>;
+}
+declare class OllamaProvider implements Provider {
+    private endpoint;
+    private model;
+    private stream;
+    private debug;
+    private headers;
+    constructor(config: OllamaProviderConfig);
+    sendMessages(messages: Message[]): AsyncGenerator<string>;
+}
+export default OllamaProvider;
+//# sourceMappingURL=OllamaProvider.d.ts.map
\ No newline at end of file
diff --git a/dist/providers/OllamaProvider.d.ts.map b/dist/providers/OllamaProvider.d.ts.map
new file mode 100644
index 0000000..1a53850
--- /dev/null
+++ b/dist/providers/OllamaProvider.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,UAAU,oBAAoB;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACjC;AAED,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,KAAK,CAAU;IACvB,OAAO,CAAC,OAAO,CAAyB;gBAErB,MAAM,EAAE,oBAAoB;IAWjC,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;CAwDvE;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/dist/providers/OpenaiProvider.d.ts b/dist/providers/OpenaiProvider.d.ts new file mode 100644 index 0000000..039b1e7 --- /dev/null +++ b/dist/providers/OpenaiProvider.d.ts @@ -0,0 +1,49 @@ +import { Provider } from '../types/Provider'; +import { Message } from 'react-chatbotify'; +import { OpenaiProviderConfig } from '../types/provider-config/OpenaiProviderConfig'; +/** + * Provider for Openai’s API, supports both direct and proxy modes. + */ +declare class OpenaiProvider implements Provider { + private method; + private endpoint; + private headers; + private body; + private systemMessage?; + private responseFormat; + private messageParser?; + private debug; + /** + * Sets default values for the provider based on given configuration. Configuration guide here: + * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md + * + * @param config configuration for setup + */ + constructor(config: OpenaiProviderConfig); + /** + * Calls Openai and yields each chunk (or the full text). + * + * @param messages messages to include in the request + */ + sendMessages(messages: Message[]): AsyncGenerator; + /** + * Maps the chatbot message sender to the provider message sender. + * + * @param sender sender from the chatbot + */ + private roleMap; + /** + * Builds the full request body. + * + * @param messages messages to parse + */ + private constructBodyWithMessages; + /** + * Consumes an SSE/text stream Response and yield each text chunk. 
+ *
+ * @param reader request body reader
+ */
+ private handleStreamResponse;
+}
+export default OpenaiProvider;
+//# sourceMappingURL=OpenaiProvider.d.ts.map
\ No newline at end of file
diff --git a/dist/providers/OpenaiProvider.d.ts.map b/dist/providers/OpenaiProvider.d.ts.map
new file mode 100644
index 0000000..b527a9a
--- /dev/null
+++ b/dist/providers/OpenaiProvider.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"OpenaiProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OpenaiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA6B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA2B1B;CACF;AAED,eAAe,cAAc,CAAC"}
\ No newline at end of file
diff --git a/dist/providers/WebLlmProvider.d.ts b/dist/providers/WebLlmProvider.d.ts
new file mode 100644
index 0000000..dd6642f
--- /dev/null
+++ b/dist/providers/WebLlmProvider.d.ts
@@ -0,0 +1,47 @@
+import { WebLlmProviderConfig } from '../types/provider-config/WebLlmProviderConfig';
+import { Provider } from '../types/Provider';
+import { Message } from 'react-chatbotify';
+/**
+ * Provider for MLC’s WebLLM runtime, for running models in the browser.
+ */
+declare class WebLlmProvider implements Provider {
+ private model;
+ private systemMessage?;
+ private responseFormat;
+ private engineConfig;
+ private chatCompletionOptions;
+ private messageParser?;
+ private engine?;
+ private debug;
+ /**
+ * Sets default values for the provider based on given configuration. Configuration guide here:
+ * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/WebLlm.md
+ *
+ * @param config configuration for setup
+ */
+ constructor(config: WebLlmProviderConfig);
+ /**
+ * Creates MLC Engine for inferencing.
+ */
+ private createEngine;
+ /**
+ * Calls WebLlm and yields each chunk (or the full text).
+ *
+ * @param messages messages to include in the request
+ */
+ sendMessages(messages: Message[]): AsyncGenerator<string>;
+ /**
+ * Maps the chatbot message sender to the provider message sender.
+ *
+ * @param sender sender from the chatbot
+ */
+ private roleMap;
+ /**
+ * Builds the full request body.
+ * + * @param messages messages to parse + */ + private constructBodyWithMessages; +} +export default WebLlmProvider; +//# sourceMappingURL=WebLlmProvider.d.ts.map \ No newline at end of file diff --git a/dist/providers/WebLlmProvider.d.ts.map b/dist/providers/WebLlmProvider.d.ts.map new file mode 100644 index 0000000..bdaeef9 --- /dev/null +++ b/dist/providers/WebLlmProvider.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"WebLlmProvider.d.ts","sourceRoot":"","sources":["../../src/providers/WebLlmProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AACrF,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAI3C;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,KAAK,CAAU;IACvB,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,YAAY,CAAkB;IACtC,OAAO,CAAC,qBAAqB,CAA0B;IACvD,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,MAAM,CAAC,CAAY;IAC3B,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACS,MAAM,EAAE,oBAAoB;IAWxC;;OAEG;YACW,YAAY;IAO1B;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IAkCvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CAoC/B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/dist/tsconfig.tsbuildinfo b/dist/tsconfig.tsbuildinfo new file mode 100644 index 0000000..b7e519a --- /dev/null +++ b/dist/tsconfig.tsbuildinfo @@ -0,0 +1 @@ +{"root":["../src/app.tsx","../src/development.tsx","../src/index.tsx","../src/vite-env.d.ts","../src/constants/defaultpluginconfig.ts","../src/core/usercbplugin.tsx","../src/factory/rcbpluginfactory.ts","../src/hooks/usechangepath.ts","../src/hooks/usemessagehandler.ts","../src/hooks/useprocessblock.ts","../src/providers/geminiprovider.ts","../src/providers/ollamaprovider.ts","../src/providers/openaiprovider.ts","../src/providers/webllmprovider.ts","../src/types/llmconnectorblock.ts","../src/types/pluginconfig.ts","../src/types/provider.ts","../src/types/provider-config/geminiproviderconfig.ts","../src/types/provider-config/openaiproviderconfig.ts","../src/types/provider-config/webllmproviderconfig.ts","../src/types/provider-message/geminiprovidermessage.ts","../src/types/provider-message/openaiprovidermessage.ts","../src/types/provider-message/webllmprovidermessage.ts","../src/utils/prompthandler.tsx","../src/utils/streamcontroller.ts"],"version":"5.8.3"} \ No newline at end of file diff --git a/dist/types/LlmConnectorBlock.d.ts b/dist/types/LlmConnectorBlock.d.ts new file mode 100644 index 0000000..ee904b7 --- /dev/null +++ b/dist/types/LlmConnectorBlock.d.ts @@ -0,0 +1,20 @@ +import { Block, Message } from 'react-chatbotify'; +import { Provider } from './Provider'; +/** + * Extends the Block from React ChatBotify to support the llm connector attribute and its properties. 
+ */
+export type LlmConnectorBlock = Block & {
+ llmConnector: {
+ provider: Provider;
+ outputType?: 'character' | 'chunk' | 'full';
+ outputSpeed?: number;
+ historySize?: number;
+ initialMessage?: string;
+ errorMessage?: string;
+ stopConditions?: {
+ onUserMessage?: (message: Message) => Promise<string | null>;
+ onKeyDown?: (event: KeyboardEvent) => Promise<string | null>;
+ };
+ };
+};
+//# sourceMappingURL=LlmConnectorBlock.d.ts.map
\ No newline at end of file
diff --git a/dist/types/LlmConnectorBlock.d.ts.map b/dist/types/LlmConnectorBlock.d.ts.map
new file mode 100644
index 0000000..9c642d1
--- /dev/null
+++ b/dist/types/LlmConnectorBlock.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"LlmConnectorBlock.d.ts","sourceRoot":"","sources":["../../src/types/LlmConnectorBlock.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAClD,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAEtC;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,KAAK,GAAG;IACvC,YAAY,EAAE;QACb,QAAQ,EAAE,QAAQ,CAAC;QACnB,UAAU,CAAC,EAAE,WAAW,GAAG,OAAO,GAAG,MAAM,CAAC;QAC5C,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,cAAc,CAAC,EAAE,MAAM,CAAC;QACxB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,cAAc,CAAC,EAAE;YAChB,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;YAC7D,SAAS,CAAC,EAAE,CAAC,KAAK,EAAE,aAAa,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC;SAC7D,CAAC;KACF,CAAC;CACF,CAAC"}
\ No newline at end of file
diff --git a/dist/types/PluginConfig.d.ts b/dist/types/PluginConfig.d.ts
new file mode 100644
index 0000000..99fc076
--- /dev/null
+++ b/dist/types/PluginConfig.d.ts
@@ -0,0 +1,8 @@
+/**
+ * Shared plugin-level settings.
+ */
+type PluginConfig = {
+ autoConfig?: boolean;
+};
+export type { PluginConfig };
+//# sourceMappingURL=PluginConfig.d.ts.map
\ No newline at end of file
diff --git a/dist/types/PluginConfig.d.ts.map b/dist/types/PluginConfig.d.ts.map
new file mode 100644
index 0000000..e75dd5d
--- /dev/null
+++ b/dist/types/PluginConfig.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"PluginConfig.d.ts","sourceRoot":"","sources":["../../src/types/PluginConfig.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,KAAK,YAAY,GAAG;IACnB,UAAU,CAAC,EAAE,OAAO,CAAC;CACrB,CAAC;AAEF,YAAY,EAAE,YAAY,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/types/Provider.d.ts b/dist/types/Provider.d.ts
new file mode 100644
index 0000000..820fa26
--- /dev/null
+++ b/dist/types/Provider.d.ts
@@ -0,0 +1,13 @@
+import { Message } from 'react-chatbotify';
+/**
+ * Interface that all LLM providers must implement.
+ */
+export type Provider = {
+ /**
+ * Sends a series of messages to the LLM to get a reply.
+ *
+ * @param messages messages to send
+ */
+ sendMessages(messages: Message[]): AsyncGenerator<string>;
+};
+//# sourceMappingURL=Provider.d.ts.map
\ No newline at end of file
diff --git a/dist/types/Provider.d.ts.map b/dist/types/Provider.d.ts.map
new file mode 100644
index 0000000..3b8627a
--- /dev/null
+++ b/dist/types/Provider.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"Provider.d.ts","sourceRoot":"","sources":["../../src/types/Provider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;;;OAIG;IACH,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;CAC1D,CAAC"}
\ No newline at end of file
diff --git a/dist/types/provider-config/GeminiProviderConfig.d.ts b/dist/types/provider-config/GeminiProviderConfig.d.ts
new file mode 100644
index 0000000..82a5ed4
--- /dev/null
+++ b/dist/types/provider-config/GeminiProviderConfig.d.ts
@@ -0,0 +1,39 @@
+import { Message } from 'react-chatbotify';
+import { GeminiProviderMessage } from '../provider-message/GeminiProviderMessage';
+/**
+ * Configurations for GeminiProvider in direct mode.
+ */
+type DirectConfig = {
+ mode: 'direct';
+ model: string;
+ apiKey: string;
+ systemMessage?: string;
+ responseFormat?: 'stream' | 'json';
+ baseUrl?: string;
+ method?: string;
+ headers?: Record<string, string>;
+ body?: Record<string, string>;
+ messageParser?: (messages: Message[]) => GeminiProviderMessage[];
+ debug?: boolean;
+};
+/**
+ * Configurations for GeminiProvider in proxy mode.
+ */
+type ProxyConfig = {
+ mode: 'proxy';
+ model: string;
+ baseUrl: string;
+ systemMessage?: string;
+ responseFormat?: 'stream' | 'json';
+ method?: string;
+ headers?: Record<string, string>;
+ body?: Record<string, string>;
+ messageParser?: (messages: Message[]) => GeminiProviderMessage[];
+ debug?: boolean;
+};
+/**
+ * Combined gemini provider configurations.
+ */
+type GeminiProviderConfig = DirectConfig | ProxyConfig;
+export type { GeminiProviderConfig };
+//# sourceMappingURL=GeminiProviderConfig.d.ts.map
\ No newline at end of file
diff --git a/dist/types/provider-config/GeminiProviderConfig.d.ts.map b/dist/types/provider-config/GeminiProviderConfig.d.ts.map
new file mode 100644
index 0000000..7205577
--- /dev/null
+++ b/dist/types/provider-config/GeminiProviderConfig.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"GeminiProviderConfig.d.ts","sourceRoot":"","sources":["../../../src/types/provider-config/GeminiProviderConfig.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,qBAAqB,EAAE,MAAM,2CAA2C,CAAC;AAElF;;GAEG;AACH,KAAK,YAAY,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF;;GAEG;AACH,KAAK,WAAW,GAAG;IAClB,IAAI,EAAE,OAAO,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF;;GAEG;AACH,KAAK,oBAAoB,GAAG,YAAY,GAAG,WAAW,CAAC;AAEvD,YAAY,EAAE,oBAAoB,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/types/provider-config/OpenaiProviderConfig.d.ts b/dist/types/provider-config/OpenaiProviderConfig.d.ts
new file mode 100644
index 0000000..4cc734f
--- /dev/null
+++ b/dist/types/provider-config/OpenaiProviderConfig.d.ts
@@ -0,0 +1,39 @@
+import { Message } from 'react-chatbotify';
+import { OpenaiProviderMessage } from '../provider-message/OpenaiProviderMessage';
+/**
+ * Configurations for OpenaiProvider in direct mode.
+ */
+type DirectConfig = {
+ mode: 'direct';
+ model: string;
+ apiKey: string;
+ systemMessage?: string;
+ responseFormat?: 'stream' | 'json';
+ baseUrl?: string;
+ method?: string;
+ headers?: Record<string, string>;
+ body?: Record<string, string>;
+ messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+ debug?: boolean;
+};
+/**
+ * Configurations for OpenaiProvider in proxy mode.
+ */
+type ProxyConfig = {
+ mode: 'proxy';
+ model: string;
+ baseUrl: string;
+ systemMessage?: string;
+ responseFormat?: 'stream' | 'json';
+ method?: string;
+ headers?: Record<string, string>;
+ body?: Record<string, string>;
+ messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+ debug?: boolean;
+};
+/**
+ * Combined openai provider configurations.
+ */ +type OpenaiProviderConfig = DirectConfig | ProxyConfig; +export type { OpenaiProviderConfig }; +//# sourceMappingURL=OpenaiProviderConfig.d.ts.map \ No newline at end of file diff --git a/dist/types/provider-config/OpenaiProviderConfig.d.ts.map b/dist/types/provider-config/OpenaiProviderConfig.d.ts.map new file mode 100644 index 0000000..ad31887 --- /dev/null +++ b/dist/types/provider-config/OpenaiProviderConfig.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"OpenaiProviderConfig.d.ts","sourceRoot":"","sources":["../../../src/types/provider-config/OpenaiProviderConfig.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,qBAAqB,EAAE,MAAM,2CAA2C,CAAC;AAElF;;GAEG;AACH,KAAK,YAAY,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF;;GAEG;AACH,KAAK,WAAW,GAAG;IAClB,IAAI,EAAE,OAAO,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF;;GAEG;AACH,KAAK,oBAAoB,GAAG,YAAY,GAAG,WAAW,CAAC;AAEvD,YAAY,EAAE,oBAAoB,EAAE,CAAC"} \ No newline at end of file diff --git a/dist/types/provider-config/WebLlmProviderConfig.d.ts b/dist/types/provider-config/WebLlmProviderConfig.d.ts new file mode 100644 index 0000000..8331712 --- /dev/null +++ b/dist/types/provider-config/WebLlmProviderConfig.d.ts @@ -0,0 +1,17 @@ +import { Message } from 'react-chatbotify'; +import { WebLlmProviderMessage } from '../provider-message/WebLlmProviderMessage'; +import { MLCEngineConfig } from '@mlc-ai/web-llm'; +/** + * Configurations for WebLlmProvider. 
+ */
+type WebLlmProviderConfig = {
+ model: string;
+ systemMessage?: string;
+ responseFormat?: 'stream' | 'json';
+ engineConfig?: MLCEngineConfig;
+ chatCompletionOptions?: Record<string, unknown>;
+ messageParser?: (messages: Message[]) => WebLlmProviderMessage[];
+ debug?: boolean;
+};
+export type { WebLlmProviderConfig };
+//# sourceMappingURL=WebLlmProviderConfig.d.ts.map
\ No newline at end of file
diff --git a/dist/types/provider-config/WebLlmProviderConfig.d.ts.map b/dist/types/provider-config/WebLlmProviderConfig.d.ts.map
new file mode 100644
index 0000000..8d6378a
--- /dev/null
+++ b/dist/types/provider-config/WebLlmProviderConfig.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"WebLlmProviderConfig.d.ts","sourceRoot":"","sources":["../../../src/types/provider-config/WebLlmProviderConfig.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,qBAAqB,EAAE,MAAM,2CAA2C,CAAC;AAClF,OAAO,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAC;AAElD;;GAEG;AACH,KAAK,oBAAoB,GAAG;IAC3B,KAAK,EAAE,MAAM,CAAC;IACd,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,YAAY,CAAC,EAAE,eAAe,CAAC;IAC/B,qBAAqB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAChD,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF,YAAY,EAAE,oBAAoB,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/types/provider-message/GeminiProviderMessage.d.ts b/dist/types/provider-message/GeminiProviderMessage.d.ts
new file mode 100644
index 0000000..5d1deb5
--- /dev/null
+++ b/dist/types/provider-message/GeminiProviderMessage.d.ts
@@ -0,0 +1,9 @@
+/**
+ * Message format for Google Gemini.
+ */
+type GeminiProviderMessage = {
+ role: 'user' | 'model';
+ content: string;
+};
+export type { GeminiProviderMessage };
+//# sourceMappingURL=GeminiProviderMessage.d.ts.map
\ No newline at end of file
diff --git a/dist/types/provider-message/GeminiProviderMessage.d.ts.map b/dist/types/provider-message/GeminiProviderMessage.d.ts.map
new file mode 100644
index 0000000..1e92727
--- /dev/null
+++ b/dist/types/provider-message/GeminiProviderMessage.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"GeminiProviderMessage.d.ts","sourceRoot":"","sources":["../../../src/types/provider-message/GeminiProviderMessage.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,KAAK,qBAAqB,GAAG;IAC5B,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC;IACvB,OAAO,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,YAAY,EAAE,qBAAqB,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/types/provider-message/OpenaiProviderMessage.d.ts b/dist/types/provider-message/OpenaiProviderMessage.d.ts
new file mode 100644
index 0000000..00c9a95
--- /dev/null
+++ b/dist/types/provider-message/OpenaiProviderMessage.d.ts
@@ -0,0 +1,9 @@
+/**
+ * Message format for OpenAI.
+ */ +type OpenaiProviderMessage = { + role: 'user' | 'assistant' | 'system'; + content: string; +}; +export type { OpenaiProviderMessage }; +//# sourceMappingURL=OpenaiProviderMessage.d.ts.map \ No newline at end of file diff --git a/dist/types/provider-message/OpenaiProviderMessage.d.ts.map b/dist/types/provider-message/OpenaiProviderMessage.d.ts.map new file mode 100644 index 0000000..76ba5df --- /dev/null +++ b/dist/types/provider-message/OpenaiProviderMessage.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"OpenaiProviderMessage.d.ts","sourceRoot":"","sources":["../../../src/types/provider-message/OpenaiProviderMessage.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,KAAK,qBAAqB,GAAG;IAC5B,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,YAAY,EAAE,qBAAqB,EAAE,CAAC"} \ No newline at end of file diff --git a/dist/types/provider-message/WebLlmProviderMessage.d.ts b/dist/types/provider-message/WebLlmProviderMessage.d.ts new file mode 100644 index 0000000..4ab2d28 --- /dev/null +++ b/dist/types/provider-message/WebLlmProviderMessage.d.ts @@ -0,0 +1,9 @@ +/** + * Message format for web-llm. + */ +type WebLlmProviderMessage = { + role: 'user' | 'assistant' | 'system'; + content: string; +}; +export type { WebLlmProviderMessage }; +//# sourceMappingURL=WebLlmProviderMessage.d.ts.map \ No newline at end of file diff --git a/dist/types/provider-message/WebLlmProviderMessage.d.ts.map b/dist/types/provider-message/WebLlmProviderMessage.d.ts.map new file mode 100644 index 0000000..1265039 --- /dev/null +++ b/dist/types/provider-message/WebLlmProviderMessage.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"WebLlmProviderMessage.d.ts","sourceRoot":"","sources":["../../../src/types/provider-message/WebLlmProviderMessage.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,KAAK,qBAAqB,GAAG;IAC5B,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,YAAY,EAAE,qBAAqB,EAAE,CAAC"} \ No newline at end of file diff --git a/dist/utils/promptHandler.d.ts b/dist/utils/promptHandler.d.ts new file mode 100644 index 0000000..01d00ba --- /dev/null +++ b/dist/utils/promptHandler.d.ts @@ -0,0 +1,36 @@ +import { Provider } from '../types/Provider'; +import { Message } from 'react-chatbotify'; +/** + * Processes the prompt using the provided model connector. 
+ *
+ * @param messages messages to send to the LLM
+ * @param refs object containing relevant refs
+ * @param actions object containing relevant actions
+ * @param opts optional AbortSignal
+ */
+declare const handlePrompt: (messages: Message[], refs: {
+ providerRef: React.MutableRefObject<Provider | null>;
+ messagesRef: React.MutableRefObject<Message[]>;
+ outputTypeRef: React.MutableRefObject<"character" | "chunk" | "full">;
+ outputSpeedRef: React.MutableRefObject<number>;
+ historySizeRef: React.MutableRefObject<number>;
+ initialMessageRef: React.MutableRefObject<string>;
+ errorMessageRef: React.MutableRefObject<string>;
+ onUserMessageRef: React.MutableRefObject<((msg: Message) => Promise<string | null>) | null>;
+ onKeyDownRef: React.MutableRefObject<((e: KeyboardEvent) => Promise<string | null>) | null>;
+}, actions: {
+ speakAudio: (text: string) => void;
+ injectMessage: (content: string | JSX.Element, sender?: string) => Promise<Message | null>;
+ simulateStreamMessage: (content: string, sender?: string) => Promise<Message | null>;
+ streamMessage: (msg: string) => void;
+ endStreamMessage: () => void;
+ toggleTextAreaDisabled: (active?: boolean) => void;
+ toggleIsBotTyping: (active?: boolean) => void;
+ focusTextArea: () => void;
+ goToPath: (path: string) => void;
+ getIsChatBotVisible: () => boolean;
+}, opts?: {
+ signal?: AbortSignal;
+}) => Promise<void>;
+export { handlePrompt };
+//# sourceMappingURL=promptHandler.d.ts.map
\ No newline at end of file
diff --git a/dist/utils/promptHandler.d.ts.map b/dist/utils/promptHandler.d.ts.map
new file mode 100644
index 0000000..ff6e848
--- /dev/null
+++ b/dist/utils/promptHandler.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"promptHandler.d.ts","sourceRoot":"","sources":["../../src/utils/promptHandler.tsx"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAe3C;;;;;;;GAOG;AACH,QAAA,MAAM,YAAY,GACjB,UAAU,OAAO,EAAE,EACnB,MAAM;IACL,WAAW,EAAE,KAAK,CAAC,gBAAgB,CAAC,QAAQ,GAAG,IAAI,CAAC,CAAC;IACrD,WAAW,EAAE,KAAK,CAAC,gBAAgB,CAAC,OAAO,EAAE,CAAC,CAAC;IAC/C,aAAa,EAAE,KAAK,CAAC,gBAAgB,CAAC,WAAW,GAAG,OAAO,GAAG,MAAM,CAAC,CAAC;IACtE,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAC/C,cAAc,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAC/C,iBAAiB,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAClD,eAAe,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAChD,gBAAgB,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC5F,YAAY,EAAE,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,EAAE,aAAa,KAAK,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;CAC5F,EACD,SAAS;IACR,UAAU,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACnC,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IAC3F,qBAAqB,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC;IACrF,aAAa,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,gBAAgB,EAAE,MAAM,IAAI,CAAC;IAC7B,sBAAsB,EAAE,CAAC,MAAM,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;IACnD,iBAAiB,EAAE,CAAC,MAAM,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;IAC9C,aAAa,EAAE,MAAM,IAAI,CAAC;IAC1B,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IACjC,mBAAmB,EAAE,MAAM,OAAO,CAAC;CACnC,EACD,OAAM;IAAE,MAAM,CAAC,EAAE,WAAW,CAAA;CAAO,KACjC,OAAO,CAAC,IAAI,CA+Dd,CAAC;AAEF,OAAO,EAAE,YAAY,EAAE,CAAC"}
\ No newline at end of file
diff --git a/dist/utils/streamController.d.ts b/dist/utils/streamController.d.ts
new file mode 100644
index 0000000..449f42e
--- /dev/null
+++ b/dist/utils/streamController.d.ts
@@ -0,0 +1,10 @@
+/**
+ * Formats a raw stream according to the specified mode.
+ *
+ * @param stream raw async iterable stream of strings.
+ * @param outputType 'character' for per-character output, 'chunk' for as-is.
+ * @param outputSpeed speed in milliseconds to stream response
+ */
+declare const formatStream: (stream: AsyncGenerator<string>, outputType: "chunk" | "character" | "full", outputSpeed: number) => AsyncGenerator<string>;
+export { formatStream };
+//# sourceMappingURL=streamController.d.ts.map
\ No newline at end of file
diff --git a/dist/utils/streamController.d.ts.map b/dist/utils/streamController.d.ts.map
new file mode 100644
index 0000000..6463739
--- /dev/null
+++ b/dist/utils/streamController.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"streamController.d.ts","sourceRoot":"","sources":["../../src/utils/streamController.ts"],"names":[],"mappings":"AAgCA;;;;;;GAMG;AACH,QAAA,MAAM,YAAY,GACjB,QAAQ,cAAc,CAAC,MAAM,CAAC,EAC9B,YAAY,OAAO,GAAG,WAAW,GAAG,MAAM,EAC1C,aAAa,MAAM,KACjB,cAAc,CAAC,MAAM,CAMvB,CAAC;AAEF,OAAO,EAAE,YAAY,EAAE,CAAC"}
\ No newline at end of file

From 070b3c4f2836f64cc2e269da75073109dabb9f32 Mon Sep 17 00:00:00 2001
From: philipAthanasopoulos
Date: Wed, 30 Jul 2025 10:41:23 +0300
Subject: [PATCH 4/8] fixed ollama provider

---
 .gitignore | 1 -
 dist/App.d.ts.map | 2 +-
 dist/index.cjs | 6 +-
 dist/index.js | 473 ++++++++++--------
 dist/providers/OllamaProvider.d.ts | 51 +-
 dist/providers/OllamaProvider.d.ts.map | 2 +-
 dist/tsconfig.tsbuildinfo | 2 +-
 .../OllamaProviderMessage.d.ts | 1 +
 .../OllamaProviderMessage.d.ts.map | 1 +
 src/App.tsx | 5 +-
 src/providers/OllamaProvider.ts | 220 +++++---
 .../provider-message/OllamaProviderMessage.ts | 1 +
 12 files changed, 479 insertions(+), 286 deletions(-)
 create mode 100644 dist/types/provider-message/OllamaProviderMessage.d.ts
 create mode 100644 dist/types/provider-message/OllamaProviderMessage.d.ts.map
 create mode 100644 src/types/provider-message/OllamaProviderMessage.ts

diff --git a/.gitignore b/.gitignore
index 3f29b4e..a67e677 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,7 +7,6 @@
 # production
 /build
-/dist
 *.tgz
 
 # misc
diff --git a/dist/App.d.ts.map b/dist/App.d.ts.map
index 5357fef..5711b1e 100644
--- a/dist/App.d.ts.map
+++ b/dist/App.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"App.d.ts","sourceRoot":"","sources":["../src/App.tsx"],"names":[],"mappings":"AAaA,QAAA,MAAM,GAAG,+CAmHR,CAAC;AAEF,eAAe,GAAG,CAAC"}
\ No newline at end of file
+{"version":3,"file":"App.d.ts","sourceRoot":"","sources":["../src/App.tsx"],"names":[],"mappings":"AAaA,QAAA,MAAM,GAAG,+CAsHR,CAAC;AAEF,eAAe,GAAG,CAAC"}
\ No newline at end of file
diff --git a/dist/index.cjs b/dist/index.cjs
index 941caef..aeb45b8 100644
--- a/dist/index.cjs
+++ b/dist/index.cjs
@@ -1,4 +1,2 @@
-"use strict";var $=Object.create;var _=Object.defineProperty;var G=Object.getOwnPropertyDescriptor;var L=Object.getOwnPropertyNames;var N=Object.getPrototypeOf,z=Object.prototype.hasOwnProperty;var K=(n,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let o of L(e))!z.call(n,o)&&o!==t&&_(n,o,{get:()=>e[o],enumerable:!(s=G(e,o))||s.enumerable});return n};var J=(n,e,t)=>(t=n!=null?$(N(n)):{},K(e||!n||!n.__esModule?_(t,"default",{value:n,enumerable:!0}):t,n));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),m=require("react-chatbotify"),q={autoConfig:!0},H=(n,e)=>{const 
r=n()[s.data.nextPath];e(r)},[n,e]);m.useOnRcbEvent(m.RcbEvent.CHANGE_PATH,t)},Y=(n,e)=>{const{outputTypeRef:t}=n,{toggleTextAreaDisabled:s,toggleIsBotTyping:o,focusTextArea:r,injectMessage:a,simulateStreamMessage:i,getIsChatBotVisible:c}=e,l=g.useCallback(d=>{var u;const h=d.data.block;h.llmConnector&&(d.preventDefault(),d.type==="rcb-pre-process-block"&&((u=h.llmConnector)!=null&&u.initialMessage&&(t.current==="full"?a(n.initialMessageRef.current):i(n.initialMessageRef.current)),o(!1),s(!1),setTimeout(()=>{c()&&r()})))},[o,s,r,c]);m.useOnRcbEvent(m.RcbEvent.PRE_PROCESS_BLOCK,l),m.useOnRcbEvent(m.RcbEvent.POST_PROCESS_BLOCK,l)},V=async function*(n,e){for await(const t of n)for(const s of t)yield s,await new Promise(o=>setTimeout(o,e))},Q=async function*(n,e){for await(const t of n)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(n,e,t){e==="character"?yield*V(n,t):yield*Q(n,t)},Z=async function*(n,e){for await(const t of n)e(t),yield t},ee=async(n,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:o,toggleIsBotTyping:r,toggleTextAreaDisabled:a,focusTextArea:i,injectMessage:c,streamMessage:l,endStreamMessage:d,getIsChatBotVisible:h}=t,u=e.providerRef.current.sendMessages(n),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let p="";for await(const f of u){if((R=s.signal)!=null&&R.aborted)break;p+=f}r(!1),c(p),setTimeout(()=>{a(!1),h()&&i()})}else{const p=X(Z(u,o),b,y);let f="",S=!1;for await(const E of p){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=E,l(f)}d(),setTimeout(()=>{a(!1),h()&&i()})}},te=500,se=(n,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:o,onKeyDownRef:r,errorMessageRef:a}=n,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:l,toggleIsBotTyping:d,goToPath:h,focusTextArea:u,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!n.providerRef.current)return;const p=M.data.message,f=p.sender.toUpperCase();p.tags=p.tags??[],p.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(d(!0),l(!0),setTimeout(async()=>{var T;if(o.current){const P=await o.current(p);if(P)return(T=y.current)==null||T.abort(),y.current=null,h(P)}const S=n.historySizeRef.current,E=t.current,v=S?[...E.slice(-(S-1)),p]:[p],C=new AbortController;y.current=C,ee(v,n,e,{signal:C.signal}).catch(P=>{d(!1),l(!1),setTimeout(()=>{b()&&u()}),console.error("LLM prompt failed",P),s.current==="full"?i(a.current):c(a.current)})},te))},[n,e]);m.useOnRcbEvent(m.RcbEvent.POST_INJECT_MESSAGE,R),m.useOnRcbEvent(m.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),m.useOnRcbEvent(m.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async p=>{var f;if(r.current){const S=await r.current(p);S&&((f=y.current)==null||f.abort(),y.current=null,h(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=n=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),o=g.useRef(30),r=g.useRef(0),a=g.useRef(""),i=g.useRef("Unable to get response, please try again."),c=g.useRef(null),l=g.useRef(null),{getFlow:d}=m.useFlow(),{speakAudio:h}=m.useAudio(),{messages:u,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=m.useMessages(),{goToPath:p}=m.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=m.useTextArea(),{toggleIsBotTyping:E,getIsChatBotVisible:v}=m.useChatWindow(),C={...q,...n??{}};g.useEffect(()=>{e.current=u},[u]),H(d,w=>{var x,A,k,B,U,I,j,D,F,W;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 
0:A.outputType)??"chunk",o.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,a.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((I=w.llmConnector)==null?void 0:I.errorMessage)??"Unable to get response, please try again.",c.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onUserMessage)??null,l.current=((W=(F=w.llmConnector)==null?void 0:F.stopConditions)==null?void 0:W.onKeyDown)??null});const T={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:o,historySizeRef:r,initialMessageRef:a,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:l},P={speakAudio:h,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:E,focusTextArea:S,goToPath:p,getIsChatBotVisible:v};Y(T,P),se(T,P);const O={name:"@rcb-plugins/llm-connector"};return C!=null&&C.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=n=>()=>re(n);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let o;return this.messageParser?o=this.messageParser(s):o=s.filter(a=>typeof a.content=="string"&&a.sender.toUpperCase()!=="SYSTEM").map(a=>{const i=this.roleMap(a.sender.toUpperCase()),c=a.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(o=[{role:"user",parts:[{text:this.systemMessage}]},...o]),{contents:o,...this.body}},this.handleStreamResponse=async function*(s){var a,i,c,l,d;const o=new TextDecoder("utf-8");let r="";for(;;){const{value:h,done:u}=await s.read();if(u)break;r+=o.decode(h,{stream:!0});const b=r.split(` -`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(d=(l=(c=(i=(a=JSON.parse(M).candidates)==null?void 0:a[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:l[0])==null?void 0:d.text;f&&(yield f)}catch(p){console.error("SSE JSON parse error:",M,p)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,o,r,a,i;if(this.debug){const c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),l={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:l,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const 
c=t.body.getReader();for await(const l of this.handleStreamResponse(c))yield l}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const l=(i=(a=(r=(o=(s=c.candidates)==null?void 0:s[0])==null?void 0:o.content)==null?void 0:r.parts)==null?void 0:a[0])==null?void 0:i.text;if(typeof l=="string")yield l;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const a=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:a,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,a,i;const s=new TextDecoder("utf-8");let o="";for(;;){const{value:c,done:l}=await t.read();if(l)break;o+=s.decode(c,{stream:!0});const d=o.split(/\r?\n/);o=d.pop();for(const h of d){if(!h.startsWith("data: "))continue;const u=h.slice(6).trim();if(u==="[DONE]")return;try{const y=(i=(a=(r=JSON.parse(u).choices)==null?void 0:r[0])==null?void 0:a.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,o,r;if(this.debug){const a={...this.headers};delete a.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:a,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const a=t.body.getReader();for await(const i of this.handleStreamResponse(a))yield i}else{const a=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",a);const i=(r=(o=(s=a.choices)==null?void 0:s[0])==null?void 0:o.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const 
a=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:a,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,o,r,a,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const l of t){const d=(r=(o=l.choices[0])==null?void 0:o.delta)==null?void 0:r.content;d&&(yield d)}else(c=(i=(a=t==null?void 0:t.choices)==null?void 0:a[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){this.model=e.model,this.stream=e.stream??!0,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",...e.headers},this.endpoint=e.baseUrl??"http://localhost:11434/api/generate"}async*sendMessages(e){const t=e.filter(r=>typeof r.content=="string").map(r=>r.content).join(` -`),s={model:this.model,prompt:t,stream:this.stream};this.debug&&console.log("[OllamaProvider] Request:",{endpoint:this.endpoint,headers:this.headers,body:s});const o=await fetch(this.endpoint,{method:"POST",headers:this.headers,body:JSON.stringify(s)});if(!o.ok)throw new Error(`Ollama API error ${o.status}: ${await o.text()}`);if(this.stream){if(!o.body)throw new Error("No response body for streaming");const r=o.body.getReader(),a=new TextDecoder;let i="";for(;;){const{value:c,done:l}=await r.read();if(l)break;i+=a.decode(c,{stream:!0});const d=i.split(` -`);i=d.pop();for(const h of d)if(h.trim())try{const u=JSON.parse(h);u.response&&(yield u.response)}catch(u){this.debug&&console.error("Ollama stream parse error:",h,u)}}}else{const r=await o.json();r.response&&(yield r.response)}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; +"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const b=require("react"),p=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=b.useCallback(s=>{const 
r=a()[s.data.nextPath];e(r)},[a,e]);p.useOnRcbEvent(p.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:c}=e,d=b.useCallback(l=>{var m;const u=l.data.block;u.llmConnector&&(l.preventDefault(),l.type==="rcb-pre-process-block"&&((m=u.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{c()&&r()})))},[n,s,r,c]);p.useOnRcbEvent(p.RcbEvent.PRE_PROCESS_BLOCK,d),p.useOnRcbEvent(p.RcbEvent.POST_PROCESS_BLOCK,d)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:c,streamMessage:d,endStreamMessage:l,getIsChatBotVisible:u}=t,m=e.providerRef.current.sendMessages(a),f=e.outputTypeRef.current,g=e.outputSpeedRef.current;if(f==="full"){let h="";for await(const y of m){if((R=s.signal)!=null&&R.aborted)break;h+=y}r(!1),c(h),setTimeout(()=>{o(!1),u()&&i()})}else{const h=X(Z(m,n),f,g);let y="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),y+=C,d(y)}l(),setTimeout(()=>{o(!1),u()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:d,toggleIsBotTyping:l,goToPath:u,focusTextArea:m,getIsChatBotVisible:f}=e,g=b.useRef(null),R=b.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,y=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${y}`),y==="USER"&&(l(!0),d(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=g.current)==null||v.abort(),g.current=null,u(P)}const S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new AbortController;g.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{l(!1),d(!1),setTimeout(()=>{f()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):c(o.current)})},te))},[a,e]);p.useOnRcbEvent(p.RcbEvent.POST_INJECT_MESSAGE,R),p.useOnRcbEvent(p.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),p.useOnRcbEvent(p.RcbEvent.STOP_STREAM_MESSAGE,R),b.useEffect(()=>{const M=async h=>{var y;if(r.current){const S=await r.current(h);S&&((y=g.current)==null||y.abort(),g.current=null,u(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=b.useRef([]),t=b.useRef(null),s=b.useRef("chunk"),n=b.useRef(30),r=b.useRef(0),o=b.useRef(""),i=b.useRef("Unable to get response, please try again."),c=b.useRef(null),d=b.useRef(null),{getFlow:l}=p.useFlow(),{speakAudio:u}=p.useAudio(),{messages:m,injectMessage:f,simulateStreamMessage:g,streamMessage:R,endStreamMessage:M}=p.useMessages(),{goToPath:h}=p.usePaths(),{toggleTextAreaDisabled:y,focusTextArea:S}=p.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=p.useChatWindow(),E={...Y,...a??{}};b.useEffect(()=>{e.current=m},[m]),q(l,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 
0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get response, please try again.",c.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,d.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:d},P={speakAudio:u,injectMessage:f,simulateStreamMessage:g,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:y,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),c=o.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,c,d,l;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:u,done:m}=await s.read();if(m)break;r+=n.decode(u,{stream:!0});const f=r.split(` +`);r=f.pop();for(const g of f){const R=g.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const y=(l=(d=(c=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:d[0])==null?void 0:l.text;y&&(yield y)}catch(h){console.error("SSE JSON parse error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),d={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:d,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const 
c=t.body.getReader();for await(const d of this.handleStreamResponse(c))yield d}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const d=(i=(o=(r=(n=(s=c.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 0:i.text;if(typeof d=="string")yield d;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:d}=await t.read();if(d)break;n+=s.decode(c,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const u of l){if(!u.startsWith("data: "))continue;const m=u.slice(6).trim();if(m==="[DONE]")return;try{const g=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;g&&(yield g)}catch(f){console.error("Stream parse error",f)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const 
o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const d of t){const l=(r=(n=d.choices[0])==null?void 0:n.delta)==null?void 0:r.content;l&&(yield l)}else(c=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:d}=await t.read();if(d)break;n+=s.decode(c,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const u of l){if(!u.startsWith("data: "))continue;const m=u.slice(6).trim();try{const f=JSON.parse(m);if(f.done===!0)return;const g=(i=(o=(r=f.choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;g&&(yield g)}catch(f){console.error("Stream parse error",f)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API 
error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; diff --git a/dist/index.js b/dist/index.js index f4b75c1..36730a5 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,154 +1,154 @@ -import { useCallback as O, useRef as w, useEffect as N } from "react"; -import { useOnRcbEvent as C, RcbEvent as T, useFlow as G, useAudio as L, useMessages as z, usePaths as K, useTextArea as J, useChatWindow as H } from "react-chatbotify"; -const Y = { +import { useCallback as O, useRef as S, useEffect as z } from "react"; +import { useOnRcbEvent as C, RcbEvent as T, useFlow as G, useAudio as L, useMessages as N, usePaths as K, useTextArea as J, useChatWindow as Y } from "react-chatbotify"; +const H = { autoConfig: !0 }, q = (i, e) => { const t = O( - (o) => { - const s = i()[o.data.nextPath]; - e(s); + (s) => { + const r = i()[s.data.nextPath]; + e(r); }, [i, e] ); C(T.CHANGE_PATH, t); }, V = (i, e) => { const { outputTypeRef: t } = i, { - toggleTextAreaDisabled: o, - toggleIsBotTyping: r, - focusTextArea: s, - injectMessage: n, + toggleTextAreaDisabled: s, + toggleIsBotTyping: n, + focusTextArea: r, + injectMessage: o, simulateStreamMessage: a, getIsChatBotVisible: c - } = e, l = O( - (d) => { - var h; - const u = d.data.block; - u.llmConnector && (d.preventDefault(), d.type === "rcb-pre-process-block" && ((h = u.llmConnector) != null && h.initialMessage && (t.current === "full" ? n(i.initialMessageRef.current) : a(i.initialMessageRef.current)), r(!1), o(!1), setTimeout(() => { - c() && s(); + } = e, d = O( + (l) => { + var p; + const u = l.data.block; + u.llmConnector && (l.preventDefault(), l.type === "rcb-pre-process-block" && ((p = u.llmConnector) != null && p.initialMessage && (t.current === "full" ? o(i.initialMessageRef.current) : a(i.initialMessageRef.current)), n(!1), s(!1), setTimeout(() => { + c() && r(); }))); }, - [r, o, s, c] + [n, s, r, c] ); - C(T.PRE_PROCESS_BLOCK, l), C(T.POST_PROCESS_BLOCK, l); + C(T.PRE_PROCESS_BLOCK, d), C(T.POST_PROCESS_BLOCK, d); }, Q = async function* (i, e) { for await (const t of i) - for (const o of t) - yield o, await new Promise((r) => setTimeout(r, e)); + for (const s of t) + yield s, await new Promise((n) => setTimeout(n, e)); }, X = async function* (i, e) { for await (const t of i) - yield t, await new Promise((o) => setTimeout(o, e)); + yield t, await new Promise((s) => setTimeout(s, e)); }, Z = async function* (i, e, t) { e === "character" ? 
yield* Q(i, t) : yield* X(i, t); }, ee = async function* (i, e) { for await (const t of i) e(t), yield t; -}, te = async (i, e, t, o = {}) => { - var b, y; +}, te = async (i, e, t, s = {}) => { + var M, y; if (!e.providerRef.current) return; const { - speakAudio: r, - toggleIsBotTyping: s, - toggleTextAreaDisabled: n, + speakAudio: n, + toggleIsBotTyping: r, + toggleTextAreaDisabled: o, focusTextArea: a, injectMessage: c, - streamMessage: l, - endStreamMessage: d, + streamMessage: d, + endStreamMessage: l, getIsChatBotVisible: u - } = t, h = e.providerRef.current.sendMessages(i), g = e.outputTypeRef.current, f = e.outputSpeedRef.current; - if (g === "full") { - let p = ""; - for await (const m of h) { - if ((b = o.signal) != null && b.aborted) break; - p += m; + } = t, p = e.providerRef.current.sendMessages(i), m = e.outputTypeRef.current, f = e.outputSpeedRef.current; + if (m === "full") { + let h = ""; + for await (const g of p) { + if ((M = s.signal) != null && M.aborted) break; + h += g; } - s(!1), c(p), setTimeout(() => { - n(!1), u() && a(); + r(!1), c(h), setTimeout(() => { + o(!1), u() && a(); }); } else { - const p = Z(ee(h, r), g, f); - let m = "", M = !1; - for await (const E of p) { - if ((y = o.signal) != null && y.aborted) + const h = Z(ee(p, n), m, f); + let g = "", b = !1; + for await (const E of h) { + if ((y = s.signal) != null && y.aborted) break; - M || (s(!1), M = !0), m += E, l(m); + b || (r(!1), b = !0), g += E, d(g); } - d(), setTimeout(() => { - n(!1), u() && a(); + l(), setTimeout(() => { + o(!1), u() && a(); }); } -}, se = 500, oe = (i, e) => { - const { messagesRef: t, outputTypeRef: o, onUserMessageRef: r, onKeyDownRef: s, errorMessageRef: n } = i, { +}, se = 500, re = (i, e) => { + const { messagesRef: t, outputTypeRef: s, onUserMessageRef: n, onKeyDownRef: r, errorMessageRef: o } = i, { injectMessage: a, simulateStreamMessage: c, - toggleTextAreaDisabled: l, - toggleIsBotTyping: d, + toggleTextAreaDisabled: d, + toggleIsBotTyping: l, goToPath: u, - focusTextArea: h, - getIsChatBotVisible: g - } = e, f = w(null), b = O( + focusTextArea: p, + getIsChatBotVisible: m + } = e, f = S(null), M = O( (y) => { if (!i.providerRef.current) return; - const p = y.data.message, m = p.sender.toUpperCase(); - p.tags = p.tags ?? [], p.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (d(!0), l(!0), setTimeout(async () => { + const h = y.data.message, g = h.sender.toUpperCase(); + h.tags = h.tags ?? [], h.tags.push(`rcb-llm-connector-plugin:${g}`), g === "USER" && (l(!0), d(!0), setTimeout(async () => { var v; - if (r.current) { - const R = await r.current(p); + if (n.current) { + const R = await n.current(h); if (R) return (v = f.current) == null || v.abort(), f.current = null, u(R); } - const M = i.historySizeRef.current, E = t.current, x = M ? [...E.slice(-(M - 1)), p] : [p], P = new AbortController(); + const b = i.historySizeRef.current, E = t.current, x = b ? [...E.slice(-(b - 1)), h] : [h], P = new AbortController(); f.current = P, te(x, i, e, { signal: P.signal }).catch((R) => { - d(!1), l(!1), setTimeout(() => { - g() && h(); - }), console.error("LLM prompt failed", R), o.current === "full" ? a(n.current) : c(n.current); + l(!1), d(!1), setTimeout(() => { + m() && p(); + }), console.error("LLM prompt failed", R), s.current === "full" ? 
a(o.current) : c(o.current); }); }, se)); }, [i, e] ); - C(T.POST_INJECT_MESSAGE, b), C(T.STOP_SIMULATE_STREAM_MESSAGE, b), C(T.STOP_STREAM_MESSAGE, b), N(() => { - const y = async (p) => { - var m; - if (s.current) { - const M = await s.current(p); - M && ((m = f.current) == null || m.abort(), f.current = null, u(M)); + C(T.POST_INJECT_MESSAGE, M), C(T.STOP_SIMULATE_STREAM_MESSAGE, M), C(T.STOP_STREAM_MESSAGE, M), z(() => { + const y = async (h) => { + var g; + if (r.current) { + const b = await r.current(h); + b && ((g = f.current) == null || g.abort(), f.current = null, u(b)); } }; return window.addEventListener("keydown", y), () => window.removeEventListener("keydown", y); }, []); -}, re = (i) => { - const e = w([]), t = w(null), o = w("chunk"), r = w(30), s = w(0), n = w(""), a = w("Unable to get response, please try again."), c = w(null), l = w(null), { getFlow: d } = G(), { speakAudio: u } = L(), { messages: h, injectMessage: g, simulateStreamMessage: f, streamMessage: b, endStreamMessage: y } = z(), { goToPath: p } = K(), { toggleTextAreaDisabled: m, focusTextArea: M } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = H(), P = { ...Y, ...i ?? {} }; - N(() => { - e.current = h; - }, [h]), q(d, (S) => { - var k, B, U, I, D, F, j, $, W, _; - t.current = ((k = S.llmConnector) == null ? void 0 : k.provider) ?? null, o.current = ((B = S.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", r.current = ((U = S.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, s.current = ((I = S.llmConnector) == null ? void 0 : I.historySize) ?? 0, n.current = ((D = S.llmConnector) == null ? void 0 : D.initialMessage) ?? "", a.current = ((F = S.llmConnector) == null ? void 0 : F.errorMessage) ?? "Unable to get response, please try again.", c.current = (($ = (j = S.llmConnector) == null ? void 0 : j.stopConditions) == null ? void 0 : $.onUserMessage) ?? null, l.current = ((_ = (W = S.llmConnector) == null ? void 0 : W.stopConditions) == null ? void 0 : _.onKeyDown) ?? null; +}, oe = (i) => { + const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), c = S(null), d = S(null), { getFlow: l } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: m, simulateStreamMessage: f, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: g, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; + z(() => { + e.current = p; + }, [p]), q(l, (w) => { + var k, B, U, F, I, W, j, D, $, _; + t.current = ((k = w.llmConnector) == null ? void 0 : k.provider) ?? null, s.current = ((B = w.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", n.current = ((U = w.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, r.current = ((F = w.llmConnector) == null ? void 0 : F.historySize) ?? 0, o.current = ((I = w.llmConnector) == null ? void 0 : I.initialMessage) ?? "", a.current = ((W = w.llmConnector) == null ? void 0 : W.errorMessage) ?? "Unable to get response, please try again.", c.current = ((D = (j = w.llmConnector) == null ? void 0 : j.stopConditions) == null ? void 0 : D.onUserMessage) ?? null, d.current = ((_ = ($ = w.llmConnector) == null ? void 0 : $.stopConditions) == null ? void 0 : _.onKeyDown) ?? 
null; }); const v = { providerRef: t, messagesRef: e, - outputTypeRef: o, - outputSpeedRef: r, - historySizeRef: s, - initialMessageRef: n, + outputTypeRef: s, + outputSpeedRef: n, + historySizeRef: r, + initialMessageRef: o, errorMessageRef: a, onUserMessageRef: c, - onKeyDownRef: l + onKeyDownRef: d }, R = { speakAudio: u, - injectMessage: g, + injectMessage: m, simulateStreamMessage: f, - streamMessage: b, + streamMessage: M, endStreamMessage: y, - toggleTextAreaDisabled: m, + toggleTextAreaDisabled: g, toggleIsBotTyping: E, - focusTextArea: M, - goToPath: p, + focusTextArea: b, + goToPath: h, getIsChatBotVisible: x }; - V(v, R), oe(v, R); + V(v, R), re(v, R); const A = { name: "@rcb-plugins/llm-connector" }; return P != null && P.autoConfig && (A.settings = { event: { @@ -160,7 +160,7 @@ const Y = { rcbPostProcessBlock: !0 } }), A; -}, ie = (i) => () => re(i); +}, ie = (i) => () => oe(i); class ce { /** * Sets default values for the provider based on given configuration. Configuration guide here: @@ -169,47 +169,47 @@ class ce { * @param config configuration for setup */ constructor(e) { - this.debug = !1, this.roleMap = (o) => { - switch (o) { + this.debug = !1, this.roleMap = (s) => { + switch (s) { case "USER": return "user"; default: return "model"; } - }, this.constructBodyWithMessages = (o) => { - let r; - return this.messageParser ? r = this.messageParser(o) : r = o.filter( - (n) => typeof n.content == "string" && n.sender.toUpperCase() !== "SYSTEM" - ).map((n) => { - const a = this.roleMap(n.sender.toUpperCase()), c = n.content; + }, this.constructBodyWithMessages = (s) => { + let n; + return this.messageParser ? n = this.messageParser(s) : n = s.filter( + (o) => typeof o.content == "string" && o.sender.toUpperCase() !== "SYSTEM" + ).map((o) => { + const a = this.roleMap(o.sender.toUpperCase()), c = o.content; return { role: a, parts: [{ text: c }] }; - }), this.systemMessage && (r = [{ role: "user", parts: [{ text: this.systemMessage }] }, ...r]), { - contents: r, + }), this.systemMessage && (n = [{ role: "user", parts: [{ text: this.systemMessage }] }, ...n]), { + contents: n, ...this.body }; - }, this.handleStreamResponse = async function* (o) { - var n, a, c, l, d; - const r = new TextDecoder("utf-8"); - let s = ""; + }, this.handleStreamResponse = async function* (s) { + var o, a, c, d, l; + const n = new TextDecoder("utf-8"); + let r = ""; for (; ; ) { - const { value: u, done: h } = await o.read(); - if (h) break; - s += r.decode(u, { stream: !0 }); - const g = s.split(` + const { value: u, done: p } = await s.read(); + if (p) break; + r += n.decode(u, { stream: !0 }); + const m = r.split(` `); - s = g.pop(); - for (const f of g) { - const b = f.trim(); - if (!b.startsWith("data: ")) continue; - const y = b.slice(6); + r = m.pop(); + for (const f of m) { + const M = f.trim(); + if (!M.startsWith("data: ")) continue; + const y = M.slice(6); try { - const m = (d = (l = (c = (a = (n = JSON.parse(y).candidates) == null ? void 0 : n[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : l[0]) == null ? void 0 : d.text; - m && (yield m); - } catch (p) { - console.error("SSE JSON parse error:", y, p); + const g = (l = (d = (c = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : d[0]) == null ? 
void 0 : l.text; + g && (yield g); + } catch (h) { + console.error("SSE JSON parse error:", y, h); } } } @@ -232,13 +232,13 @@ class ce { * @param messages messages to include in the request */ async *sendMessages(e) { - var o, r, s, n, a; + var s, n, r, o, a; if (this.debug) { - const c = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), l = { ...this.headers }; + const c = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), d = { ...this.headers }; console.log("[GeminiProvider] Request:", { method: this.method, endpoint: c, - headers: l, + headers: d, body: this.constructBodyWithMessages(e) }); } @@ -253,20 +253,20 @@ class ce { if (!t.body) throw new Error("Response body is empty – cannot stream"); const c = t.body.getReader(); - for await (const l of this.handleStreamResponse(c)) - yield l; + for await (const d of this.handleStreamResponse(c)) + yield d; } else { const c = await t.json(); this.debug && console.log("[GeminiProvider] Response body:", c); - const l = (a = (n = (s = (r = (o = c.candidates) == null ? void 0 : o[0]) == null ? void 0 : r.content) == null ? void 0 : s.parts) == null ? void 0 : n[0]) == null ? void 0 : a.text; - if (typeof l == "string") - yield l; + const d = (a = (o = (r = (n = (s = c.candidates) == null ? void 0 : s[0]) == null ? void 0 : n.content) == null ? void 0 : r.parts) == null ? void 0 : o[0]) == null ? void 0 : a.text; + if (typeof d == "string") + yield d; else throw new Error("Unexpected response shape – no text candidate"); } } } -class le { +class de { /** * Sets default values for the provider based on given configuration. Configuration guide here: * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md @@ -284,38 +284,38 @@ class le { return "assistant"; } }, this.constructBodyWithMessages = (t) => { - let o; - return this.messageParser ? o = this.messageParser(t) : o = t.filter( - (s) => typeof s.content == "string" && s.sender.toUpperCase() !== "SYSTEM" - ).map((s) => { - const n = this.roleMap(s.sender.toUpperCase()), a = s.content; + let s; + return this.messageParser ? s = this.messageParser(t) : s = t.filter( + (r) => typeof r.content == "string" && r.sender.toUpperCase() !== "SYSTEM" + ).map((r) => { + const o = this.roleMap(r.sender.toUpperCase()), a = r.content; return { - role: n, + role: o, content: a }; - }), this.systemMessage && (o = [{ role: "system", content: this.systemMessage }, ...o]), { - messages: o, + }), this.systemMessage && (s = [{ role: "system", content: this.systemMessage }, ...s]), { + messages: s, ...this.body }; }, this.handleStreamResponse = async function* (t) { - var s, n, a; - const o = new TextDecoder("utf-8"); - let r = ""; + var r, o, a; + const s = new TextDecoder("utf-8"); + let n = ""; for (; ; ) { - const { value: c, done: l } = await t.read(); - if (l) break; - r += o.decode(c, { stream: !0 }); - const d = r.split(/\r?\n/); - r = d.pop(); - for (const u of d) { + const { value: c, done: d } = await t.read(); + if (d) break; + n += s.decode(c, { stream: !0 }); + const l = n.split(/\r?\n/); + n = l.pop(); + for (const u of l) { if (!u.startsWith("data: ")) continue; - const h = u.slice(6).trim(); - if (h === "[DONE]") return; + const p = u.slice(6).trim(); + if (p === "[DONE]") return; try { - const f = (a = (n = (s = JSON.parse(h).choices) == null ? void 0 : s[0]) == null ? void 0 : n.delta) == null ? void 0 : a.content; + const f = (a = (o = (r = JSON.parse(p).choices) == null ? void 0 : r[0]) == null ? void 0 : o.delta) == null ? 
void 0 : a.content; f && (yield f); - } catch (g) { - console.error("Stream parse error", g); + } catch (m) { + console.error("Stream parse error", m); } } } @@ -340,13 +340,13 @@ class le { * @param messages messages to include in the request */ async *sendMessages(e) { - var o, r, s; + var s, n, r; if (this.debug) { - const n = { ...this.headers }; - delete n.Authorization, console.log("[OpenaiProvider] Request:", { + const o = { ...this.headers }; + delete o.Authorization, console.log("[OpenaiProvider] Request:", { method: this.method, endpoint: this.endpoint, - headers: n, + headers: o, body: this.constructBodyWithMessages(e) }); } @@ -360,13 +360,13 @@ class le { if (this.responseFormat === "stream") { if (!t.body) throw new Error("Response body is empty – cannot stream"); - const n = t.body.getReader(); - for await (const a of this.handleStreamResponse(n)) + const o = t.body.getReader(); + for await (const a of this.handleStreamResponse(o)) yield a; } else { - const n = await t.json(); - this.debug && console.log("[OpenaiProvider] Response body:", n); - const a = (s = (r = (o = n.choices) == null ? void 0 : o[0]) == null ? void 0 : r.message) == null ? void 0 : s.content; + const o = await t.json(); + this.debug && console.log("[OpenaiProvider] Response body:", o); + const a = (r = (n = (s = o.choices) == null ? void 0 : s[0]) == null ? void 0 : n.message) == null ? void 0 : r.content; if (typeof a == "string") yield a; else @@ -374,7 +374,7 @@ class le { } } } -class de { +class le { /** * Sets default values for the provider based on given configuration. Configuration guide here: * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/WebLlm.md @@ -392,23 +392,23 @@ class de { return "assistant"; } }, this.constructBodyWithMessages = (t) => { - let o; - return this.messageParser ? o = this.messageParser(t) : o = t.filter( - (s) => typeof s.content == "string" && s.sender.toUpperCase() !== "SYSTEM" - ).map((s) => { - const n = this.roleMap(s.sender.toUpperCase()), a = s.content; + let s; + return this.messageParser ? s = this.messageParser(t) : s = t.filter( + (r) => typeof r.content == "string" && r.sender.toUpperCase() !== "SYSTEM" + ).map((r) => { + const o = this.roleMap(r.sender.toUpperCase()), a = r.content; return { - role: n, + role: o, content: a }; - }), this.systemMessage && (o = [ + }), this.systemMessage && (s = [ { role: "system", content: this.systemMessage }, - ...o + ...s ]), { - messages: o, + messages: s, stream: this.responseFormat === "stream", ...this.chatCompletionOptions }; @@ -429,7 +429,7 @@ class de { * @param messages messages to include in the request */ async *sendMessages(e) { - var o, r, s, n, a, c; + var s, n, r, o, a, c; this.engine || await this.createEngine(), this.debug && console.log("[WebLlmProvider] Request:", { model: this.model, systemMessage: this.systemMessage, @@ -439,71 +439,128 @@ class de { messages: this.constructBodyWithMessages(e).messages // Log messages being sent }); - const t = await ((o = this.engine) == null ? void 0 : o.chat.completions.create(this.constructBodyWithMessages(e))); + const t = await ((s = this.engine) == null ? void 0 : s.chat.completions.create(this.constructBodyWithMessages(e))); if (this.debug && console.log("[WebLlmProvider] Response:", t), t && Symbol.asyncIterator in t) - for await (const l of t) { - const d = (s = (r = l.choices[0]) == null ? void 0 : r.delta) == null ? 
void 0 : s.content; - d && (yield d); + for await (const d of t) { + const l = (r = (n = d.choices[0]) == null ? void 0 : n.delta) == null ? void 0 : r.content; + l && (yield l); } - else (c = (a = (n = t == null ? void 0 : t.choices) == null ? void 0 : n[0]) == null ? void 0 : a.message) != null && c.content && (yield t.choices[0].message.content); + else (c = (a = (o = t == null ? void 0 : t.choices) == null ? void 0 : o[0]) == null ? void 0 : a.message) != null && c.content && (yield t.choices[0].message.content); } } class he { + /** + * Sets default values for the provider based on given configuration. Configuration guide here: + * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md + * + * @param config configuration for setup + */ constructor(e) { - this.model = e.model, this.stream = e.stream ?? !0, this.debug = e.debug ?? !1, this.headers = { + if (this.debug = !1, this.roleMap = (t) => { + switch (t) { + case "USER": + return "user"; + case "SYSTEM": + return "system"; + default: + return "assistant"; + } + }, this.constructBodyWithMessages = (t) => { + let s; + return this.messageParser ? s = this.messageParser(t) : s = t.filter( + (r) => typeof r.content == "string" && r.sender.toUpperCase() !== "SYSTEM" + ).map((r) => { + const o = this.roleMap(r.sender.toUpperCase()), a = r.content; + return { + role: o, + content: a + }; + }), this.systemMessage && (s = [{ role: "system", content: this.systemMessage }, ...s]), { + messages: s, + ...this.body + }; + }, this.handleStreamResponse = async function* (t) { + var r, o, a; + const s = new TextDecoder("utf-8"); + let n = ""; + for (; ; ) { + const { value: c, done: d } = await t.read(); + if (d) break; + n += s.decode(c, { stream: !0 }); + const l = n.split(/\r?\n/); + n = l.pop(); + for (const u of l) { + if (!u.startsWith("data: ")) continue; + const p = u.slice(6).trim(); + try { + const m = JSON.parse(p); + if (m.done === !0) return; + const f = (a = (o = (r = m.choices) == null ? void 0 : r[0]) == null ? void 0 : o.delta) == null ? void 0 : a.content; + f && (yield f); + } catch (m) { + console.error("Stream parse error", m); + } + } + } + }, this.method = e.method ?? "POST", this.endpoint = e.baseUrl ?? "https://api.openai.com/v1/chat/completions", this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.debug = e.debug ?? !1, this.headers = { "Content-Type": "application/json", + Accept: this.responseFormat === "stream" ? "text/event-stream" : "application/json", ...e.headers - }, this.endpoint = e.baseUrl ?? "http://localhost:11434/api/generate"; + }, this.body = { + model: e.model, + stream: this.responseFormat === "stream", + ...e.body + }, e.mode === "direct") { + this.headers = { ...this.headers, Authorization: `Bearer ${e.apiKey}` }; + return; + } + if (e.mode !== "proxy") + throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy')."); } + /** + * Calls Openai and yields each chunk (or the full text). 
+ * + * @param messages messages to include in the request + */ async *sendMessages(e) { - const t = e.filter((s) => typeof s.content == "string").map((s) => s.content).join(` -`), o = { - model: this.model, - prompt: t, - stream: this.stream - }; - this.debug && console.log("[OllamaProvider] Request:", { - endpoint: this.endpoint, - headers: this.headers, - body: o - }); - const r = await fetch(this.endpoint, { - method: "POST", + var s, n, r; + if (this.debug) { + const o = { ...this.headers }; + delete o.Authorization, console.log("[OpenaiProvider] Request:", { + method: this.method, + endpoint: this.endpoint, + headers: o, + body: this.constructBodyWithMessages(e) + }); + } + const t = await fetch(this.endpoint, { + method: this.method, headers: this.headers, - body: JSON.stringify(o) + body: JSON.stringify(this.constructBodyWithMessages(e)) }); - if (!r.ok) - throw new Error(`Ollama API error ${r.status}: ${await r.text()}`); - if (this.stream) { - if (!r.body) throw new Error("No response body for streaming"); - const s = r.body.getReader(), n = new TextDecoder(); - let a = ""; - for (; ; ) { - const { value: c, done: l } = await s.read(); - if (l) break; - a += n.decode(c, { stream: !0 }); - const d = a.split(` -`); - a = d.pop(); - for (const u of d) - if (u.trim()) - try { - const h = JSON.parse(u); - h.response && (yield h.response); - } catch (h) { - this.debug && console.error("Ollama stream parse error:", u, h); - } - } + if (this.debug && console.log("[OpenaiProvider] Response status:", t.status), !t.ok) + throw new Error(`Openai API error ${t.status}: ${await t.text()}`); + if (this.responseFormat === "stream") { + if (!t.body) + throw new Error("Response body is empty – cannot stream"); + const o = t.body.getReader(); + for await (const a of this.handleStreamResponse(o)) + yield a; } else { - const s = await r.json(); - s.response && (yield s.response); + const o = await t.json(); + this.debug && console.log("[OpenaiProvider] Response body:", o); + const a = (r = (n = (s = o.choices) == null ? void 0 : s[0]) == null ? void 0 : n.message) == null ? void 0 : r.content; + if (typeof a == "string") + yield a; + else + throw new Error("Unexpected response shape – no text candidate"); } } } export { ce as GeminiProvider, he as OllamaProvider, - le as OpenaiProvider, - de as WebLlmProvider, + de as OpenaiProvider, + le as WebLlmProvider, ie as default }; diff --git a/dist/providers/OllamaProvider.d.ts b/dist/providers/OllamaProvider.d.ts index 8ff10b3..431a8bc 100644 --- a/dist/providers/OllamaProvider.d.ts +++ b/dist/providers/OllamaProvider.d.ts @@ -1,20 +1,49 @@ import { Provider } from '../types/Provider'; import { Message } from 'react-chatbotify'; -interface OllamaProviderConfig { - model: string; - baseUrl?: string; - stream?: boolean; - debug?: boolean; - headers?: Record; -} +import { OpenaiProviderConfig } from '../types/provider-config/OpenaiProviderConfig'; +/** + * Provider for Openai’s API, supports both direct and proxy modes. + */ declare class OllamaProvider implements Provider { + private method; private endpoint; - private model; - private stream; - private debug; private headers; - constructor(config: OllamaProviderConfig); + private body; + private systemMessage?; + private responseFormat; + private messageParser?; + private debug; + /** + * Sets default values for the provider based on given configuration. 
Configuration guide here: + * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md + * + * @param config configuration for setup + */ + constructor(config: OpenaiProviderConfig); + /** + * Calls Openai and yields each chunk (or the full text). + * + * @param messages messages to include in the request + */ sendMessages(messages: Message[]): AsyncGenerator; + /** + * Maps the chatbot message sender to the provider message sender. + * + * @param sender sender from the chatbot + */ + private roleMap; + /** + * Builds the full request body. + * + * @param messages messages to parse + */ + private constructBodyWithMessages; + /** + * Consumes an SSE/text stream Response and yield each text chunk. + * + * @reader request body reader + */ + private handleStreamResponse; } export default OllamaProvider; //# sourceMappingURL=OllamaProvider.d.ts.map \ No newline at end of file diff --git a/dist/providers/OllamaProvider.d.ts.map b/dist/providers/OllamaProvider.d.ts.map index 1a53850..bf88f1f 100644 --- a/dist/providers/OllamaProvider.d.ts.map +++ b/dist/providers/OllamaProvider.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,UAAU,oBAAoB;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACjC;AAED,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,KAAK,CAAU;IACvB,OAAO,CAAC,OAAO,CAAyB;gBAErB,MAAM,EAAE,oBAAoB;IAWjC,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;CAwDvE;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file +{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA6B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA2B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/dist/tsconfig.tsbuildinfo b/dist/tsconfig.tsbuildinfo index b7e519a..afe2545 100644 --- a/dist/tsconfig.tsbuildinfo +++ b/dist/tsconfig.tsbuildinfo @@ -1 +1 @@ 
-{"root":["../src/app.tsx","../src/development.tsx","../src/index.tsx","../src/vite-env.d.ts","../src/constants/defaultpluginconfig.ts","../src/core/usercbplugin.tsx","../src/factory/rcbpluginfactory.ts","../src/hooks/usechangepath.ts","../src/hooks/usemessagehandler.ts","../src/hooks/useprocessblock.ts","../src/providers/geminiprovider.ts","../src/providers/ollamaprovider.ts","../src/providers/openaiprovider.ts","../src/providers/webllmprovider.ts","../src/types/llmconnectorblock.ts","../src/types/pluginconfig.ts","../src/types/provider.ts","../src/types/provider-config/geminiproviderconfig.ts","../src/types/provider-config/openaiproviderconfig.ts","../src/types/provider-config/webllmproviderconfig.ts","../src/types/provider-message/geminiprovidermessage.ts","../src/types/provider-message/openaiprovidermessage.ts","../src/types/provider-message/webllmprovidermessage.ts","../src/utils/prompthandler.tsx","../src/utils/streamcontroller.ts"],"version":"5.8.3"} \ No newline at end of file +{"root":["../src/app.tsx","../src/development.tsx","../src/index.tsx","../src/vite-env.d.ts","../src/constants/defaultpluginconfig.ts","../src/core/usercbplugin.tsx","../src/factory/rcbpluginfactory.ts","../src/hooks/usechangepath.ts","../src/hooks/usemessagehandler.ts","../src/hooks/useprocessblock.ts","../src/providers/geminiprovider.ts","../src/providers/ollamaprovider.ts","../src/providers/openaiprovider.ts","../src/providers/webllmprovider.ts","../src/types/llmconnectorblock.ts","../src/types/pluginconfig.ts","../src/types/provider.ts","../src/types/provider-config/geminiproviderconfig.ts","../src/types/provider-config/openaiproviderconfig.ts","../src/types/provider-config/webllmproviderconfig.ts","../src/types/provider-message/geminiprovidermessage.ts","../src/types/provider-message/ollamaprovidermessage.ts","../src/types/provider-message/openaiprovidermessage.ts","../src/types/provider-message/webllmprovidermessage.ts","../src/utils/prompthandler.tsx","../src/utils/streamcontroller.ts"],"version":"5.8.3"} \ No newline at end of file diff --git a/dist/types/provider-message/OllamaProviderMessage.d.ts b/dist/types/provider-message/OllamaProviderMessage.d.ts new file mode 100644 index 0000000..11e016e --- /dev/null +++ b/dist/types/provider-message/OllamaProviderMessage.d.ts @@ -0,0 +1 @@ +//# sourceMappingURL=OllamaProviderMessage.d.ts.map \ No newline at end of file diff --git a/dist/types/provider-message/OllamaProviderMessage.d.ts.map b/dist/types/provider-message/OllamaProviderMessage.d.ts.map new file mode 100644 index 0000000..06ca4ad --- /dev/null +++ b/dist/types/provider-message/OllamaProviderMessage.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"OllamaProviderMessage.d.ts","sourceRoot":"","sources":["../../../src/types/provider-message/OllamaProviderMessage.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/src/App.tsx b/src/App.tsx index de5c3d2..372830e 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -114,7 +114,10 @@ const App = () => { ollama: { llmConnector: { provider: new OllamaProvider({ - model: 'gemma3', + baseUrl: 'http://localhost:11434/api/chat', + mode: 'direct', + model: 'robot', + apiKey: '', }), outputType: 'character', stopConditions: { diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts index c00ca2c..944d9d3 100644 --- a/src/providers/OllamaProvider.ts +++ b/src/providers/OllamaProvider.ts @@ -1,88 +1,192 @@ import { Provider } from '../types/Provider'; import { Message } from 'react-chatbotify'; +import { OpenaiProviderConfig } 
from '../types/provider-config/OpenaiProviderConfig';
+import { OpenaiProviderMessage } from '../types/provider-message/OpenaiProviderMessage';
 
-interface OllamaProviderConfig {
-	model: string;
-	baseUrl?: string;
-	stream?: boolean;
-	debug?: boolean;
-	headers?: Record<string, string>;
-}
-
+/**
+ * Provider for Ollama, mirroring the OpenAI-compatible chat completions API; supports both direct and proxy modes.
+ */
 class OllamaProvider implements Provider {
-	private endpoint: string;
-	private model: string;
-	private stream: boolean;
-	private debug: boolean;
-	private headers: Record<string, string>;
-
-	public constructor(config: OllamaProviderConfig) {
-		this.model = config.model;
-		this.stream = config.stream ?? true;
+	private method!: string;
+	private endpoint!: string;
+	private headers!: Record<string, unknown>;
+	private body!: Record<string, unknown>;
+	private systemMessage?: string;
+	private responseFormat!: 'stream' | 'json';
+	private messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+	private debug: boolean = false;
+
+	/**
+	 * Sets default values for the provider based on given configuration. Configuration guide here:
+	 * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md
+	 *
+	 * @param config configuration for setup
+	 */
+	public constructor(config: OpenaiProviderConfig) {
+		this.method = config.method ?? 'POST';
+		this.endpoint = config.baseUrl ?? 'https://api.openai.com/v1/chat/completions';
+		this.systemMessage = config.systemMessage;
+		this.responseFormat = config.responseFormat ?? 'stream';
+		this.messageParser = config.messageParser;
 		this.debug = config.debug ?? false;
 		this.headers = {
 			'Content-Type': 'application/json',
+			Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
 			...config.headers,
 		};
-		this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/generate';
+		this.body = {
+			model: config.model,
+			stream: this.responseFormat === 'stream',
+			...config.body,
+		};
+
+		if (config.mode === 'direct') {
+			this.headers = { ...this.headers, Authorization: `Bearer ${config.apiKey}` };
+			return;
+		}
+
+		if (config.mode !== 'proxy') {
+			throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy').");
+		}
 	}
 
+	/**
+	 * Calls Ollama and yields each chunk (or the full text).
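+	 * Streaming responses are expected in OpenAI-style SSE form ("data: {...}" lines carrying
+	 * choices[0].delta.content, plus an optional done flag); the native NDJSON format of
+	 * Ollama's /api/generate endpoint is no longer parsed here.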
+	 *
+	 * @param messages messages to include in the request
+	 */
 	public async *sendMessages(messages: Message[]): AsyncGenerator<string> {
-		const prompt = messages
-			.filter((m) => typeof m.content === 'string')
-			.map((m) => m.content)
-			.join('\n');
-
-		const body = {
-			model: this.model,
-			prompt,
-			stream: this.stream,
-		};
-
 		if (this.debug) {
-			console.log('[OllamaProvider] Request:', {
+			const sanitizedHeaders = { ...this.headers };
+			delete sanitizedHeaders['Authorization'];
+			console.log('[OllamaProvider] Request:', {
+				method: this.method,
 				endpoint: this.endpoint,
-				headers: this.headers,
-				body,
+				headers: sanitizedHeaders,
+				body: this.constructBodyWithMessages(messages),
 			});
 		}
 
 		const res = await fetch(this.endpoint, {
-			method: 'POST',
-			headers: this.headers,
-			body: JSON.stringify(body),
+			method: this.method,
+			headers: this.headers as HeadersInit,
+			body: JSON.stringify(this.constructBodyWithMessages(messages)),
 		});
 
+		if (this.debug) {
+			console.log('[OllamaProvider] Response status:', res.status);
+		}
+
 		if (!res.ok) {
 			throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
 		}
 
-		if (this.stream) {
-			if (!res.body) throw new Error('No response body for streaming');
+		if (this.responseFormat === 'stream') {
+			if (!res.body) {
+				throw new Error('Response body is empty – cannot stream');
+			}
 			const reader = res.body.getReader();
-			const decoder = new TextDecoder();
-			let buffer = '';
-			while (true) {
-				const { value, done } = await reader.read();
-				if (done) break;
-				buffer += decoder.decode(value, { stream: true });
-				const lines = buffer.split('\n');
-				buffer = lines.pop()!;
-				for (const line of lines) {
-					if (!line.trim()) continue;
-					try {
-						const data = JSON.parse(line);
-						if (data.response) yield data.response;
-					} catch (e) {
-						if (this.debug) console.error('Ollama stream parse error:', line, e);
-					}
-				}
+			for await (const chunk of this.handleStreamResponse(reader)) {
+				yield chunk;
 			}
 		} else {
-			const data = await res.json();
-			if (data.response) yield data.response;
+			const payload = await res.json();
+			if (this.debug) {
+				console.log('[OllamaProvider] Response body:', payload);
+			}
+			const text = payload.choices?.[0]?.message?.content;
+			if (typeof text === 'string') {
+				yield text;
+			} else {
+				throw new Error('Unexpected response shape – no text candidate');
+			}
 		}
 	}
+
+	/**
+	 * Maps the chatbot message sender to the provider message sender.
+	 *
+	 * @param sender sender from the chatbot
+	 */
+	private roleMap = (sender: string): 'system' | 'user' | 'assistant' => {
+		switch (sender) {
+			case 'USER':
+				return 'user';
+			case 'SYSTEM':
+				return 'system';
+			default:
+				return 'assistant';
+		}
+	};
+
+	/**
+	 * Builds the full request body.
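+	 * Unless a custom messageParser is supplied, non-string contents and chatbot system
+	 * messages are filtered out, senders are mapped to roles via roleMap, and the configured
+	 * systemMessage (when present) is prepended as a 'system' role entry.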
+ * + * @param messages messages to parse + */ + private constructBodyWithMessages = (messages: Message[]) => { + let parsedMessages; + if (this.messageParser) { + // use parser if specified + parsedMessages = this.messageParser(messages); + } else { + // only handle message contents of type string and exclude chatbot system messages + const filteredMessages = messages.filter( + (message) => typeof message.content === 'string' && message.sender.toUpperCase() !== 'SYSTEM' + ); + parsedMessages = filteredMessages.map((message) => { + const role = this.roleMap(message.sender.toUpperCase()); + const text = message.content; + return { + role, + content: text, + }; + }); + } + + // append system message if specified + if (this.systemMessage) { + parsedMessages = [{ role: 'system', content: this.systemMessage }, ...parsedMessages]; + } + + return { + messages: parsedMessages, + ...this.body, + }; + }; + + /** + * Consumes an SSE/text stream Response and yield each text chunk. + * + * @reader request body reader + */ + private handleStreamResponse = async function* ( + reader: ReadableStreamDefaultReader> + ): AsyncGenerator { + const decoder = new TextDecoder('utf-8'); + let buffer = ''; + + while (true) { + const { value, done } = await reader.read(); + if (done) break; + + buffer += decoder.decode(value, { stream: true }); + const parts = buffer.split(/\r?\n/); + buffer = parts.pop()!; + + for (const line of parts) { + if (!line.startsWith('data: ')) continue; + const json = line.slice('data: '.length).trim(); + try { + const event = JSON.parse(json); + if (event.done === true) return; + const chunk = event.choices?.[0]?.delta?.content; + if (chunk) yield chunk; + } catch (err) { + console.error('Stream parse error', err); + } + } + } + }; } export default OllamaProvider; diff --git a/src/types/provider-message/OllamaProviderMessage.ts b/src/types/provider-message/OllamaProviderMessage.ts new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/types/provider-message/OllamaProviderMessage.ts @@ -0,0 +1 @@ + From 5895b6fc56443e6ba8ad61719a8581ea08238fcc Mon Sep 17 00:00:00 2001 From: philipAthanasopoulos Date: Wed, 30 Jul 2025 10:54:01 +0300 Subject: [PATCH 5/8] fix --- dist/index.cjs | 4 +- dist/index.js | 94 +++++++++++++------------- dist/providers/OllamaProvider.d.ts.map | 2 +- src/providers/OllamaProvider.ts | 12 ++-- 4 files changed, 55 insertions(+), 57 deletions(-) diff --git a/dist/index.cjs b/dist/index.cjs index aeb45b8..72e9ee5 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -1,2 +1,2 @@ -"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const b=require("react"),p=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=b.useCallback(s=>{const r=a()[s.data.nextPath];e(r)},[a,e]);p.useOnRcbEvent(p.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:c}=e,d=b.useCallback(l=>{var m;const 
u=l.data.block;u.llmConnector&&(l.preventDefault(),l.type==="rcb-pre-process-block"&&((m=u.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{c()&&r()})))},[n,s,r,c]);p.useOnRcbEvent(p.RcbEvent.PRE_PROCESS_BLOCK,d),p.useOnRcbEvent(p.RcbEvent.POST_PROCESS_BLOCK,d)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:c,streamMessage:d,endStreamMessage:l,getIsChatBotVisible:u}=t,m=e.providerRef.current.sendMessages(a),f=e.outputTypeRef.current,g=e.outputSpeedRef.current;if(f==="full"){let h="";for await(const y of m){if((R=s.signal)!=null&&R.aborted)break;h+=y}r(!1),c(h),setTimeout(()=>{o(!1),u()&&i()})}else{const h=X(Z(m,n),f,g);let y="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),y+=C,d(y)}l(),setTimeout(()=>{o(!1),u()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:d,toggleIsBotTyping:l,goToPath:u,focusTextArea:m,getIsChatBotVisible:f}=e,g=b.useRef(null),R=b.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,y=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${y}`),y==="USER"&&(l(!0),d(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=g.current)==null||v.abort(),g.current=null,u(P)}const S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new AbortController;g.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{l(!1),d(!1),setTimeout(()=>{f()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):c(o.current)})},te))},[a,e]);p.useOnRcbEvent(p.RcbEvent.POST_INJECT_MESSAGE,R),p.useOnRcbEvent(p.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),p.useOnRcbEvent(p.RcbEvent.STOP_STREAM_MESSAGE,R),b.useEffect(()=>{const M=async h=>{var y;if(r.current){const S=await r.current(h);S&&((y=g.current)==null||y.abort(),g.current=null,u(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=b.useRef([]),t=b.useRef(null),s=b.useRef("chunk"),n=b.useRef(30),r=b.useRef(0),o=b.useRef(""),i=b.useRef("Unable to get response, please try again."),c=b.useRef(null),d=b.useRef(null),{getFlow:l}=p.useFlow(),{speakAudio:u}=p.useAudio(),{messages:m,injectMessage:f,simulateStreamMessage:g,streamMessage:R,endStreamMessage:M}=p.useMessages(),{goToPath:h}=p.usePaths(),{toggleTextAreaDisabled:y,focusTextArea:S}=p.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=p.useChatWindow(),E={...Y,...a??{}};b.useEffect(()=>{e.current=m},[m]),q(l,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get 
response, please try again.",c.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,d.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:d},P={speakAudio:u,injectMessage:f,simulateStreamMessage:g,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:y,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),c=o.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,c,d,l;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:u,done:m}=await s.read();if(m)break;r+=n.decode(u,{stream:!0});const f=r.split(` -`);r=f.pop();for(const g of f){const R=g.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const y=(l=(d=(c=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:d[0])==null?void 0:l.text;y&&(yield y)}catch(h){console.error("SSE JSON parse error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),d={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:d,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const c=t.body.getReader();for await(const d of this.handleStreamResponse(c))yield d}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const d=(i=(o=(r=(n=(s=c.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 
0:i.text;if(typeof d=="string")yield d;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:d}=await t.read();if(d)break;n+=s.decode(c,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const u of l){if(!u.startsWith("data: "))continue;const m=u.slice(6).trim();if(m==="[DONE]")return;try{const g=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;g&&(yield g)}catch(f){console.error("Stream parse error",f)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const 
o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const d of t){const l=(r=(n=d.choices[0])==null?void 0:n.delta)==null?void 0:r.content;l&&(yield l)}else(c=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:d}=await t.read();if(d)break;n+=s.decode(c,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const u of l){if(!u.startsWith("data: "))continue;const m=u.slice(6).trim();try{const f=JSON.parse(m);if(f.done===!0)return;const g=(i=(o=(r=f.choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;g&&(yield g)}catch(f){console.error("Stream parse error",f)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API 
error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; +"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),u=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=g.useCallback(s=>{const r=a()[s.data.nextPath];e(r)},[a,e]);u.useOnRcbEvent(u.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:c}=e,d=g.useCallback(l=>{var m;const p=l.data.block;p.llmConnector&&(l.preventDefault(),l.type==="rcb-pre-process-block"&&((m=p.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{c()&&r()})))},[n,s,r,c]);u.useOnRcbEvent(u.RcbEvent.PRE_PROCESS_BLOCK,d),u.useOnRcbEvent(u.RcbEvent.POST_PROCESS_BLOCK,d)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:c,streamMessage:d,endStreamMessage:l,getIsChatBotVisible:p}=t,m=e.providerRef.current.sendMessages(a),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let h="";for await(const f of m){if((R=s.signal)!=null&&R.aborted)break;h+=f}r(!1),c(h),setTimeout(()=>{o(!1),p()&&i()})}else{const h=X(Z(m,n),b,y);let f="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=C,d(f)}l(),setTimeout(()=>{o(!1),p()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:d,toggleIsBotTyping:l,goToPath:p,focusTextArea:m,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,f=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(l(!0),d(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=y.current)==null||v.abort(),y.current=null,p(P)}const 
S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new AbortController;y.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{l(!1),d(!1),setTimeout(()=>{b()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):c(o.current)})},te))},[a,e]);u.useOnRcbEvent(u.RcbEvent.POST_INJECT_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async h=>{var f;if(r.current){const S=await r.current(h);S&&((f=y.current)==null||f.abort(),y.current=null,p(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),n=g.useRef(30),r=g.useRef(0),o=g.useRef(""),i=g.useRef("Unable to get response, please try again."),c=g.useRef(null),d=g.useRef(null),{getFlow:l}=u.useFlow(),{speakAudio:p}=u.useAudio(),{messages:m,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=u.useMessages(),{goToPath:h}=u.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=u.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=u.useChatWindow(),E={...Y,...a??{}};g.useEffect(()=>{e.current=m},[m]),q(l,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get response, please try again.",c.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,d.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:d},P={speakAudio:p,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),c=o.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,c,d,l;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:p,done:m}=await s.read();if(m)break;r+=n.decode(p,{stream:!0});const b=r.split(` +`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(l=(d=(c=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:d[0])==null?void 0:l.text;f&&(yield f)}catch(h){console.error("SSE JSON parse 
error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),d={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:d,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const c=t.body.getReader();for await(const d of this.handleStreamResponse(c))yield d}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const d=(i=(o=(r=(n=(s=c.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 0:i.text;if(typeof d=="string")yield d;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:d}=await t.read();if(d)break;n+=s.decode(c,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const p of l){if(!p.startsWith("data: "))continue;const m=p.slice(6).trim();if(m==="[DONE]")return;try{const y=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete 
o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const d of t){const l=(r=(n=d.choices[0])==null?void 0:n.delta)==null?void 0:r.content;l&&(yield l)}else(c=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{model:this.body.model,messages:s}},this.handleStreamResponse=async function*(t){const s=new TextDecoder("utf-8");let n="";for(;;){const{value:r,done:o}=await t.read();if(o)break;n+=s.decode(r,{stream:!0});const i=n.split(/\r?\n/);n=i.pop();for(const c of i){if(!c.startsWith("data: "))continue;const d=c.slice(6).trim();try{const l=JSON.parse(d);if(l.done===!0)return;l.message&&typeof 
l.message.content=="string"&&(yield l.message.content)}catch(l){console.error("Stream parse error",l)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"http://localhost:11434/api/chat",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; diff --git a/dist/index.js b/dist/index.js index 36730a5..988c514 100644 --- a/dist/index.js +++ b/dist/index.js @@ -55,23 +55,23 @@ const H = { streamMessage: d, endStreamMessage: l, getIsChatBotVisible: u - } = t, p = e.providerRef.current.sendMessages(i), m = e.outputTypeRef.current, f = e.outputSpeedRef.current; - if (m === "full") { + } = t, p = e.providerRef.current.sendMessages(i), f = e.outputTypeRef.current, g = e.outputSpeedRef.current; + if (f === "full") { let h = ""; - for await (const g of p) { + for await (const m of p) { if ((M = s.signal) != null && M.aborted) break; - h += g; + h += m; } r(!1), c(h), setTimeout(() => { o(!1), u() && a(); }); } else { - const h = Z(ee(p, n), m, f); - let g = "", b = !1; + const h = Z(ee(p, n), f, g); + let m = "", b = !1; for await (const E of h) { if ((y = s.signal) != null && y.aborted) break; - b || (r(!1), b = !0), g += E, d(g); + b || (r(!1), b = !0), m += E, d(m); } l(), setTimeout(() => { o(!1), u() && a(); @@ -85,23 +85,23 @@ const H = { toggleIsBotTyping: l, goToPath: u, focusTextArea: p, - getIsChatBotVisible: m - } = e, f = S(null), M = O( + getIsChatBotVisible: f + } = e, g = S(null), M = O( (y) => { if (!i.providerRef.current) return; - const h = y.data.message, g = h.sender.toUpperCase(); - h.tags = h.tags ?? [], h.tags.push(`rcb-llm-connector-plugin:${g}`), g === "USER" && (l(!0), d(!0), setTimeout(async () => { + const h = y.data.message, m = h.sender.toUpperCase(); + h.tags = h.tags ?? 
[], h.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (l(!0), d(!0), setTimeout(async () => { var v; if (n.current) { const R = await n.current(h); if (R) - return (v = f.current) == null || v.abort(), f.current = null, u(R); + return (v = g.current) == null || v.abort(), g.current = null, u(R); } const b = i.historySizeRef.current, E = t.current, x = b ? [...E.slice(-(b - 1)), h] : [h], P = new AbortController(); - f.current = P, te(x, i, e, { signal: P.signal }).catch((R) => { + g.current = P, te(x, i, e, { signal: P.signal }).catch((R) => { l(!1), d(!1), setTimeout(() => { - m() && p(); + f() && p(); }), console.error("LLM prompt failed", R), s.current === "full" ? a(o.current) : c(o.current); }); }, se)); @@ -110,16 +110,16 @@ const H = { ); C(T.POST_INJECT_MESSAGE, M), C(T.STOP_SIMULATE_STREAM_MESSAGE, M), C(T.STOP_STREAM_MESSAGE, M), z(() => { const y = async (h) => { - var g; + var m; if (r.current) { const b = await r.current(h); - b && ((g = f.current) == null || g.abort(), f.current = null, u(b)); + b && ((m = g.current) == null || m.abort(), g.current = null, u(b)); } }; return window.addEventListener("keydown", y), () => window.removeEventListener("keydown", y); }, []); }, oe = (i) => { - const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), c = S(null), d = S(null), { getFlow: l } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: m, simulateStreamMessage: f, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: g, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; + const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), c = S(null), d = S(null), { getFlow: l } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: f, simulateStreamMessage: g, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: m, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; z(() => { e.current = p; }, [p]), q(l, (w) => { @@ -138,11 +138,11 @@ const H = { onKeyDownRef: d }, R = { speakAudio: u, - injectMessage: m, - simulateStreamMessage: f, + injectMessage: f, + simulateStreamMessage: g, streamMessage: M, endStreamMessage: y, - toggleTextAreaDisabled: g, + toggleTextAreaDisabled: m, toggleIsBotTyping: E, focusTextArea: b, goToPath: h, @@ -198,16 +198,16 @@ class ce { const { value: u, done: p } = await s.read(); if (p) break; r += n.decode(u, { stream: !0 }); - const m = r.split(` + const f = r.split(` `); - r = m.pop(); - for (const f of m) { - const M = f.trim(); + r = f.pop(); + for (const g of f) { + const M = g.trim(); if (!M.startsWith("data: ")) continue; const y = M.slice(6); try { - const g = (l = (d = (c = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : d[0]) == null ? void 0 : l.text; - g && (yield g); + const m = (l = (d = (c = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : d[0]) == null ? void 0 : l.text; + m && (yield m); } catch (h) { console.error("SSE JSON parse error:", y, h); } @@ -312,10 +312,10 @@ class de { const p = u.slice(6).trim(); if (p === "[DONE]") return; try { - const f = (a = (o = (r = JSON.parse(p).choices) == null ? 
void 0 : r[0]) == null ? void 0 : o.delta) == null ? void 0 : a.content; - f && (yield f); - } catch (m) { - console.error("Stream parse error", m); + const g = (a = (o = (r = JSON.parse(p).choices) == null ? void 0 : r[0]) == null ? void 0 : o.delta) == null ? void 0 : a.content; + g && (yield g); + } catch (f) { + console.error("Stream parse error", f); } } } @@ -476,33 +476,31 @@ class he { content: a }; }), this.systemMessage && (s = [{ role: "system", content: this.systemMessage }, ...s]), { - messages: s, - ...this.body + model: this.body.model, + messages: s }; }, this.handleStreamResponse = async function* (t) { - var r, o, a; const s = new TextDecoder("utf-8"); let n = ""; for (; ; ) { - const { value: c, done: d } = await t.read(); - if (d) break; - n += s.decode(c, { stream: !0 }); - const l = n.split(/\r?\n/); - n = l.pop(); - for (const u of l) { - if (!u.startsWith("data: ")) continue; - const p = u.slice(6).trim(); + const { value: r, done: o } = await t.read(); + if (o) break; + n += s.decode(r, { stream: !0 }); + const a = n.split(/\r?\n/); + n = a.pop(); + for (const c of a) { + if (!c.startsWith("data: ")) continue; + const d = c.slice(6).trim(); try { - const m = JSON.parse(p); - if (m.done === !0) return; - const f = (a = (o = (r = m.choices) == null ? void 0 : r[0]) == null ? void 0 : o.delta) == null ? void 0 : a.content; - f && (yield f); - } catch (m) { - console.error("Stream parse error", m); + const l = JSON.parse(d); + if (l.done === !0) return; + l.message && typeof l.message.content == "string" && (yield l.message.content); + } catch (l) { + console.error("Stream parse error", l); } } } - }, this.method = e.method ?? "POST", this.endpoint = e.baseUrl ?? "https://api.openai.com/v1/chat/completions", this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.debug = e.debug ?? !1, this.headers = { + }, this.method = e.method ?? "POST", this.endpoint = e.baseUrl ?? "http://localhost:11434/api/chat", this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.debug = e.debug ?? !1, this.headers = { "Content-Type": "application/json", Accept: this.responseFormat === "stream" ? 
"text/event-stream" : "application/json", ...e.headers diff --git a/dist/providers/OllamaProvider.d.ts.map b/dist/providers/OllamaProvider.d.ts.map index bf88f1f..61c5a49 100644 --- a/dist/providers/OllamaProvider.d.ts.map +++ b/dist/providers/OllamaProvider.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA6B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA2B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file +{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA4B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts index 944d9d3..c19dd13 100644 --- a/src/providers/OllamaProvider.ts +++ b/src/providers/OllamaProvider.ts @@ -24,7 +24,7 @@ class OllamaProvider implements Provider { */ public constructor(config: OpenaiProviderConfig) { this.method = config.method ?? 'POST'; - this.endpoint = config.baseUrl ?? 'https://api.openai.com/v1/chat/completions'; + this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/chat'; this.systemMessage = config.systemMessage; this.responseFormat = config.responseFormat ?? 
'stream'; this.messageParser = config.messageParser; @@ -126,10 +126,8 @@ class OllamaProvider implements Provider { private constructBodyWithMessages = (messages: Message[]) => { let parsedMessages; if (this.messageParser) { - // use parser if specified parsedMessages = this.messageParser(messages); } else { - // only handle message contents of type string and exclude chatbot system messages const filteredMessages = messages.filter( (message) => typeof message.content === 'string' && message.sender.toUpperCase() !== 'SYSTEM' ); @@ -148,9 +146,10 @@ class OllamaProvider implements Provider { parsedMessages = [{ role: 'system', content: this.systemMessage }, ...parsedMessages]; } + // Only include model and messages for Ollama return { + model: this.body.model, messages: parsedMessages, - ...this.body, }; }; @@ -179,8 +178,9 @@ class OllamaProvider implements Provider { try { const event = JSON.parse(json); if (event.done === true) return; - const chunk = event.choices?.[0]?.delta?.content; - if (chunk) yield chunk; + if (event.message && typeof event.message.content === 'string') { + yield event.message.content; + } } catch (err) { console.error('Stream parse error', err); } From d6ff295e6a3e794e06e6ce61ee7bdfc2484fa6ea Mon Sep 17 00:00:00 2001 From: philipAthanasopoulos Date: Wed, 30 Jul 2025 11:11:57 +0300 Subject: [PATCH 6/8] fix --- dist/index.cjs | 4 +- dist/index.js | 80 +++++++++++++------------- dist/providers/OllamaProvider.d.ts.map | 2 +- src/providers/OllamaProvider.ts | 1 + 4 files changed, 44 insertions(+), 43 deletions(-) diff --git a/dist/index.cjs b/dist/index.cjs index 72e9ee5..6f8f81a 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -1,2 +1,2 @@ -"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),u=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=g.useCallback(s=>{const r=a()[s.data.nextPath];e(r)},[a,e]);u.useOnRcbEvent(u.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:c}=e,d=g.useCallback(l=>{var m;const p=l.data.block;p.llmConnector&&(l.preventDefault(),l.type==="rcb-pre-process-block"&&((m=p.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{c()&&r()})))},[n,s,r,c]);u.useOnRcbEvent(u.RcbEvent.PRE_PROCESS_BLOCK,d),u.useOnRcbEvent(u.RcbEvent.POST_PROCESS_BLOCK,d)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var 
R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:c,streamMessage:d,endStreamMessage:l,getIsChatBotVisible:p}=t,m=e.providerRef.current.sendMessages(a),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let h="";for await(const f of m){if((R=s.signal)!=null&&R.aborted)break;h+=f}r(!1),c(h),setTimeout(()=>{o(!1),p()&&i()})}else{const h=X(Z(m,n),b,y);let f="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=C,d(f)}l(),setTimeout(()=>{o(!1),p()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:d,toggleIsBotTyping:l,goToPath:p,focusTextArea:m,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,f=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(l(!0),d(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=y.current)==null||v.abort(),y.current=null,p(P)}const S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new AbortController;y.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{l(!1),d(!1),setTimeout(()=>{b()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):c(o.current)})},te))},[a,e]);u.useOnRcbEvent(u.RcbEvent.POST_INJECT_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async h=>{var f;if(r.current){const S=await r.current(h);S&&((f=y.current)==null||f.abort(),y.current=null,p(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),n=g.useRef(30),r=g.useRef(0),o=g.useRef(""),i=g.useRef("Unable to get response, please try again."),c=g.useRef(null),d=g.useRef(null),{getFlow:l}=u.useFlow(),{speakAudio:p}=u.useAudio(),{messages:m,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=u.useMessages(),{goToPath:h}=u.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=u.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=u.useChatWindow(),E={...Y,...a??{}};g.useEffect(()=>{e.current=m},[m]),q(l,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get response, please try again.",c.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,d.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:d},P={speakAudio:p,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return 
E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),c=o.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,c,d,l;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:p,done:m}=await s.read();if(m)break;r+=n.decode(p,{stream:!0});const b=r.split(` -`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(l=(d=(c=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:d[0])==null?void 0:l.text;f&&(yield f)}catch(h){console.error("SSE JSON parse error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),d={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:d,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const c=t.body.getReader();for await(const d of this.handleStreamResponse(c))yield d}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const d=(i=(o=(r=(n=(s=c.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 0:i.text;if(typeof d=="string")yield d;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const 
o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:d}=await t.read();if(d)break;n+=s.decode(c,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const p of l){if(!p.startsWith("data: "))continue;const m=p.slice(6).trim();if(m==="[DONE]")return;try{const y=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] 
Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const d of t){const l=(r=(n=d.choices[0])==null?void 0:n.delta)==null?void 0:r.content;l&&(yield l)}else(c=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{model:this.body.model,messages:s}},this.handleStreamResponse=async function*(t){const s=new TextDecoder("utf-8");let n="";for(;;){const{value:r,done:o}=await t.read();if(o)break;n+=s.decode(r,{stream:!0});const i=n.split(/\r?\n/);n=i.pop();for(const c of i){if(!c.startsWith("data: "))continue;const d=c.slice(6).trim();try{const l=JSON.parse(d);if(l.done===!0)return;l.message&&typeof l.message.content=="string"&&(yield l.message.content)}catch(l){console.error("Stream parse error",l)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"http://localhost:11434/api/chat",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; +"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var 
L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),u=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=g.useCallback(s=>{const r=a()[s.data.nextPath];e(r)},[a,e]);u.useOnRcbEvent(u.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:c}=e,l=g.useCallback(d=>{var m;const p=d.data.block;p.llmConnector&&(d.preventDefault(),d.type==="rcb-pre-process-block"&&((m=p.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{c()&&r()})))},[n,s,r,c]);u.useOnRcbEvent(u.RcbEvent.PRE_PROCESS_BLOCK,l),u.useOnRcbEvent(u.RcbEvent.POST_PROCESS_BLOCK,l)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:c,streamMessage:l,endStreamMessage:d,getIsChatBotVisible:p}=t,m=e.providerRef.current.sendMessages(a),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let h="";for await(const f of m){if((R=s.signal)!=null&&R.aborted)break;h+=f}r(!1),c(h),setTimeout(()=>{o(!1),p()&&i()})}else{const h=X(Z(m,n),b,y);let f="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=C,l(f)}d(),setTimeout(()=>{o(!1),p()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:l,toggleIsBotTyping:d,goToPath:p,focusTextArea:m,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,f=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(d(!0),l(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=y.current)==null||v.abort(),y.current=null,p(P)}const S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new AbortController;y.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{d(!1),l(!1),setTimeout(()=>{b()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):c(o.current)})},te))},[a,e]);u.useOnRcbEvent(u.RcbEvent.POST_INJECT_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async h=>{var f;if(r.current){const S=await r.current(h);S&&((f=y.current)==null||f.abort(),y.current=null,p(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),n=g.useRef(30),r=g.useRef(0),o=g.useRef(""),i=g.useRef("Unable to get response, please try 
again."),c=g.useRef(null),l=g.useRef(null),{getFlow:d}=u.useFlow(),{speakAudio:p}=u.useAudio(),{messages:m,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=u.useMessages(),{goToPath:h}=u.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=u.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=u.useChatWindow(),E={...Y,...a??{}};g.useEffect(()=>{e.current=m},[m]),q(d,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get response, please try again.",c.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,l.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:l},P={speakAudio:p,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),c=o.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,c,l,d;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:p,done:m}=await s.read();if(m)break;r+=n.decode(p,{stream:!0});const b=r.split(` +`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(d=(l=(c=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:l[0])==null?void 0:d.text;f&&(yield f)}catch(h){console.error("SSE JSON parse error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const 
c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),l={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:l,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const c=t.body.getReader();for await(const l of this.handleStreamResponse(c))yield l}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const l=(i=(o=(r=(n=(s=c.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 0:i.text;if(typeof l=="string")yield l;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:l}=await t.read();if(l)break;n+=s.decode(c,{stream:!0});const d=n.split(/\r?\n/);n=d.pop();for(const p of d){if(!p.startsWith("data: "))continue;const m=p.slice(6).trim();if(m==="[DONE]")return;try{const y=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield 
i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const l of t){const d=(r=(n=l.choices[0])==null?void 0:n.delta)==null?void 0:r.content;d&&(yield d)}else(c=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{model:this.body.model,messages:s}},this.handleStreamResponse=async function*(t){const s=new TextDecoder("utf-8");let n="";for(;;){const{value:r,done:o}=await t.read();if(o)break;n+=s.decode(r,{stream:!0});const i=n.split(/\r?\n/);n=i.pop();for(const c of i){if(!c.startsWith("data: "))continue;const l=c.slice(6).trim();try{const d=JSON.parse(l);if(console.log(d),d.done===!0)return;d.message&&typeof d.message.content=="string"&&(yield d.message.content)}catch(d){console.error("Stream parse error",d)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"http://localhost:11434/api/chat",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete 
o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; diff --git a/dist/index.js b/dist/index.js index 988c514..fc3f640 100644 --- a/dist/index.js +++ b/dist/index.js @@ -19,17 +19,17 @@ const H = { injectMessage: o, simulateStreamMessage: a, getIsChatBotVisible: c - } = e, d = O( - (l) => { + } = e, l = O( + (d) => { var p; - const u = l.data.block; - u.llmConnector && (l.preventDefault(), l.type === "rcb-pre-process-block" && ((p = u.llmConnector) != null && p.initialMessage && (t.current === "full" ? o(i.initialMessageRef.current) : a(i.initialMessageRef.current)), n(!1), s(!1), setTimeout(() => { + const u = d.data.block; + u.llmConnector && (d.preventDefault(), d.type === "rcb-pre-process-block" && ((p = u.llmConnector) != null && p.initialMessage && (t.current === "full" ? o(i.initialMessageRef.current) : a(i.initialMessageRef.current)), n(!1), s(!1), setTimeout(() => { c() && r(); }))); }, [n, s, r, c] ); - C(T.PRE_PROCESS_BLOCK, d), C(T.POST_PROCESS_BLOCK, d); + C(T.PRE_PROCESS_BLOCK, l), C(T.POST_PROCESS_BLOCK, l); }, Q = async function* (i, e) { for await (const t of i) for (const s of t) @@ -52,8 +52,8 @@ const H = { toggleTextAreaDisabled: o, focusTextArea: a, injectMessage: c, - streamMessage: d, - endStreamMessage: l, + streamMessage: l, + endStreamMessage: d, getIsChatBotVisible: u } = t, p = e.providerRef.current.sendMessages(i), f = e.outputTypeRef.current, g = e.outputSpeedRef.current; if (f === "full") { @@ -71,9 +71,9 @@ const H = { for await (const E of h) { if ((y = s.signal) != null && y.aborted) break; - b || (r(!1), b = !0), m += E, d(m); + b || (r(!1), b = !0), m += E, l(m); } - l(), setTimeout(() => { + d(), setTimeout(() => { o(!1), u() && a(); }); } @@ -81,8 +81,8 @@ const H = { const { messagesRef: t, outputTypeRef: s, onUserMessageRef: n, onKeyDownRef: r, errorMessageRef: o } = i, { injectMessage: a, simulateStreamMessage: c, - toggleTextAreaDisabled: d, - toggleIsBotTyping: l, + toggleTextAreaDisabled: l, + toggleIsBotTyping: d, goToPath: u, focusTextArea: p, getIsChatBotVisible: f @@ -91,7 +91,7 @@ const H = { if (!i.providerRef.current) return; const h = y.data.message, m = h.sender.toUpperCase(); - h.tags = h.tags ?? [], h.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (l(!0), d(!0), setTimeout(async () => { + h.tags = h.tags ?? [], h.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (d(!0), l(!0), setTimeout(async () => { var v; if (n.current) { const R = await n.current(h); @@ -100,7 +100,7 @@ const H = { } const b = i.historySizeRef.current, E = t.current, x = b ? 
[...E.slice(-(b - 1)), h] : [h], P = new AbortController(); g.current = P, te(x, i, e, { signal: P.signal }).catch((R) => { - l(!1), d(!1), setTimeout(() => { + d(!1), l(!1), setTimeout(() => { f() && p(); }), console.error("LLM prompt failed", R), s.current === "full" ? a(o.current) : c(o.current); }); @@ -119,12 +119,12 @@ const H = { return window.addEventListener("keydown", y), () => window.removeEventListener("keydown", y); }, []); }, oe = (i) => { - const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), c = S(null), d = S(null), { getFlow: l } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: f, simulateStreamMessage: g, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: m, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; + const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), c = S(null), l = S(null), { getFlow: d } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: f, simulateStreamMessage: g, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: m, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; z(() => { e.current = p; - }, [p]), q(l, (w) => { + }, [p]), q(d, (w) => { var k, B, U, F, I, W, j, D, $, _; - t.current = ((k = w.llmConnector) == null ? void 0 : k.provider) ?? null, s.current = ((B = w.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", n.current = ((U = w.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, r.current = ((F = w.llmConnector) == null ? void 0 : F.historySize) ?? 0, o.current = ((I = w.llmConnector) == null ? void 0 : I.initialMessage) ?? "", a.current = ((W = w.llmConnector) == null ? void 0 : W.errorMessage) ?? "Unable to get response, please try again.", c.current = ((D = (j = w.llmConnector) == null ? void 0 : j.stopConditions) == null ? void 0 : D.onUserMessage) ?? null, d.current = ((_ = ($ = w.llmConnector) == null ? void 0 : $.stopConditions) == null ? void 0 : _.onKeyDown) ?? null; + t.current = ((k = w.llmConnector) == null ? void 0 : k.provider) ?? null, s.current = ((B = w.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", n.current = ((U = w.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, r.current = ((F = w.llmConnector) == null ? void 0 : F.historySize) ?? 0, o.current = ((I = w.llmConnector) == null ? void 0 : I.initialMessage) ?? "", a.current = ((W = w.llmConnector) == null ? void 0 : W.errorMessage) ?? "Unable to get response, please try again.", c.current = ((D = (j = w.llmConnector) == null ? void 0 : j.stopConditions) == null ? void 0 : D.onUserMessage) ?? null, l.current = ((_ = ($ = w.llmConnector) == null ? void 0 : $.stopConditions) == null ? void 0 : _.onKeyDown) ?? 
null; }); const v = { providerRef: t, @@ -135,7 +135,7 @@ const H = { initialMessageRef: o, errorMessageRef: a, onUserMessageRef: c, - onKeyDownRef: d + onKeyDownRef: l }, R = { speakAudio: u, injectMessage: f, @@ -191,7 +191,7 @@ class ce { ...this.body }; }, this.handleStreamResponse = async function* (s) { - var o, a, c, d, l; + var o, a, c, l, d; const n = new TextDecoder("utf-8"); let r = ""; for (; ; ) { @@ -206,7 +206,7 @@ class ce { if (!M.startsWith("data: ")) continue; const y = M.slice(6); try { - const m = (l = (d = (c = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : d[0]) == null ? void 0 : l.text; + const m = (d = (l = (c = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : l[0]) == null ? void 0 : d.text; m && (yield m); } catch (h) { console.error("SSE JSON parse error:", y, h); @@ -234,11 +234,11 @@ class ce { async *sendMessages(e) { var s, n, r, o, a; if (this.debug) { - const c = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), d = { ...this.headers }; + const c = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), l = { ...this.headers }; console.log("[GeminiProvider] Request:", { method: this.method, endpoint: c, - headers: d, + headers: l, body: this.constructBodyWithMessages(e) }); } @@ -253,14 +253,14 @@ class ce { if (!t.body) throw new Error("Response body is empty – cannot stream"); const c = t.body.getReader(); - for await (const d of this.handleStreamResponse(c)) - yield d; + for await (const l of this.handleStreamResponse(c)) + yield l; } else { const c = await t.json(); this.debug && console.log("[GeminiProvider] Response body:", c); - const d = (a = (o = (r = (n = (s = c.candidates) == null ? void 0 : s[0]) == null ? void 0 : n.content) == null ? void 0 : r.parts) == null ? void 0 : o[0]) == null ? void 0 : a.text; - if (typeof d == "string") - yield d; + const l = (a = (o = (r = (n = (s = c.candidates) == null ? void 0 : s[0]) == null ? void 0 : n.content) == null ? void 0 : r.parts) == null ? void 0 : o[0]) == null ? void 0 : a.text; + if (typeof l == "string") + yield l; else throw new Error("Unexpected response shape – no text candidate"); } @@ -302,12 +302,12 @@ class de { const s = new TextDecoder("utf-8"); let n = ""; for (; ; ) { - const { value: c, done: d } = await t.read(); - if (d) break; + const { value: c, done: l } = await t.read(); + if (l) break; n += s.decode(c, { stream: !0 }); - const l = n.split(/\r?\n/); - n = l.pop(); - for (const u of l) { + const d = n.split(/\r?\n/); + n = d.pop(); + for (const u of d) { if (!u.startsWith("data: ")) continue; const p = u.slice(6).trim(); if (p === "[DONE]") return; @@ -441,9 +441,9 @@ class le { }); const t = await ((s = this.engine) == null ? void 0 : s.chat.completions.create(this.constructBodyWithMessages(e))); if (this.debug && console.log("[WebLlmProvider] Response:", t), t && Symbol.asyncIterator in t) - for await (const d of t) { - const l = (r = (n = d.choices[0]) == null ? void 0 : n.delta) == null ? void 0 : r.content; - l && (yield l); + for await (const l of t) { + const d = (r = (n = l.choices[0]) == null ? void 0 : n.delta) == null ? void 0 : r.content; + d && (yield d); } else (c = (a = (o = t == null ? void 0 : t.choices) == null ? void 0 : o[0]) == null ? 
void 0 : a.message) != null && c.content && (yield t.choices[0].message.content); } @@ -490,13 +490,13 @@ class he { n = a.pop(); for (const c of a) { if (!c.startsWith("data: ")) continue; - const d = c.slice(6).trim(); + const l = c.slice(6).trim(); try { - const l = JSON.parse(d); - if (l.done === !0) return; - l.message && typeof l.message.content == "string" && (yield l.message.content); - } catch (l) { - console.error("Stream parse error", l); + const d = JSON.parse(l); + if (console.log(d), d.done === !0) return; + d.message && typeof d.message.content == "string" && (yield d.message.content); + } catch (d) { + console.error("Stream parse error", d); } } } diff --git a/dist/providers/OllamaProvider.d.ts.map b/dist/providers/OllamaProvider.d.ts.map index 61c5a49..e020807 100644 --- a/dist/providers/OllamaProvider.d.ts.map +++ b/dist/providers/OllamaProvider.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA4B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file +{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA6B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts index c19dd13..5f3c0e6 100644 --- a/src/providers/OllamaProvider.ts +++ b/src/providers/OllamaProvider.ts @@ -177,6 +177,7 @@ class OllamaProvider implements Provider { const json = line.slice('data: '.length).trim(); try { const event = JSON.parse(json); + console.log(event); if (event.done === true) return; if (event.message && typeof event.message.content === 'string') { yield event.message.content; From fa8bd10894dbcb861a4ea130380f1821de5daaa7 Mon Sep 17 00:00:00 2001 From: philipAthanasopoulos Date: Wed, 30 Jul 2025 11:51:09 +0300 Subject: [PATCH 7/8] fixed --- dist/App.d.ts.map | 2 +- dist/index.cjs | 4 +- dist/index.js | 129 +++++---- dist/providers/OllamaProvider.d.ts.map | 2 +- src/App.tsx | 3 +- src/providers/OllamaProvider.ts | 368 ++++++++++++------------- 6 files changed, 252 insertions(+), 256 deletions(-) diff --git a/dist/App.d.ts.map b/dist/App.d.ts.map index 5711b1e..05a5b83 100644 --- a/dist/App.d.ts.map +++ 
b/dist/App.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"App.d.ts","sourceRoot":"","sources":["../src/App.tsx"],"names":[],"mappings":"AAaA,QAAA,MAAM,GAAG,+CAsHR,CAAC;AAEF,eAAe,GAAG,CAAC"} \ No newline at end of file +{"version":3,"file":"App.d.ts","sourceRoot":"","sources":["../src/App.tsx"],"names":[],"mappings":"AAaA,QAAA,MAAM,GAAG,+CAuHR,CAAC;AAEF,eAAe,GAAG,CAAC"} \ No newline at end of file diff --git a/dist/index.cjs b/dist/index.cjs index 6f8f81a..722d057 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -1,2 +1,2 @@ -"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),u=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=g.useCallback(s=>{const r=a()[s.data.nextPath];e(r)},[a,e]);u.useOnRcbEvent(u.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:c}=e,l=g.useCallback(d=>{var m;const p=d.data.block;p.llmConnector&&(d.preventDefault(),d.type==="rcb-pre-process-block"&&((m=p.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{c()&&r()})))},[n,s,r,c]);u.useOnRcbEvent(u.RcbEvent.PRE_PROCESS_BLOCK,l),u.useOnRcbEvent(u.RcbEvent.POST_PROCESS_BLOCK,l)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:c,streamMessage:l,endStreamMessage:d,getIsChatBotVisible:p}=t,m=e.providerRef.current.sendMessages(a),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let h="";for await(const f of m){if((R=s.signal)!=null&&R.aborted)break;h+=f}r(!1),c(h),setTimeout(()=>{o(!1),p()&&i()})}else{const h=X(Z(m,n),b,y);let f="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=C,l(f)}d(),setTimeout(()=>{o(!1),p()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:c,toggleTextAreaDisabled:l,toggleIsBotTyping:d,goToPath:p,focusTextArea:m,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,f=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(d(!0),l(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=y.current)==null||v.abort(),y.current=null,p(P)}const S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new 
AbortController;y.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{d(!1),l(!1),setTimeout(()=>{b()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):c(o.current)})},te))},[a,e]);u.useOnRcbEvent(u.RcbEvent.POST_INJECT_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async h=>{var f;if(r.current){const S=await r.current(h);S&&((f=y.current)==null||f.abort(),y.current=null,p(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),n=g.useRef(30),r=g.useRef(0),o=g.useRef(""),i=g.useRef("Unable to get response, please try again."),c=g.useRef(null),l=g.useRef(null),{getFlow:d}=u.useFlow(),{speakAudio:p}=u.useAudio(),{messages:m,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=u.useMessages(),{goToPath:h}=u.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=u.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=u.useChatWindow(),E={...Y,...a??{}};g.useEffect(()=>{e.current=m},[m]),q(d,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get response, please try again.",c.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,l.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:c,onKeyDownRef:l},P={speakAudio:p,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),c=o.content;return{role:i,parts:[{text:c}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,c,l,d;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:p,done:m}=await s.read();if(m)break;r+=n.decode(p,{stream:!0});const b=r.split(` -`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(d=(l=(c=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:c.parts)==null?void 0:l[0])==null?void 0:d.text;f&&(yield f)}catch(h){console.error("SSE JSON parse 
error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const c=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),l={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:c,headers:l,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const c=t.body.getReader();for await(const l of this.handleStreamResponse(c))yield l}else{const c=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",c);const l=(i=(o=(r=(n=(s=c.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 0:i.text;if(typeof l=="string")yield l;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:c,done:l}=await t.read();if(l)break;n+=s.decode(c,{stream:!0});const d=n.split(/\r?\n/);n=d.pop();for(const p of d){if(!p.startsWith("data: "))continue;const m=p.slice(6).trim();if(m==="[DONE]")return;try{const y=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete 
o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,c;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const l of t){const d=(r=(n=l.choices[0])==null?void 0:n.delta)==null?void 0:r.content;d&&(yield d)}else(c=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&c.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{model:this.body.model,messages:s}},this.handleStreamResponse=async function*(t){const s=new TextDecoder("utf-8");let n="";for(;;){const{value:r,done:o}=await t.read();if(o)break;n+=s.decode(r,{stream:!0});const i=n.split(/\r?\n/);n=i.pop();for(const c of i){if(!c.startsWith("data: "))continue;const l=c.slice(6).trim();try{const d=JSON.parse(l);if(console.log(d),d.done===!0)return;d.message&&typeof 
d.message.content=="string"&&(yield d.message.content)}catch(d){console.error("Stream parse error",d)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"http://localhost:11434/api/chat",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; +"use strict";var _=Object.create;var $=Object.defineProperty;var z=Object.getOwnPropertyDescriptor;var G=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var K=(a,e,t,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of G(e))!N.call(a,n)&&n!==t&&$(a,n,{get:()=>e[n],enumerable:!(s=z(e,n))||s.enumerable});return a};var J=(a,e,t)=>(t=a!=null?_(L(a)):{},K(e||!a||!a.__esModule?$(t,"default",{value:a,enumerable:!0}):t,a));Object.defineProperties(exports,{__esModule:{value:!0},[Symbol.toStringTag]:{value:"Module"}});const g=require("react"),u=require("react-chatbotify"),Y={autoConfig:!0},q=(a,e)=>{const t=g.useCallback(s=>{const r=a()[s.data.nextPath];e(r)},[a,e]);u.useOnRcbEvent(u.RcbEvent.CHANGE_PATH,t)},H=(a,e)=>{const{outputTypeRef:t}=a,{toggleTextAreaDisabled:s,toggleIsBotTyping:n,focusTextArea:r,injectMessage:o,simulateStreamMessage:i,getIsChatBotVisible:d}=e,c=g.useCallback(l=>{var m;const p=l.data.block;p.llmConnector&&(l.preventDefault(),l.type==="rcb-pre-process-block"&&((m=p.llmConnector)!=null&&m.initialMessage&&(t.current==="full"?o(a.initialMessageRef.current):i(a.initialMessageRef.current)),n(!1),s(!1),setTimeout(()=>{d()&&r()})))},[n,s,r,d]);u.useOnRcbEvent(u.RcbEvent.PRE_PROCESS_BLOCK,c),u.useOnRcbEvent(u.RcbEvent.POST_PROCESS_BLOCK,c)},V=async function*(a,e){for await(const t of a)for(const s of t)yield s,await new Promise(n=>setTimeout(n,e))},Q=async function*(a,e){for await(const t of a)yield t,await new Promise(s=>setTimeout(s,e))},X=async function*(a,e,t){e==="character"?yield*V(a,t):yield*Q(a,t)},Z=async function*(a,e){for await(const t of a)e(t),yield t},ee=async(a,e,t,s={})=>{var 
R,M;if(!e.providerRef.current)return;const{speakAudio:n,toggleIsBotTyping:r,toggleTextAreaDisabled:o,focusTextArea:i,injectMessage:d,streamMessage:c,endStreamMessage:l,getIsChatBotVisible:p}=t,m=e.providerRef.current.sendMessages(a),b=e.outputTypeRef.current,y=e.outputSpeedRef.current;if(b==="full"){let h="";for await(const f of m){if((R=s.signal)!=null&&R.aborted)break;h+=f}r(!1),d(h),setTimeout(()=>{o(!1),p()&&i()})}else{const h=X(Z(m,n),b,y);let f="",S=!1;for await(const C of h){if((M=s.signal)!=null&&M.aborted)break;S||(r(!1),S=!0),f+=C,c(f)}l(),setTimeout(()=>{o(!1),p()&&i()})}},te=500,se=(a,e)=>{const{messagesRef:t,outputTypeRef:s,onUserMessageRef:n,onKeyDownRef:r,errorMessageRef:o}=a,{injectMessage:i,simulateStreamMessage:d,toggleTextAreaDisabled:c,toggleIsBotTyping:l,goToPath:p,focusTextArea:m,getIsChatBotVisible:b}=e,y=g.useRef(null),R=g.useCallback(M=>{if(!a.providerRef.current)return;const h=M.data.message,f=h.sender.toUpperCase();h.tags=h.tags??[],h.tags.push(`rcb-llm-connector-plugin:${f}`),f==="USER"&&(l(!0),c(!0),setTimeout(async()=>{var v;if(n.current){const P=await n.current(h);if(P)return(v=y.current)==null||v.abort(),y.current=null,p(P)}const S=a.historySizeRef.current,C=t.current,T=S?[...C.slice(-(S-1)),h]:[h],E=new AbortController;y.current=E,ee(T,a,e,{signal:E.signal}).catch(P=>{l(!1),c(!1),setTimeout(()=>{b()&&m()}),console.error("LLM prompt failed",P),s.current==="full"?i(o.current):d(o.current)})},te))},[a,e]);u.useOnRcbEvent(u.RcbEvent.POST_INJECT_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_SIMULATE_STREAM_MESSAGE,R),u.useOnRcbEvent(u.RcbEvent.STOP_STREAM_MESSAGE,R),g.useEffect(()=>{const M=async h=>{var f;if(r.current){const S=await r.current(h);S&&((f=y.current)==null||f.abort(),y.current=null,p(S))}};return window.addEventListener("keydown",M),()=>window.removeEventListener("keydown",M)},[])},re=a=>{const e=g.useRef([]),t=g.useRef(null),s=g.useRef("chunk"),n=g.useRef(30),r=g.useRef(0),o=g.useRef(""),i=g.useRef("Unable to get response, please try again."),d=g.useRef(null),c=g.useRef(null),{getFlow:l}=u.useFlow(),{speakAudio:p}=u.useAudio(),{messages:m,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M}=u.useMessages(),{goToPath:h}=u.usePaths(),{toggleTextAreaDisabled:f,focusTextArea:S}=u.useTextArea(),{toggleIsBotTyping:C,getIsChatBotVisible:T}=u.useChatWindow(),E={...Y,...a??{}};g.useEffect(()=>{e.current=m},[m]),q(l,w=>{var x,A,k,B,U,F,I,W,j,D;t.current=((x=w.llmConnector)==null?void 0:x.provider)??null,s.current=((A=w.llmConnector)==null?void 0:A.outputType)??"chunk",n.current=((k=w.llmConnector)==null?void 0:k.outputSpeed)??30,r.current=((B=w.llmConnector)==null?void 0:B.historySize)??0,o.current=((U=w.llmConnector)==null?void 0:U.initialMessage)??"",i.current=((F=w.llmConnector)==null?void 0:F.errorMessage)??"Unable to get response, please try again.",d.current=((W=(I=w.llmConnector)==null?void 0:I.stopConditions)==null?void 0:W.onUserMessage)??null,c.current=((D=(j=w.llmConnector)==null?void 0:j.stopConditions)==null?void 0:D.onKeyDown)??null});const v={providerRef:t,messagesRef:e,outputTypeRef:s,outputSpeedRef:n,historySizeRef:r,initialMessageRef:o,errorMessageRef:i,onUserMessageRef:d,onKeyDownRef:c},P={speakAudio:p,injectMessage:b,simulateStreamMessage:y,streamMessage:R,endStreamMessage:M,toggleTextAreaDisabled:f,toggleIsBotTyping:C,focusTextArea:S,goToPath:h,getIsChatBotVisible:T};H(v,P),se(v,P);const O={name:"@rcb-plugins/llm-connector"};return 
E!=null&&E.autoConfig&&(O.settings={event:{rcbChangePath:!0,rcbPostInjectMessage:!0,rcbStopSimulateStreamMessage:!0,rcbStopStreamMessage:!0,rcbPreProcessBlock:!0,rcbPostProcessBlock:!0}}),O},oe=a=>()=>re(a);class ne{constructor(e){this.debug=!1,this.roleMap=s=>{switch(s){case"USER":return"user";default:return"model"}},this.constructBodyWithMessages=s=>{let n;return this.messageParser?n=this.messageParser(s):n=s.filter(o=>typeof o.content=="string"&&o.sender.toUpperCase()!=="SYSTEM").map(o=>{const i=this.roleMap(o.sender.toUpperCase()),d=o.content;return{role:i,parts:[{text:d}]}}),this.systemMessage&&(n=[{role:"user",parts:[{text:this.systemMessage}]},...n]),{contents:n,...this.body}},this.handleStreamResponse=async function*(s){var o,i,d,c,l;const n=new TextDecoder("utf-8");let r="";for(;;){const{value:p,done:m}=await s.read();if(m)break;r+=n.decode(p,{stream:!0});const b=r.split(` +`);r=b.pop();for(const y of b){const R=y.trim();if(!R.startsWith("data: "))continue;const M=R.slice(6);try{const f=(l=(c=(d=(i=(o=JSON.parse(M).candidates)==null?void 0:o[0])==null?void 0:i.content)==null?void 0:d.parts)==null?void 0:c[0])==null?void 0:l.text;f&&(yield f)}catch(h){console.error("SSE JSON parse error:",M,h)}}}},this.method=e.method??"POST",this.body=e.body??{},this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers};const t=e.baseUrl??"https://generativelanguage.googleapis.com/v1beta";if(e.mode==="direct")this.endpoint=this.responseFormat==="stream"?`${t}/models/${e.model}:streamGenerateContent?alt=sse&key=${e.apiKey||""}`:`${t}/models/${e.model}:generateContent?key=${e.apiKey||""}`;else if(e.mode==="proxy")this.endpoint=`${t}/${e.model}`;else throw Error("Invalid mode specified for Gemini provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r,o,i;if(this.debug){const d=this.endpoint.replace(/\?key=([^&]+)/,"?key=[REDACTED]"),c={...this.headers};console.log("[GeminiProvider] Request:",{method:this.method,endpoint:d,headers:c,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[GeminiProvider] Response status:",t.status),!t.ok)throw new Error(`Gemini API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const d=t.body.getReader();for await(const c of this.handleStreamResponse(d))yield c}else{const d=await t.json();this.debug&&console.log("[GeminiProvider] Response body:",d);const c=(i=(o=(r=(n=(s=d.candidates)==null?void 0:s[0])==null?void 0:n.content)==null?void 0:r.parts)==null?void 0:o[0])==null?void 0:i.text;if(typeof c=="string")yield c;else throw new Error("Unexpected response shape – no text candidate")}}}class ae{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const 
o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,...this.body}},this.handleStreamResponse=async function*(t){var r,o,i;const s=new TextDecoder("utf-8");let n="";for(;;){const{value:d,done:c}=await t.read();if(c)break;n+=s.decode(d,{stream:!0});const l=n.split(/\r?\n/);n=l.pop();for(const p of l){if(!p.startsWith("data: "))continue;const m=p.slice(6).trim();if(m==="[DONE]")return;try{const y=(i=(o=(r=JSON.parse(m).choices)==null?void 0:r[0])==null?void 0:o.delta)==null?void 0:i.content;y&&(yield y)}catch(b){console.error("Stream parse error",b)}}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"https://api.openai.com/v1/chat/completions",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OpenaiProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OpenaiProvider] Response status:",t.status),!t.ok)throw new Error(`Openai API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OpenaiProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}class ie{constructor(e){this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{messages:s,stream:this.responseFormat==="stream",...this.chatCompletionOptions}},this.model=e.model,this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.engineConfig=e.engineConfig??{},this.chatCompletionOptions=e.chatCompletionOptions??{},this.debug=e.debug??!1,this.createEngine()}async createEngine(){const{CreateMLCEngine:e}=await import("@mlc-ai/web-llm");this.engine=await e(this.model,{...this.engineConfig})}async*sendMessages(e){var s,n,r,o,i,d;this.engine||await this.createEngine(),this.debug&&console.log("[WebLlmProvider] 
Request:",{model:this.model,systemMessage:this.systemMessage,responseFormat:this.responseFormat,engineConfig:this.engineConfig,chatCompletionOptions:this.chatCompletionOptions,messages:this.constructBodyWithMessages(e).messages});const t=await((s=this.engine)==null?void 0:s.chat.completions.create(this.constructBodyWithMessages(e)));if(this.debug&&console.log("[WebLlmProvider] Response:",t),t&&Symbol.asyncIterator in t)for await(const c of t){const l=(r=(n=c.choices[0])==null?void 0:n.delta)==null?void 0:r.content;l&&(yield l)}else(d=(i=(o=t==null?void 0:t.choices)==null?void 0:o[0])==null?void 0:i.message)!=null&&d.content&&(yield t.choices[0].message.content)}}class ce{constructor(e){if(this.debug=!1,this.roleMap=t=>{switch(t){case"USER":return"user";case"SYSTEM":return"system";default:return"assistant"}},this.constructBodyWithMessages=t=>{let s;return this.messageParser?s=this.messageParser(t):s=t.filter(r=>typeof r.content=="string"&&r.sender.toUpperCase()!=="SYSTEM").map(r=>{const o=this.roleMap(r.sender.toUpperCase()),i=r.content;return{role:o,content:i}}),this.systemMessage&&(s=[{role:"system",content:this.systemMessage},...s]),{model:this.body.model,messages:s}},this.handleStreamResponse=async function*(t){const s=new TextDecoder("utf-8");let n="";for(;;){const{value:r,done:o}=await t.read();if(o)break;n+=s.decode(r,{stream:!0});const i=n.split(/\r?\n/);n=i.pop();for(const d of i)try{const c=JSON.parse(d);if(c.done===!0)return;c.message&&typeof c.message.content=="string"&&(yield c.message.content)}catch(c){console.error("Stream parse error",c)}}},this.method=e.method??"POST",this.endpoint=e.baseUrl??"http://localhost:11434/api/chat",this.systemMessage=e.systemMessage,this.responseFormat=e.responseFormat??"stream",this.messageParser=e.messageParser,this.debug=e.debug??!1,this.headers={"Content-Type":"application/json",Accept:this.responseFormat==="stream"?"text/event-stream":"application/json",...e.headers},this.body={model:e.model,stream:this.responseFormat==="stream",...e.body},e.mode==="direct"){this.headers={...this.headers,Authorization:`Bearer ${e.apiKey}`};return}if(e.mode!=="proxy")throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy').")}async*sendMessages(e){var s,n,r;if(this.debug){const o={...this.headers};delete o.Authorization,console.log("[OllamaProvider] Request:",{method:this.method,endpoint:this.endpoint,headers:o,body:this.constructBodyWithMessages(e)})}const t=await fetch(this.endpoint,{method:this.method,headers:this.headers,body:JSON.stringify(this.constructBodyWithMessages(e))});if(this.debug&&console.log("[OllamaProvider] Response status:",t.status),!t.ok)throw new Error(`Ollama API error ${t.status}: ${await t.text()}`);if(this.responseFormat==="stream"){if(!t.body)throw new Error("Response body is empty – cannot stream");const o=t.body.getReader();for await(const i of this.handleStreamResponse(o))yield i}else{const o=await t.json();this.debug&&console.log("[OllamaProvider] Response body:",o);const i=(r=(n=(s=o.choices)==null?void 0:s[0])==null?void 0:n.message)==null?void 0:r.content;if(typeof i=="string")yield i;else throw new Error("Unexpected response shape – no text candidate")}}}exports.GeminiProvider=ne;exports.OllamaProvider=ce;exports.OpenaiProvider=ae;exports.WebLlmProvider=ie;exports.default=oe; diff --git a/dist/index.js b/dist/index.js index fc3f640..8a8f06f 100644 --- a/dist/index.js +++ b/dist/index.js @@ -18,18 +18,18 @@ const H = { focusTextArea: r, injectMessage: o, simulateStreamMessage: a, - getIsChatBotVisible: c 
- } = e, l = O( - (d) => { + getIsChatBotVisible: d + } = e, c = O( + (l) => { var p; - const u = d.data.block; - u.llmConnector && (d.preventDefault(), d.type === "rcb-pre-process-block" && ((p = u.llmConnector) != null && p.initialMessage && (t.current === "full" ? o(i.initialMessageRef.current) : a(i.initialMessageRef.current)), n(!1), s(!1), setTimeout(() => { - c() && r(); + const u = l.data.block; + u.llmConnector && (l.preventDefault(), l.type === "rcb-pre-process-block" && ((p = u.llmConnector) != null && p.initialMessage && (t.current === "full" ? o(i.initialMessageRef.current) : a(i.initialMessageRef.current)), n(!1), s(!1), setTimeout(() => { + d() && r(); }))); }, - [n, s, r, c] + [n, s, r, d] ); - C(T.PRE_PROCESS_BLOCK, l), C(T.POST_PROCESS_BLOCK, l); + C(T.PRE_PROCESS_BLOCK, c), C(T.POST_PROCESS_BLOCK, c); }, Q = async function* (i, e) { for await (const t of i) for (const s of t) @@ -51,9 +51,9 @@ const H = { toggleIsBotTyping: r, toggleTextAreaDisabled: o, focusTextArea: a, - injectMessage: c, - streamMessage: l, - endStreamMessage: d, + injectMessage: d, + streamMessage: c, + endStreamMessage: l, getIsChatBotVisible: u } = t, p = e.providerRef.current.sendMessages(i), f = e.outputTypeRef.current, g = e.outputSpeedRef.current; if (f === "full") { @@ -62,7 +62,7 @@ const H = { if ((M = s.signal) != null && M.aborted) break; h += m; } - r(!1), c(h), setTimeout(() => { + r(!1), d(h), setTimeout(() => { o(!1), u() && a(); }); } else { @@ -71,18 +71,18 @@ const H = { for await (const E of h) { if ((y = s.signal) != null && y.aborted) break; - b || (r(!1), b = !0), m += E, l(m); + b || (r(!1), b = !0), m += E, c(m); } - d(), setTimeout(() => { + l(), setTimeout(() => { o(!1), u() && a(); }); } }, se = 500, re = (i, e) => { const { messagesRef: t, outputTypeRef: s, onUserMessageRef: n, onKeyDownRef: r, errorMessageRef: o } = i, { injectMessage: a, - simulateStreamMessage: c, - toggleTextAreaDisabled: l, - toggleIsBotTyping: d, + simulateStreamMessage: d, + toggleTextAreaDisabled: c, + toggleIsBotTyping: l, goToPath: u, focusTextArea: p, getIsChatBotVisible: f @@ -91,7 +91,7 @@ const H = { if (!i.providerRef.current) return; const h = y.data.message, m = h.sender.toUpperCase(); - h.tags = h.tags ?? [], h.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (d(!0), l(!0), setTimeout(async () => { + h.tags = h.tags ?? [], h.tags.push(`rcb-llm-connector-plugin:${m}`), m === "USER" && (l(!0), c(!0), setTimeout(async () => { var v; if (n.current) { const R = await n.current(h); @@ -100,9 +100,9 @@ const H = { } const b = i.historySizeRef.current, E = t.current, x = b ? [...E.slice(-(b - 1)), h] : [h], P = new AbortController(); g.current = P, te(x, i, e, { signal: P.signal }).catch((R) => { - d(!1), l(!1), setTimeout(() => { + l(!1), c(!1), setTimeout(() => { f() && p(); - }), console.error("LLM prompt failed", R), s.current === "full" ? a(o.current) : c(o.current); + }), console.error("LLM prompt failed", R), s.current === "full" ? 
a(o.current) : d(o.current); }); }, se)); }, @@ -119,12 +119,12 @@ const H = { return window.addEventListener("keydown", y), () => window.removeEventListener("keydown", y); }, []); }, oe = (i) => { - const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), c = S(null), l = S(null), { getFlow: d } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: f, simulateStreamMessage: g, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: m, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; + const e = S([]), t = S(null), s = S("chunk"), n = S(30), r = S(0), o = S(""), a = S("Unable to get response, please try again."), d = S(null), c = S(null), { getFlow: l } = G(), { speakAudio: u } = L(), { messages: p, injectMessage: f, simulateStreamMessage: g, streamMessage: M, endStreamMessage: y } = N(), { goToPath: h } = K(), { toggleTextAreaDisabled: m, focusTextArea: b } = J(), { toggleIsBotTyping: E, getIsChatBotVisible: x } = Y(), P = { ...H, ...i ?? {} }; z(() => { e.current = p; - }, [p]), q(d, (w) => { - var k, B, U, F, I, W, j, D, $, _; - t.current = ((k = w.llmConnector) == null ? void 0 : k.provider) ?? null, s.current = ((B = w.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", n.current = ((U = w.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, r.current = ((F = w.llmConnector) == null ? void 0 : F.historySize) ?? 0, o.current = ((I = w.llmConnector) == null ? void 0 : I.initialMessage) ?? "", a.current = ((W = w.llmConnector) == null ? void 0 : W.errorMessage) ?? "Unable to get response, please try again.", c.current = ((D = (j = w.llmConnector) == null ? void 0 : j.stopConditions) == null ? void 0 : D.onUserMessage) ?? null, l.current = ((_ = ($ = w.llmConnector) == null ? void 0 : $.stopConditions) == null ? void 0 : _.onKeyDown) ?? null; + }, [p]), q(l, (w) => { + var k, B, U, F, I, W, D, $, j, _; + t.current = ((k = w.llmConnector) == null ? void 0 : k.provider) ?? null, s.current = ((B = w.llmConnector) == null ? void 0 : B.outputType) ?? "chunk", n.current = ((U = w.llmConnector) == null ? void 0 : U.outputSpeed) ?? 30, r.current = ((F = w.llmConnector) == null ? void 0 : F.historySize) ?? 0, o.current = ((I = w.llmConnector) == null ? void 0 : I.initialMessage) ?? "", a.current = ((W = w.llmConnector) == null ? void 0 : W.errorMessage) ?? "Unable to get response, please try again.", d.current = (($ = (D = w.llmConnector) == null ? void 0 : D.stopConditions) == null ? void 0 : $.onUserMessage) ?? null, c.current = ((_ = (j = w.llmConnector) == null ? void 0 : j.stopConditions) == null ? void 0 : _.onKeyDown) ?? null; }); const v = { providerRef: t, @@ -134,8 +134,8 @@ const H = { historySizeRef: r, initialMessageRef: o, errorMessageRef: a, - onUserMessageRef: c, - onKeyDownRef: l + onUserMessageRef: d, + onKeyDownRef: c }, R = { speakAudio: u, injectMessage: f, @@ -181,17 +181,17 @@ class ce { return this.messageParser ? 
n = this.messageParser(s) : n = s.filter( (o) => typeof o.content == "string" && o.sender.toUpperCase() !== "SYSTEM" ).map((o) => { - const a = this.roleMap(o.sender.toUpperCase()), c = o.content; + const a = this.roleMap(o.sender.toUpperCase()), d = o.content; return { role: a, - parts: [{ text: c }] + parts: [{ text: d }] }; }), this.systemMessage && (n = [{ role: "user", parts: [{ text: this.systemMessage }] }, ...n]), { contents: n, ...this.body }; }, this.handleStreamResponse = async function* (s) { - var o, a, c, l, d; + var o, a, d, c, l; const n = new TextDecoder("utf-8"); let r = ""; for (; ; ) { @@ -206,7 +206,7 @@ class ce { if (!M.startsWith("data: ")) continue; const y = M.slice(6); try { - const m = (d = (l = (c = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : c.parts) == null ? void 0 : l[0]) == null ? void 0 : d.text; + const m = (l = (c = (d = (a = (o = JSON.parse(y).candidates) == null ? void 0 : o[0]) == null ? void 0 : a.content) == null ? void 0 : d.parts) == null ? void 0 : c[0]) == null ? void 0 : l.text; m && (yield m); } catch (h) { console.error("SSE JSON parse error:", y, h); @@ -234,11 +234,11 @@ class ce { async *sendMessages(e) { var s, n, r, o, a; if (this.debug) { - const c = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), l = { ...this.headers }; + const d = this.endpoint.replace(/\?key=([^&]+)/, "?key=[REDACTED]"), c = { ...this.headers }; console.log("[GeminiProvider] Request:", { method: this.method, - endpoint: c, - headers: l, + endpoint: d, + headers: c, body: this.constructBodyWithMessages(e) }); } @@ -252,15 +252,15 @@ class ce { if (this.responseFormat === "stream") { if (!t.body) throw new Error("Response body is empty – cannot stream"); - const c = t.body.getReader(); - for await (const l of this.handleStreamResponse(c)) - yield l; + const d = t.body.getReader(); + for await (const c of this.handleStreamResponse(d)) + yield c; } else { - const c = await t.json(); - this.debug && console.log("[GeminiProvider] Response body:", c); - const l = (a = (o = (r = (n = (s = c.candidates) == null ? void 0 : s[0]) == null ? void 0 : n.content) == null ? void 0 : r.parts) == null ? void 0 : o[0]) == null ? void 0 : a.text; - if (typeof l == "string") - yield l; + const d = await t.json(); + this.debug && console.log("[GeminiProvider] Response body:", d); + const c = (a = (o = (r = (n = (s = d.candidates) == null ? void 0 : s[0]) == null ? void 0 : n.content) == null ? void 0 : r.parts) == null ? void 0 : o[0]) == null ? 
void 0 : a.text; + if (typeof c == "string") + yield c; else throw new Error("Unexpected response shape – no text candidate"); } @@ -302,12 +302,12 @@ class de { const s = new TextDecoder("utf-8"); let n = ""; for (; ; ) { - const { value: c, done: l } = await t.read(); - if (l) break; - n += s.decode(c, { stream: !0 }); - const d = n.split(/\r?\n/); - n = d.pop(); - for (const u of d) { + const { value: d, done: c } = await t.read(); + if (c) break; + n += s.decode(d, { stream: !0 }); + const l = n.split(/\r?\n/); + n = l.pop(); + for (const u of l) { if (!u.startsWith("data: ")) continue; const p = u.slice(6).trim(); if (p === "[DONE]") return; @@ -429,7 +429,7 @@ class le { * @param messages messages to include in the request */ async *sendMessages(e) { - var s, n, r, o, a, c; + var s, n, r, o, a, d; this.engine || await this.createEngine(), this.debug && console.log("[WebLlmProvider] Request:", { model: this.model, systemMessage: this.systemMessage, @@ -441,11 +441,11 @@ class le { }); const t = await ((s = this.engine) == null ? void 0 : s.chat.completions.create(this.constructBodyWithMessages(e))); if (this.debug && console.log("[WebLlmProvider] Response:", t), t && Symbol.asyncIterator in t) - for await (const l of t) { - const d = (r = (n = l.choices[0]) == null ? void 0 : n.delta) == null ? void 0 : r.content; - d && (yield d); + for await (const c of t) { + const l = (r = (n = c.choices[0]) == null ? void 0 : n.delta) == null ? void 0 : r.content; + l && (yield l); } - else (c = (a = (o = t == null ? void 0 : t.choices) == null ? void 0 : o[0]) == null ? void 0 : a.message) != null && c.content && (yield t.choices[0].message.content); + else (d = (a = (o = t == null ? void 0 : t.choices) == null ? void 0 : o[0]) == null ? void 0 : a.message) != null && d.content && (yield t.choices[0].message.content); } } class he { @@ -488,17 +488,14 @@ class he { n += s.decode(r, { stream: !0 }); const a = n.split(/\r?\n/); n = a.pop(); - for (const c of a) { - if (!c.startsWith("data: ")) continue; - const l = c.slice(6).trim(); + for (const d of a) try { - const d = JSON.parse(l); - if (console.log(d), d.done === !0) return; - d.message && typeof d.message.content == "string" && (yield d.message.content); - } catch (d) { - console.error("Stream parse error", d); + const c = JSON.parse(d); + if (c.done === !0) return; + c.message && typeof c.message.content == "string" && (yield c.message.content); + } catch (c) { + console.error("Stream parse error", c); } - } } }, this.method = e.method ?? "POST", this.endpoint = e.baseUrl ?? "http://localhost:11434/api/chat", this.systemMessage = e.systemMessage, this.responseFormat = e.responseFormat ?? "stream", this.messageParser = e.messageParser, this.debug = e.debug ?? !1, this.headers = { "Content-Type": "application/json", @@ -513,7 +510,7 @@ class he { return; } if (e.mode !== "proxy") - throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy')."); + throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy')."); } /** * Calls Openai and yields each chunk (or the full text). 
@@ -524,7 +521,7 @@ class he { var s, n, r; if (this.debug) { const o = { ...this.headers }; - delete o.Authorization, console.log("[OpenaiProvider] Request:", { + delete o.Authorization, console.log("[OllamaProvider] Request:", { method: this.method, endpoint: this.endpoint, headers: o, @@ -536,8 +533,8 @@ class he { headers: this.headers, body: JSON.stringify(this.constructBodyWithMessages(e)) }); - if (this.debug && console.log("[OpenaiProvider] Response status:", t.status), !t.ok) - throw new Error(`Openai API error ${t.status}: ${await t.text()}`); + if (this.debug && console.log("[OllamaProvider] Response status:", t.status), !t.ok) + throw new Error(`Ollama API error ${t.status}: ${await t.text()}`); if (this.responseFormat === "stream") { if (!t.body) throw new Error("Response body is empty – cannot stream"); @@ -546,7 +543,7 @@ class he { yield a; } else { const o = await t.json(); - this.debug && console.log("[OpenaiProvider] Response body:", o); + this.debug && console.log("[OllamaProvider] Response body:", o); const a = (r = (n = (s = o.choices) == null ? void 0 : s[0]) == null ? void 0 : n.message) == null ? void 0 : r.content; if (typeof a == "string") yield a; diff --git a/dist/providers/OllamaProvider.d.ts.map b/dist/providers/OllamaProvider.d.ts.map index e020807..bf0fe9f 100644 --- a/dist/providers/OllamaProvider.d.ts.map +++ b/dist/providers/OllamaProvider.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA6B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file +{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,QAAQ,EAAC,MAAM,mBAAmB,CAAC;AAC3C,OAAO,EAAC,OAAO,EAAC,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAC,oBAAoB,EAAC,MAAM,+CAA+C,CAAC;AAGnF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACpC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IAgDvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA0B1B;CACL;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/src/App.tsx b/src/App.tsx index 372830e..a8514f7 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -41,7 +41,7 @@ const App = () => { } return 'Pick another model to try!'; }, - options: ['WebLlm', 'Gemini', 'OpenAI'], + options: ['WebLlm', 'Gemini', 'OpenAI', 'Ollama'], chatDisabled: true, path: async (params: Params) => { // if browser model chosen, give a gentle warning about performance @@ -118,6 +118,7 @@ const App = () => { mode: 'direct', model: 'robot', apiKey: '', + debug:true, 
 }),
 outputType: 'character',
 stopConditions: {
diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts
index 5f3c0e6..f279c3a 100644
--- a/src/providers/OllamaProvider.ts
+++ b/src/providers/OllamaProvider.ts
@@ -1,193 +1,191 @@
-import { Provider } from '../types/Provider';
-import { Message } from 'react-chatbotify';
-import { OpenaiProviderConfig } from '../types/provider-config/OpenaiProviderConfig';
-import { OpenaiProviderMessage } from '../types/provider-message/OpenaiProviderMessage';
+import {Provider} from '../types/Provider';
+import {Message} from 'react-chatbotify';
+import {OpenaiProviderConfig} from '../types/provider-config/OpenaiProviderConfig';
+import {OpenaiProviderMessage} from '../types/provider-message/OpenaiProviderMessage';

 /**
 * Provider for Openai’s API, supports both direct and proxy modes.
 */
 class OllamaProvider implements Provider {
- private method!: string;
- private endpoint!: string;
- private headers!: Record<string, unknown>;
- private body!: Record<string, unknown>;
- private systemMessage?: string;
- private responseFormat!: 'stream' | 'json';
- private messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
- private debug: boolean = false;
-
- /**
- * Sets default values for the provider based on given configuration. Configuration guide here:
- * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md
- *
- * @param config configuration for setup
- */
- public constructor(config: OpenaiProviderConfig) {
- this.method = config.method ?? 'POST';
- this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/chat';
- this.systemMessage = config.systemMessage;
- this.responseFormat = config.responseFormat ?? 'stream';
- this.messageParser = config.messageParser;
- this.debug = config.debug ?? false;
- this.headers = {
- 'Content-Type': 'application/json',
- Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
- ...config.headers,
- };
- this.body = {
- model: config.model,
- stream: this.responseFormat === 'stream',
- ...config.body,
- };
-
- if (config.mode === 'direct') {
- this.headers = { ...this.headers, Authorization: `Bearer ${config.apiKey}` };
- return;
- }
-
- if (config.mode !== 'proxy') {
- throw Error("Invalid mode specified for OpenAI provider ('direct' or 'proxy').");
- }
- }
-
- /**
- * Calls Openai and yields each chunk (or the full text).
- *
- * @param messages messages to include in the request
- */
- public async *sendMessages(messages: Message[]): AsyncGenerator<string> {
- if (this.debug) {
- const sanitizedHeaders = { ...this.headers };
- delete sanitizedHeaders['Authorization'];
- console.log('[OpenaiProvider] Request:', {
- method: this.method,
- endpoint: this.endpoint,
- headers: sanitizedHeaders,
- body: this.constructBodyWithMessages(messages),
- });
- }
- const res = await fetch(this.endpoint, {
- method: this.method,
- headers: this.headers as HeadersInit,
- body: JSON.stringify(this.constructBodyWithMessages(messages)),
- });
-
- if (this.debug) {
- console.log('[OpenaiProvider] Response status:', res.status);
- }
-
- if (!res.ok) {
- throw new Error(`Openai API error ${res.status}: ${await res.text()}`);
- }
-
- if (this.responseFormat === 'stream') {
- if (!res.body) {
- throw new Error('Response body is empty – cannot stream');
- }
- const reader = res.body.getReader();
- for await (const chunk of this.handleStreamResponse(reader)) {
- yield chunk;
- }
- } else {
- const payload = await res.json();
- if (this.debug) {
- console.log('[OpenaiProvider] Response body:', payload);
- }
- const text = payload.choices?.[0]?.message?.content;
- if (typeof text === 'string') {
- yield text;
- } else {
- throw new Error('Unexpected response shape – no text candidate');
- }
- }
- }
-
- /**
- * Maps the chatbot message sender to the provider message sender.
- *
- * @param sender sender from the chatbot
- */
- private roleMap = (sender: string): 'system' | 'user' | 'assistant' => {
- switch (sender) {
- case 'USER':
- return 'user';
- case 'SYSTEM':
- return 'system';
- default:
- return 'assistant';
- }
- };
-
- /**
- * Builds the full request body.
- *
- * @param messages messages to parse
- */
- private constructBodyWithMessages = (messages: Message[]) => {
- let parsedMessages;
- if (this.messageParser) {
- parsedMessages = this.messageParser(messages);
- } else {
- const filteredMessages = messages.filter(
- (message) => typeof message.content === 'string' && message.sender.toUpperCase() !== 'SYSTEM'
- );
- parsedMessages = filteredMessages.map((message) => {
- const role = this.roleMap(message.sender.toUpperCase());
- const text = message.content;
- return {
- role,
- content: text,
- };
- });
- }
-
- // append system message if specified
- if (this.systemMessage) {
- parsedMessages = [{ role: 'system', content: this.systemMessage }, ...parsedMessages];
- }
-
- // Only include model and messages for Ollama
- return {
- model: this.body.model,
- messages: parsedMessages,
- };
- };
-
- /**
- * Consumes an SSE/text stream Response and yield each text chunk.
- *
- * @reader request body reader
- */
- private handleStreamResponse = async function* (
- reader: ReadableStreamDefaultReader<Uint8Array>
- ): AsyncGenerator<string> {
- const decoder = new TextDecoder('utf-8');
- let buffer = '';
-
- while (true) {
- const { value, done } = await reader.read();
- if (done) break;
-
- buffer += decoder.decode(value, { stream: true });
- const parts = buffer.split(/\r?\n/);
- buffer = parts.pop()!;
-
- for (const line of parts) {
- if (!line.startsWith('data: ')) continue;
- const json = line.slice('data: '.length).trim();
- try {
- const event = JSON.parse(json);
- console.log(event);
- if (event.done === true) return;
- if (event.message && typeof event.message.content === 'string') {
- yield event.message.content;
- }
- } catch (err) {
- console.error('Stream parse error', err);
- }
- }
- }
- };
+ private method!: string;
+ private endpoint!: string;
+ private headers!: Record<string, unknown>;
+ private body!: Record<string, unknown>;
+ private systemMessage?: string;
+ private responseFormat!: 'stream' | 'json';
+ private messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+ private debug: boolean = false;
+
+ /**
+ * Sets default values for the provider based on given configuration. Configuration guide here:
+ * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md
+ *
+ * @param config configuration for setup
+ */
+ public constructor(config: OpenaiProviderConfig) {
+ this.method = config.method ?? 'POST';
+ this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/chat';
+ this.systemMessage = config.systemMessage;
+ this.responseFormat = config.responseFormat ?? 'stream';
+ this.messageParser = config.messageParser;
+ this.debug = config.debug ?? false;
+ this.headers = {
+ 'Content-Type': 'application/json',
+ Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
+ ...config.headers,
+ };
+ this.body = {
+ model: config.model,
+ stream: this.responseFormat === 'stream',
+ ...config.body,
+ };
+
+ if (config.mode === 'direct') {
+ this.headers = {...this.headers, Authorization: `Bearer ${config.apiKey}`};
+ return;
+ }
+
+ if (config.mode !== 'proxy') {
+ throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy').");
+ }
+ }
+
+ /**
+ * Calls Ollama and yields each chunk (or the full text).
+ * + * @param messages messages to include in the request + */ + public async* sendMessages(messages: Message[]): AsyncGenerator { + if (this.debug) { + const sanitizedHeaders = {...this.headers}; + delete sanitizedHeaders['Authorization']; + console.log('[OllamaProvider] Request:', { + method: this.method, + endpoint: this.endpoint, + headers: sanitizedHeaders, + body: this.constructBodyWithMessages(messages), + }); + } + const res = await fetch(this.endpoint, { + method: this.method, + headers: this.headers as HeadersInit, + body: JSON.stringify(this.constructBodyWithMessages(messages)), + }); + + + if (this.debug) { + console.log('[OllamaProvider] Response status:', res.status); + } + + if (!res.ok) { + throw new Error(`Ollama API error ${res.status}: ${await res.text()}`); + } + + if (this.responseFormat === 'stream') { + if (!res.body) { + throw new Error('Response body is empty – cannot stream'); + } + const reader = res.body.getReader(); + for await (const chunk of this.handleStreamResponse(reader)) { + yield chunk; + } + } else { + const payload = await res.json(); + if (this.debug) { + console.log('[OllamaProvider] Response body:', payload); + } + const text = payload.choices?.[0]?.message?.content; + if (typeof text === 'string') { + yield text; + } else { + throw new Error('Unexpected response shape – no text candidate'); + } + } + } + + /** + * Maps the chatbot message sender to the provider message sender. + * + * @param sender sender from the chatbot + */ + private roleMap = (sender: string): 'system' | 'user' | 'assistant' => { + switch (sender) { + case 'USER': + return 'user'; + case 'SYSTEM': + return 'system'; + default: + return 'assistant'; + } + }; + + /** + * Builds the full request body. + * + * @param messages messages to parse + */ + private constructBodyWithMessages = (messages: Message[]) => { + let parsedMessages; + if (this.messageParser) { + parsedMessages = this.messageParser(messages); + } else { + const filteredMessages = messages.filter( + (message) => typeof message.content === 'string' && message.sender.toUpperCase() !== 'SYSTEM' + ); + parsedMessages = filteredMessages.map((message) => { + const role = this.roleMap(message.sender.toUpperCase()); + const text = message.content; + return { + role, + content: text, + }; + }); + } + + // append system message if specified + if (this.systemMessage) { + parsedMessages = [{role: 'system', content: this.systemMessage}, ...parsedMessages]; + } + + // Only include model and messages for Ollama + return { + model: this.body.model, + messages: parsedMessages, + }; + }; + + /** + * Consumes an SSE/text stream Response and yield each text chunk. 
+ * + * @reader request body reader + */ + private handleStreamResponse = async function* ( + reader: ReadableStreamDefaultReader> + ): AsyncGenerator { + const decoder = new TextDecoder('utf-8'); + let buffer = ''; + + while (true) { + const {value, done} = await reader.read(); + if (done) break; + + buffer += decoder.decode(value, {stream: true}); + const parts = buffer.split(/\r?\n/); + buffer = parts.pop()!; + + for (const line of parts) { + try { + const event = JSON.parse(line); + if (event.done === true) return; + if (event.message && typeof event.message.content === 'string') { + yield event.message.content; + } + } catch (err) { + console.error('Stream parse error', err); + } + } + } + }; } export default OllamaProvider; From 88884e7cb8315692340c710fcbbb8f633ad878a2 Mon Sep 17 00:00:00 2001 From: philipAthanasopoulos Date: Wed, 30 Jul 2025 14:43:49 +0300 Subject: [PATCH 8/8] refactored OllamaProvider --- dist/index.js | 2 +- dist/providers/OllamaProvider.d.ts | 4 +- dist/providers/OllamaProvider.d.ts.map | 2 +- dist/tsconfig.tsbuildinfo | 2 +- .../provider-config/OllamaProviderConfig.d.ts | 39 ++ .../OllamaProviderConfig.d.ts.map | 1 + .../OllamaProviderMessage.d.ts | 8 + .../OllamaProviderMessage.d.ts.map | 2 +- src/App.tsx | 2 +- src/providers/OllamaProvider.ts | 367 +++++++++--------- .../provider-config/OllamaProviderConfig.ts | 43 ++ .../provider-message/OllamaProviderMessage.ts | 8 + 12 files changed, 289 insertions(+), 191 deletions(-) create mode 100644 dist/types/provider-config/OllamaProviderConfig.d.ts create mode 100644 dist/types/provider-config/OllamaProviderConfig.d.ts.map create mode 100644 src/types/provider-config/OllamaProviderConfig.ts diff --git a/dist/index.js b/dist/index.js index 8a8f06f..5116245 100644 --- a/dist/index.js +++ b/dist/index.js @@ -513,7 +513,7 @@ class he { throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy')."); } /** - * Calls Openai and yields each chunk (or the full text). + * Calls Ollama and yields each chunk (or the full text). * * @param messages messages to include in the request */ diff --git a/dist/providers/OllamaProvider.d.ts b/dist/providers/OllamaProvider.d.ts index 431a8bc..26726a0 100644 --- a/dist/providers/OllamaProvider.d.ts +++ b/dist/providers/OllamaProvider.d.ts @@ -2,7 +2,7 @@ import { Provider } from '../types/Provider'; import { Message } from 'react-chatbotify'; import { OpenaiProviderConfig } from '../types/provider-config/OpenaiProviderConfig'; /** - * Provider for Openai’s API, supports both direct and proxy modes. + * Provider for Ollama’s API, supports both direct and proxy modes. */ declare class OllamaProvider implements Provider { private method; @@ -21,7 +21,7 @@ declare class OllamaProvider implements Provider { */ constructor(config: OpenaiProviderConfig); /** - * Calls Openai and yields each chunk (or the full text). + * Calls Ollama and yields each chunk (or the full text). 
* * @param messages messages to include in the request */ diff --git a/dist/providers/OllamaProvider.d.ts.map b/dist/providers/OllamaProvider.d.ts.map index bf0fe9f..9785e68 100644 --- a/dist/providers/OllamaProvider.d.ts.map +++ b/dist/providers/OllamaProvider.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,QAAQ,EAAC,MAAM,mBAAmB,CAAC;AAC3C,OAAO,EAAC,OAAO,EAAC,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAC,oBAAoB,EAAC,MAAM,+CAA+C,CAAC;AAGnF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACpC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IAgDvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA0B1B;CACL;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file +{"version":3,"file":"OllamaProvider.d.ts","sourceRoot":"","sources":["../../src/providers/OllamaProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAC3C,OAAO,EAAE,oBAAoB,EAAE,MAAM,+CAA+C,CAAC;AAGrF;;GAEG;AACH,cAAM,cAAe,YAAW,QAAQ;IACvC,OAAO,CAAC,MAAM,CAAU;IACxB,OAAO,CAAC,QAAQ,CAAU;IAC1B,OAAO,CAAC,OAAO,CAA2B;IAC1C,OAAO,CAAC,IAAI,CAA2B;IACvC,OAAO,CAAC,aAAa,CAAC,CAAS;IAC/B,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAmD;IACzE,OAAO,CAAC,KAAK,CAAkB;IAE/B;;;;;OAKG;gBACgB,MAAM,EAAE,oBAAoB;IA4B/C;;;;OAIG;IACW,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,cAAc,CAAC,MAAM,CAAC;IA+CvE;;;;OAIG;IACH,OAAO,CAAC,OAAO,CASb;IAEF;;;;OAIG;IACH,OAAO,CAAC,yBAAyB,CA4B/B;IAEF;;;;OAIG;IACH,OAAO,CAAC,oBAAoB,CA0B1B;CACF;AAED,eAAe,cAAc,CAAC"} \ No newline at end of file diff --git a/dist/tsconfig.tsbuildinfo b/dist/tsconfig.tsbuildinfo index afe2545..12d7934 100644 --- a/dist/tsconfig.tsbuildinfo +++ b/dist/tsconfig.tsbuildinfo @@ -1 +1 @@ -{"root":["../src/app.tsx","../src/development.tsx","../src/index.tsx","../src/vite-env.d.ts","../src/constants/defaultpluginconfig.ts","../src/core/usercbplugin.tsx","../src/factory/rcbpluginfactory.ts","../src/hooks/usechangepath.ts","../src/hooks/usemessagehandler.ts","../src/hooks/useprocessblock.ts","../src/providers/geminiprovider.ts","../src/providers/ollamaprovider.ts","../src/providers/openaiprovider.ts","../src/providers/webllmprovider.ts","../src/types/llmconnectorblock.ts","../src/types/pluginconfig.ts","../src/types/provider.ts","../src/types/provider-config/geminiproviderconfig.ts","../src/types/provider-config/openaiproviderconfig.ts","../src/types/provider-config/webllmproviderconfig.ts","../src/types/provider-message/geminiprovidermessage.ts","../src/types/provider-message/ollamaprovidermessage.ts","../src/types/provider-message/openaiprovidermessage.ts","../src/types/provider-message/webllmprovidermessage.ts","../src/utils/prompthandler.tsx","../src/utils/streamcontroller.ts"],"version":"5.8.3"} \ No newline at end of file 
+{"root":["../src/app.tsx","../src/development.tsx","../src/index.tsx","../src/vite-env.d.ts","../src/constants/defaultpluginconfig.ts","../src/core/usercbplugin.tsx","../src/factory/rcbpluginfactory.ts","../src/hooks/usechangepath.ts","../src/hooks/usemessagehandler.ts","../src/hooks/useprocessblock.ts","../src/providers/geminiprovider.ts","../src/providers/ollamaprovider.ts","../src/providers/openaiprovider.ts","../src/providers/webllmprovider.ts","../src/types/llmconnectorblock.ts","../src/types/pluginconfig.ts","../src/types/provider.ts","../src/types/provider-config/geminiproviderconfig.ts","../src/types/provider-config/ollamaproviderconfig.ts","../src/types/provider-config/openaiproviderconfig.ts","../src/types/provider-config/webllmproviderconfig.ts","../src/types/provider-message/geminiprovidermessage.ts","../src/types/provider-message/ollamaprovidermessage.ts","../src/types/provider-message/openaiprovidermessage.ts","../src/types/provider-message/webllmprovidermessage.ts","../src/utils/prompthandler.tsx","../src/utils/streamcontroller.ts"],"version":"5.8.3"} \ No newline at end of file diff --git a/dist/types/provider-config/OllamaProviderConfig.d.ts b/dist/types/provider-config/OllamaProviderConfig.d.ts new file mode 100644 index 0000000..0520ad2 --- /dev/null +++ b/dist/types/provider-config/OllamaProviderConfig.d.ts @@ -0,0 +1,39 @@ +import { Message } from 'react-chatbotify'; +import { OllamaProviderMessage } from '../provider-message/OllamaProviderMessage'; +/** + * Configurations for OllamaProvider in direct mode. + */ +type DirectConfig = { + mode: 'direct'; + model: string; + apiKey: string; + systemMessage?: string; + responseFormat?: 'stream' | 'json'; + baseUrl?: string; + method?: string; + headers?: Record; + body?: Record; + messageParser?: (messages: Message[]) => OllamaProviderMessage[]; + debug?: boolean; +}; +/** + * Configurations for OllamaProvider in proxy mode. + */ +type ProxyConfig = { + mode: 'proxy'; + model: string; + baseUrl: string; + systemMessage?: string; + responseFormat?: 'stream' | 'json'; + method?: string; + headers?: Record; + body?: Record; + messageParser?: (messages: Message[]) => OllamaProviderMessage[]; + debug?: boolean; +}; +/** + * Combined openai provider configurations. 
+ */ +type OllamaProviderConfig = DirectConfig | ProxyConfig; +export type { OllamaProviderConfig }; +//# sourceMappingURL=OllamaProviderConfig.d.ts.map \ No newline at end of file diff --git a/dist/types/provider-config/OllamaProviderConfig.d.ts.map b/dist/types/provider-config/OllamaProviderConfig.d.ts.map new file mode 100644 index 0000000..2da5729 --- /dev/null +++ b/dist/types/provider-config/OllamaProviderConfig.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"OllamaProviderConfig.d.ts","sourceRoot":"","sources":["../../../src/types/provider-config/OllamaProviderConfig.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,OAAO,EAAE,qBAAqB,EAAE,MAAM,2CAA2C,CAAC;AAElF;;GAEG;AACH,KAAK,YAAY,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF;;GAEG;AACH,KAAK,WAAW,GAAG;IAClB,IAAI,EAAE,OAAO,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IACnC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,aAAa,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,qBAAqB,EAAE,CAAC;IACjE,KAAK,CAAC,EAAE,OAAO,CAAC;CAChB,CAAC;AAEF;;GAEG;AACH,KAAK,oBAAoB,GAAG,YAAY,GAAG,WAAW,CAAC;AAEvD,YAAY,EAAE,oBAAoB,EAAE,CAAC"} \ No newline at end of file diff --git a/dist/types/provider-message/OllamaProviderMessage.d.ts b/dist/types/provider-message/OllamaProviderMessage.d.ts index 11e016e..8e0b286 100644 --- a/dist/types/provider-message/OllamaProviderMessage.d.ts +++ b/dist/types/provider-message/OllamaProviderMessage.d.ts @@ -1 +1,9 @@ +/** + * Message format for OpenAI. 
+ */
+type OllamaProviderMessage = {
+    role: 'user' | 'assistant' | 'system';
+    content: string;
+};
+export type { OllamaProviderMessage };
 //# sourceMappingURL=OllamaProviderMessage.d.ts.map
\ No newline at end of file
diff --git a/dist/types/provider-message/OllamaProviderMessage.d.ts.map b/dist/types/provider-message/OllamaProviderMessage.d.ts.map
index 06ca4ad..159ad2f 100644
--- a/dist/types/provider-message/OllamaProviderMessage.d.ts.map
+++ b/dist/types/provider-message/OllamaProviderMessage.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"OllamaProviderMessage.d.ts","sourceRoot":"","sources":["../../../src/types/provider-message/OllamaProviderMessage.ts"],"names":[],"mappings":""}
\ No newline at end of file
+{"version":3,"file":"OllamaProviderMessage.d.ts","sourceRoot":"","sources":["../../../src/types/provider-message/OllamaProviderMessage.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,KAAK,qBAAqB,GAAG;IAC5B,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,YAAY,EAAE,qBAAqB,EAAE,CAAC"}
\ No newline at end of file
diff --git a/src/App.tsx b/src/App.tsx
index a8514f7..6059188 100644
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -118,7 +118,7 @@ const App = () => {
 					mode: 'direct',
 					model: 'robot',
 					apiKey: '',
-					debug:true,
+					debug: true,
 				}),
 				outputType: 'character',
 				stopConditions: {
diff --git a/src/providers/OllamaProvider.ts b/src/providers/OllamaProvider.ts
index f279c3a..cfb4ff9 100644
--- a/src/providers/OllamaProvider.ts
+++ b/src/providers/OllamaProvider.ts
@@ -1,191 +1,190 @@
-import {Provider} from '../types/Provider';
-import {Message} from 'react-chatbotify';
-import {OpenaiProviderConfig} from '../types/provider-config/OpenaiProviderConfig';
-import {OpenaiProviderMessage} from '../types/provider-message/OpenaiProviderMessage';
+import { Provider } from '../types/Provider';
+import { Message } from 'react-chatbotify';
+import { OpenaiProviderConfig } from '../types/provider-config/OpenaiProviderConfig';
+import { OllamaProviderMessage } from '../types/provider-message/OllamaProviderMessage';
 
 /**
- * Provider for Openai’s API, supports both direct and proxy modes.
+ * Provider for Ollama’s API, supports both direct and proxy modes.
  */
 class OllamaProvider implements Provider {
-    private method!: string;
-    private endpoint!: string;
-    private headers!: Record<string, string>;
-    private body!: Record<string, unknown>;
-    private systemMessage?: string;
-    private responseFormat!: 'stream' | 'json';
-    private messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
-    private debug: boolean = false;
-
-    /**
-     * Sets default values for the provider based on given configuration. Configuration guide here:
-     * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md
-     *
-     * @param config configuration for setup
-     */
-    public constructor(config: OpenaiProviderConfig) {
-        this.method = config.method ?? 'POST';
-        this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/chat';
-        this.systemMessage = config.systemMessage;
-        this.responseFormat = config.responseFormat ?? 'stream';
-        this.messageParser = config.messageParser;
-        this.debug = config.debug ?? false;
-        this.headers = {
-            'Content-Type': 'application/json',
-            Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
-            ...config.headers,
-        };
-        this.body = {
-            model: config.model,
-            stream: this.responseFormat === 'stream',
-            ...config.body,
-        };
-
-        if (config.mode === 'direct') {
-            this.headers = {...this.headers, Authorization: `Bearer ${config.apiKey}`};
-            return;
-        }
-
-        if (config.mode !== 'proxy') {
-            throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy').");
-        }
-    }
-
-    /**
-     * Calls Openai and yields each chunk (or the full text).
-     *
-     * @param messages messages to include in the request
-     */
-    public async* sendMessages(messages: Message[]): AsyncGenerator<string> {
-        if (this.debug) {
-            const sanitizedHeaders = {...this.headers};
-            delete sanitizedHeaders['Authorization'];
-            console.log('[OllamaProvider] Request:', {
-                method: this.method,
-                endpoint: this.endpoint,
-                headers: sanitizedHeaders,
-                body: this.constructBodyWithMessages(messages),
-            });
-        }
-        const res = await fetch(this.endpoint, {
-            method: this.method,
-            headers: this.headers as HeadersInit,
-            body: JSON.stringify(this.constructBodyWithMessages(messages)),
-        });
-
-
-        if (this.debug) {
-            console.log('[OllamaProvider] Response status:', res.status);
-        }
-
-        if (!res.ok) {
-            throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
-        }
-
-        if (this.responseFormat === 'stream') {
-            if (!res.body) {
-                throw new Error('Response body is empty – cannot stream');
-            }
-            const reader = res.body.getReader();
-            for await (const chunk of this.handleStreamResponse(reader)) {
-                yield chunk;
-            }
-        } else {
-            const payload = await res.json();
-            if (this.debug) {
-                console.log('[OllamaProvider] Response body:', payload);
-            }
-            const text = payload.choices?.[0]?.message?.content;
-            if (typeof text === 'string') {
-                yield text;
-            } else {
-                throw new Error('Unexpected response shape – no text candidate');
-            }
-        }
-    }
-
-    /**
-     * Maps the chatbot message sender to the provider message sender.
-     *
-     * @param sender sender from the chatbot
-     */
-    private roleMap = (sender: string): 'system' | 'user' | 'assistant' => {
-        switch (sender) {
-            case 'USER':
-                return 'user';
-            case 'SYSTEM':
-                return 'system';
-            default:
-                return 'assistant';
-        }
-    };
-
-    /**
-     * Builds the full request body.
-     *
-     * @param messages messages to parse
-     */
-    private constructBodyWithMessages = (messages: Message[]) => {
-        let parsedMessages;
-        if (this.messageParser) {
-            parsedMessages = this.messageParser(messages);
-        } else {
-            const filteredMessages = messages.filter(
-                (message) => typeof message.content === 'string' && message.sender.toUpperCase() !== 'SYSTEM'
-            );
-            parsedMessages = filteredMessages.map((message) => {
-                const role = this.roleMap(message.sender.toUpperCase());
-                const text = message.content;
-                return {
-                    role,
-                    content: text,
-                };
-            });
-        }
-
-        // append system message if specified
-        if (this.systemMessage) {
-            parsedMessages = [{role: 'system', content: this.systemMessage}, ...parsedMessages];
-        }
-
-        // Only include model and messages for Ollama
-        return {
-            model: this.body.model,
-            messages: parsedMessages,
-        };
-    };
-
-    /**
-     * Consumes an SSE/text stream Response and yield each text chunk.
-     *
-     * @reader request body reader
-     */
-    private handleStreamResponse = async function* (
-        reader: ReadableStreamDefaultReader<Uint8Array<ArrayBufferLike>>
-    ): AsyncGenerator<string> {
-        const decoder = new TextDecoder('utf-8');
-        let buffer = '';
-
-        while (true) {
-            const {value, done} = await reader.read();
-            if (done) break;
-
-            buffer += decoder.decode(value, {stream: true});
-            const parts = buffer.split(/\r?\n/);
-            buffer = parts.pop()!;
-
-            for (const line of parts) {
-                try {
-                    const event = JSON.parse(line);
-                    if (event.done === true) return;
-                    if (event.message && typeof event.message.content === 'string') {
-                        yield event.message.content;
-                    }
-                } catch (err) {
-                    console.error('Stream parse error', err);
-                }
-            }
-        }
-    };
+	private method!: string;
+	private endpoint!: string;
+	private headers!: Record<string, string>;
+	private body!: Record<string, unknown>;
+	private systemMessage?: string;
+	private responseFormat!: 'stream' | 'json';
+	private messageParser?: (messages: Message[]) => OllamaProviderMessage[];
+	private debug: boolean = false;
+
+	/**
+	 * Sets default values for the provider based on given configuration. Configuration guide here:
+	 * https://github.com/React-ChatBotify-Plugins/llm-connector/blob/main/docs/providers/OpenAI.md
+	 *
+	 * @param config configuration for setup
+	 */
+	public constructor(config: OpenaiProviderConfig) {
+		this.method = config.method ?? 'POST';
+		this.endpoint = config.baseUrl ?? 'http://localhost:11434/api/chat';
+		this.systemMessage = config.systemMessage;
+		this.responseFormat = config.responseFormat ?? 'stream';
+		this.messageParser = config.messageParser;
+		this.debug = config.debug ?? false;
+		this.headers = {
+			'Content-Type': 'application/json',
+			Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
+			...config.headers,
+		};
+		this.body = {
+			model: config.model,
+			stream: this.responseFormat === 'stream',
+			...config.body,
+		};
+
+		if (config.mode === 'direct') {
+			this.headers = { ...this.headers, Authorization: `Bearer ${config.apiKey}` };
+			return;
+		}
+
+		if (config.mode !== 'proxy') {
+			throw Error("Invalid mode specified for Ollama provider ('direct' or 'proxy').");
+		}
+	}
+
+	/**
+	 * Calls Ollama and yields each chunk (or the full text).
+	 *
+	 * @param messages messages to include in the request
+	 */
+	public async *sendMessages(messages: Message[]): AsyncGenerator<string> {
+		if (this.debug) {
+			const sanitizedHeaders = { ...this.headers };
+			delete sanitizedHeaders['Authorization'];
+			console.log('[OllamaProvider] Request:', {
+				method: this.method,
+				endpoint: this.endpoint,
+				headers: sanitizedHeaders,
+				body: this.constructBodyWithMessages(messages),
+			});
+		}
+		const res = await fetch(this.endpoint, {
+			method: this.method,
+			headers: this.headers as HeadersInit,
+			body: JSON.stringify(this.constructBodyWithMessages(messages)),
+		});
+
+		if (this.debug) {
+			console.log('[OllamaProvider] Response status:', res.status);
+		}
+
+		if (!res.ok) {
+			throw new Error(`Ollama API error ${res.status}: ${await res.text()}`);
+		}
+
+		if (this.responseFormat === 'stream') {
+			if (!res.body) {
+				throw new Error('Response body is empty – cannot stream');
+			}
+			const reader = res.body.getReader();
+			for await (const chunk of this.handleStreamResponse(reader)) {
+				yield chunk;
+			}
+		} else {
+			const payload = await res.json();
+			if (this.debug) {
+				console.log('[OllamaProvider] Response body:', payload);
+			}
+			const text = payload.message?.content ?? payload.choices?.[0]?.message?.content; // Ollama's /api/chat returns { message: { content } }
+			if (typeof text === 'string') {
+				yield text;
+			} else {
+				throw new Error('Unexpected response shape – no text candidate');
+			}
+		}
+	}
+
+	/**
+	 * Maps the chatbot message sender to the provider message sender.
+	 *
+	 * @param sender sender from the chatbot
+	 */
+	private roleMap = (sender: string): 'system' | 'user' | 'assistant' => {
+		switch (sender) {
+			case 'USER':
+				return 'user';
+			case 'SYSTEM':
+				return 'system';
+			default:
+				return 'assistant';
+		}
+	};
+
+	/**
+	 * Builds the full request body.
+	 *
+	 * @param messages messages to parse
+	 */
+	private constructBodyWithMessages = (messages: Message[]) => {
+		let parsedMessages;
+		if (this.messageParser) {
+			parsedMessages = this.messageParser(messages);
+		} else {
+			const filteredMessages = messages.filter(
+				(message) => typeof message.content === 'string' && message.sender.toUpperCase() !== 'SYSTEM'
+			);
+			parsedMessages = filteredMessages.map((message) => {
+				const role = this.roleMap(message.sender.toUpperCase());
+				const text = message.content;
+				return {
+					role,
+					content: text,
+				};
+			});
+		}
+
+		// prepend system message if specified
+		if (this.systemMessage) {
+			parsedMessages = [{ role: 'system', content: this.systemMessage }, ...parsedMessages];
+		}
+
+		// Only include model and messages for Ollama
+		return {
+			model: this.body.model,
+			messages: parsedMessages,
+		};
+	};
+
+	/**
+	 * Consumes Ollama's streamed NDJSON response and yields each text chunk.
+	 *
+	 * @param reader request body reader
+	 */
+	private handleStreamResponse = async function* (
+		reader: ReadableStreamDefaultReader<Uint8Array<ArrayBufferLike>>
+	): AsyncGenerator<string> {
+		const decoder = new TextDecoder('utf-8');
+		let buffer = '';
+
+		while (true) {
+			const { value, done } = await reader.read();
+			if (done) break;
+
+			buffer += decoder.decode(value, { stream: true });
+			const parts = buffer.split(/\r?\n/);
+			buffer = parts.pop()!;
+
+			for (const line of parts) {
+				try {
+					const event = JSON.parse(line);
+					if (event.done === true) return;
+					if (event.message && typeof event.message.content === 'string') {
+						yield event.message.content;
+					}
+				} catch (err) {
+					console.error('Stream parse error', err);
+				}
+			}
+		}
+	};
 }
 
 export default OllamaProvider;
diff --git a/src/types/provider-config/OllamaProviderConfig.ts b/src/types/provider-config/OllamaProviderConfig.ts
new file mode 100644
index 0000000..7558194
--- /dev/null
+++ b/src/types/provider-config/OllamaProviderConfig.ts
@@ -0,0 +1,42 @@
+import { Message } from 'react-chatbotify';
+import { OllamaProviderMessage } from '../provider-message/OllamaProviderMessage';
+
+/**
+ * Configurations for OllamaProvider in direct mode.
+ */
+type DirectConfig = {
+	mode: 'direct';
+	model: string;
+	apiKey: string;
+	systemMessage?: string;
+	responseFormat?: 'stream' | 'json';
+	baseUrl?: string;
+	method?: string;
+	headers?: Record<string, string>;
+	body?: Record<string, unknown>;
+	messageParser?: (messages: Message[]) => OllamaProviderMessage[];
+	debug?: boolean;
+};
+
+/**
+ * Configurations for OllamaProvider in proxy mode.
+ */
+type ProxyConfig = {
+	mode: 'proxy';
+	model: string;
+	baseUrl: string;
+	systemMessage?: string;
+	responseFormat?: 'stream' | 'json';
+	method?: string;
+	headers?: Record<string, string>;
+	body?: Record<string, unknown>;
+	messageParser?: (messages: Message[]) => OllamaProviderMessage[];
+	debug?: boolean;
+};
+
+/**
+ * Combined Ollama provider configurations.
+ */
+type OllamaProviderConfig = DirectConfig | ProxyConfig;
+
+export type { OllamaProviderConfig };
diff --git a/src/types/provider-message/OllamaProviderMessage.ts b/src/types/provider-message/OllamaProviderMessage.ts
index 8b13789..bb5d1b1 100644
--- a/src/types/provider-message/OllamaProviderMessage.ts
+++ b/src/types/provider-message/OllamaProviderMessage.ts
@@ -1 +1,9 @@
+/**
+ * Message format for Ollama.
+ */
+type OllamaProviderMessage = {
+	role: 'user' | 'assistant' | 'system';
+	content: string;
+};
+export type { OllamaProviderMessage };
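
For reference, a minimal sketch of how the refactored provider can be wired up once this series is applied, mirroring the App.tsx example touched above. The model name and proxy URL here are placeholders rather than values taken from the patches: direct mode assumes a local Ollama server with the model already pulled, and proxy mode assumes a backend endpoint you host yourself.

	import OllamaProvider from './providers/OllamaProvider';

	// Direct mode: talks straight to a local Ollama server (default
	// endpoint http://localhost:11434/api/chat); apiKey may stay empty
	// since a local Ollama instance requires no authentication.
	const directProvider = new OllamaProvider({
		mode: 'direct',
		model: 'gemma3', // assumes `ollama pull gemma3` was run beforehand
		apiKey: '',
		responseFormat: 'stream',
		debug: true,
	});

	// Proxy mode: routes requests through your own backend so no
	// credentials live in the browser; the baseUrl is hypothetical.
	const proxyProvider = new OllamaProvider({
		mode: 'proxy',
		model: 'gemma3',
		baseUrl: 'https://example.com/api/ollama/chat',
	});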