Skip to content

Commit 6d1a4a1

Browse files
authored
feat: add generic AI communication model (#15409)
Adds a new AI communication model for tracking raw LLM requests and responses. Tracking is automated via the language-model-service. This model serves as the foundation for:
- Extracting communication history from Theia AI, allowing LLM testing without running the full Theia-based application
- Replaying LLM communication without using an actual LLM, enabling deterministic integration tests
- Removing the clunky communication recording service, making the ai-history package optional

Resolves #15221

Contributed on behalf of STMicroelectronics
1 parent 95d4069 commit 6d1a4a1

File tree

5 files changed

+219
-9
lines changed

5 files changed

+219
-9
lines changed

packages/ai-chat/src/common/chat-session-naming-service.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ export class ChatSessionNamingAgent implements Agent {
106106

107107
const sessionId = generateUuid();
108108
const requestId = generateUuid();
109-
const request: UserRequest = {
109+
const request: UserRequest & { agentId: string } = {
110110
messages: [{
111111
actor: 'user',
112112
text: message,

packages/ai-core/src/browser/frontend-language-model-service.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ export class FrontendLanguageModelServiceImpl extends LanguageModelServiceImpl {
5454
}
5555
}
5656

57-
export const mergeRequestSettings = (requestSettings: RequestSetting[], modelId: string, providerId: string, agentId: string): RequestSetting => {
57+
export const mergeRequestSettings = (requestSettings: RequestSetting[], modelId: string, providerId: string, agentId?: string): RequestSetting => {
5858
const prioritizedSettings = Prioritizeable.prioritizeAllSync(requestSettings,
5959
setting => getRequestSettingSpecificity(setting, {
6060
modelId,
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
// *****************************************************************************
2+
// Copyright (C) 2025 STMicroelectronics and others.
3+
//
4+
// This program and the accompanying materials are made available under the
5+
// terms of the Eclipse Public License v. 2.0 which is available at
6+
// http://www.eclipse.org/legal/epl-2.0.
7+
//
8+
// This Source Code may also be made available under the following Secondary
9+
// Licenses when the conditions for such availability set forth in the Eclipse
10+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
11+
// with the GNU Classpath Exception which is available at
12+
// https://www.gnu.org/software/classpath/license.html.
13+
//
14+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
15+
// *****************************************************************************
16+
import {
17+
LanguageModelParsedResponse,
18+
LanguageModelRequest,
19+
LanguageModelStreamResponsePart,
20+
LanguageModelTextResponse
21+
} from './language-model';
22+
23+
/**
24+
* A session tracking raw exchanges with language models, organized into exchange units.
25+
*/
26+
export interface LanguageModelSession {
27+
/**
28+
* Identifier of this Language Model Session. Corresponds to Chat session ids
29+
*/
30+
id: string;
31+
/**
32+
* All exchange units part of this session
33+
*/
34+
exchanges: LanguageModelExchange[];
35+
}
36+
37+
/**
38+
* An exchange unit representing a logical operation which may involve multiple model requests.
39+
*/
40+
export interface LanguageModelExchange {
41+
/**
42+
* Identifier of the exchange unit.
43+
*/
44+
id: string;
45+
/**
46+
* All requests that constitute this exchange
47+
*/
48+
requests: LanguageModelExchangeRequest[];
49+
/**
50+
* Arbitrary metadata for the exchange
51+
*/
52+
metadata: {
53+
agent?: string;
54+
[key: string]: unknown;
55+
}
56+
}
57+
58+
/**
59+
* Alternative to the LanguageModelStreamResponse, suited for inspection
60+
*/
61+
export interface LanguageModelMonitoredStreamResponse {
62+
parts: LanguageModelStreamResponsePart[];
63+
}
64+
65+
/**
66+
* Represents a request to a language model within an exchange unit, capturing the request and its response.
67+
*/
68+
export interface LanguageModelExchangeRequest {
69+
/**
70+
* Identifier of the request. Might share the id with the parent exchange if there's only one request.
71+
*/
72+
id: string;
73+
/**
74+
* The actual request sent to the language model
75+
*/
76+
request: LanguageModelRequest;
77+
/**
78+
* Arbitrary metadata for the request. Might contain an agent id and timestamp.
79+
*/
80+
metadata: {
81+
agent?: string;
82+
timestamp?: number;
83+
[key: string]: unknown;
84+
};
85+
/**
86+
* The identifier of the language model the request was sent to
87+
*/
88+
languageModel: string;
89+
/**
90+
* The recorded response
91+
*/
92+
response: LanguageModelTextResponse | LanguageModelParsedResponse | LanguageModelMonitoredStreamResponse;
93+
}

packages/ai-core/src/common/language-model-service.ts

Lines changed: 101 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,29 @@
1515
// *****************************************************************************
1616

1717
import { inject } from '@theia/core/shared/inversify';
18-
import { LanguageModel, LanguageModelRegistry, LanguageModelResponse, UserRequest } from './language-model';
19-
import { CommunicationRecordingService } from './communication-recording-service';
18+
import { isLanguageModelStreamResponse, LanguageModel, LanguageModelRegistry, LanguageModelResponse, LanguageModelStreamResponsePart, UserRequest } from './language-model';
19+
import { LanguageModelExchangeRequest, LanguageModelSession } from './language-model-interaction-model';
20+
import { Emitter } from '@theia/core';
21+
22+
export interface RequestAddedEvent {
23+
type: 'requestAdded',
24+
id: string;
25+
}
26+
export interface ResponseCompletedEvent {
27+
type: 'responseCompleted',
28+
requestId: string;
29+
}
30+
export type SessionEvent = RequestAddedEvent | ResponseCompletedEvent;
2031

2132
export const LanguageModelService = Symbol('LanguageModelService');
2233
export interface LanguageModelService {
34+
onSessionChanged: Emitter<SessionEvent>['event'];
35+
/**
36+
* Collection of all recorded LanguageModelSessions.
37+
*/
38+
sessions: LanguageModelSession[];
2339
/**
24-
* Submit a language model request in the context of the given `chatRequest`.
40+
* Submit a language model request, it will automatically be recorded within a LanguageModelSession.
2541
*/
2642
sendRequest(
2743
languageModel: LanguageModel,
@@ -33,8 +49,10 @@ export class LanguageModelServiceImpl implements LanguageModelService {
3349
@inject(LanguageModelRegistry)
3450
protected languageModelRegistry: LanguageModelRegistry;
3551

36-
@inject(CommunicationRecordingService)
37-
protected recordingService: CommunicationRecordingService;
52+
sessions: LanguageModelSession[] = [];
53+
54+
protected sessionChangedEmitter = new Emitter<SessionEvent>();
55+
onSessionChanged = this.sessionChangedEmitter.event;
3856

3957
async sendRequest(
4058
languageModel: LanguageModel,
@@ -53,7 +71,84 @@ export class LanguageModelServiceImpl implements LanguageModelService {
5371
return true;
5472
});
5573

56-
return languageModel.request(languageModelRequest, languageModelRequest.cancellationToken);
74+
let response = await languageModel.request(languageModelRequest, languageModelRequest.cancellationToken);
75+
let storedResponse: LanguageModelExchangeRequest['response'];
76+
if (isLanguageModelStreamResponse(response)) {
77+
const parts: LanguageModelStreamResponsePart[] = [];
78+
response = {
79+
...response,
80+
stream: createLoggingAsyncIterable(response.stream,
81+
parts,
82+
() => this.sessionChangedEmitter.fire({ type: 'responseCompleted', requestId: languageModelRequest.subRequestId ?? languageModelRequest.requestId }))
83+
};
84+
storedResponse = { parts };
85+
} else {
86+
storedResponse = response;
87+
}
88+
this.storeRequest(languageModel, languageModelRequest, storedResponse);
89+
90+
return response;
5791
}
5892

93+
protected storeRequest(languageModel: LanguageModel, languageModelRequest: UserRequest, response: LanguageModelExchangeRequest['response']): void {
94+
// Find or create the session for this request
95+
let session = this.sessions.find(s => s.id === languageModelRequest.sessionId);
96+
if (!session) {
97+
session = {
98+
id: languageModelRequest.sessionId,
99+
exchanges: []
100+
};
101+
this.sessions.push(session);
102+
}
103+
104+
// Find or create the exchange for this request
105+
let exchange = session.exchanges.find(r => r.id === languageModelRequest.requestId);
106+
if (!exchange) {
107+
exchange = {
108+
id: languageModelRequest.requestId,
109+
requests: [],
110+
metadata: { agent: languageModelRequest.agentId }
111+
};
112+
session.exchanges.push(exchange);
113+
}
114+
115+
// Create and add the LanguageModelExchangeRequest to the exchange
116+
const exchangeRequest: LanguageModelExchangeRequest = {
117+
id: languageModelRequest.subRequestId ?? languageModelRequest.requestId,
118+
request: languageModelRequest,
119+
languageModel: languageModel.id,
120+
response: response,
121+
metadata: {}
122+
};
123+
124+
exchange.requests.push(exchangeRequest);
125+
126+
exchangeRequest.metadata.agent = languageModelRequest.agentId;
127+
exchangeRequest.metadata.timestamp = Date.now();
128+
129+
this.sessionChangedEmitter.fire({ type: 'requestAdded', id: languageModelRequest.subRequestId ?? languageModelRequest.requestId });
130+
}
131+
132+
}
133+
134+
/**
135+
* Creates an AsyncIterable wrapper that stores each yielded item while preserving the
136+
* original AsyncIterable behavior.
137+
*/
138+
async function* createLoggingAsyncIterable(
139+
stream: AsyncIterable<LanguageModelStreamResponsePart>,
140+
parts: LanguageModelStreamResponsePart[],
141+
streamFinished: () => void
142+
): AsyncIterable<LanguageModelStreamResponsePart> {
143+
try {
144+
for await (const part of stream) {
145+
parts.push(part);
146+
yield part;
147+
}
148+
} catch (error) {
149+
parts.push({ content: `[NOT FROM LLM] An error occurred: ${error.message}` });
150+
throw error;
151+
} finally {
152+
streamFinished();
153+
}
59154
}

packages/ai-core/src/common/language-model.ts

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -159,10 +159,32 @@ export interface ResponseFormatJsonSchema {
159159
};
160160
}
161161

162+
/**
163+
* The UserRequest extends the "pure" LanguageModelRequest for cancelling support as well as
164+
* logging metadata.
165+
* The additional metadata might also be used for other use cases, for example to query default
166+
* request settings based on the agent id, merging with the request settings handed over.
167+
*/
162168
export interface UserRequest extends LanguageModelRequest {
169+
/**
170+
* Identifier of the AI/Chat session
171+
*/
163172
sessionId: string;
173+
/**
174+
* Identifier of the semantic request. Corresponds to request id in Chat sessions
175+
*/
164176
requestId: string;
165-
agentId: string;
177+
/**
178+
* Id of a sub request in case a semantic request consists of multiple sub requests
179+
*/
180+
subRequestId?: string;
181+
/**
182+
* Optional agent identifier in case the request was sent by an agent
183+
*/
184+
agentId?: string;
185+
/**
186+
* Cancellation support
187+
*/
166188
cancellationToken?: CancellationToken;
167189
}
168190

0 commit comments

Comments
 (0)