 // *****************************************************************************

 import { inject } from '@theia/core/shared/inversify';
-import { LanguageModel, LanguageModelRegistry, LanguageModelResponse, UserRequest } from './language-model';
-import { CommunicationRecordingService } from './communication-recording-service';
+import { isLanguageModelStreamResponse, LanguageModel, LanguageModelRegistry, LanguageModelResponse, LanguageModelStreamResponsePart, UserRequest } from './language-model';
+import { LanguageModelExchangeRequest, LanguageModelSession } from './language-model-interaction-model';
+import { Emitter } from '@theia/core';
+
+export interface RequestAddedEvent {
+    type: 'requestAdded',
+    id: string;
+}
+export interface ResponseCompletedEvent {
+    type: 'responseCompleted',
+    requestId: string;
+}
+export type SessionEvent = RequestAddedEvent | ResponseCompletedEvent;

 export const LanguageModelService = Symbol('LanguageModelService');

 export interface LanguageModelService {
+    onSessionChanged: Emitter<SessionEvent>['event'];
+    /**
+     * Collection of all recorded LanguageModelSessions.
+     */
+    sessions: LanguageModelSession[];
     /**
-     * Submit a language model request in the context of the given `chatRequest`.
+     * Submit a language model request; it will automatically be recorded within a LanguageModelSession.
      */
     sendRequest(
         languageModel: LanguageModel,
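
For context, a minimal consumer sketch of the extended `LanguageModelService` API introduced above: it subscribes to `onSessionChanged` and reads the recorded `sessions`. The contribution class, its `initialize` hook, and the import path are illustrative assumptions, not part of this change.

```ts
// Hypothetical consumer sketch: names and import path are assumptions for illustration.
import { inject, injectable } from '@theia/core/shared/inversify';
import { LanguageModelService, SessionEvent } from './language-model-service';

@injectable()
export class SessionLoggingContribution {
    @inject(LanguageModelService)
    protected readonly languageModelService: LanguageModelService;

    initialize(): void {
        // React to every recorded request and completed response.
        this.languageModelService.onSessionChanged((event: SessionEvent) => {
            if (event.type === 'requestAdded') {
                console.log(`Recorded new request ${event.id}`);
            } else {
                console.log(`Response for request ${event.requestId} completed`);
            }
        });
        // All recorded sessions are available synchronously.
        console.log(`Recorded sessions so far: ${this.languageModelService.sessions.length}`);
    }
}
```
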
@@ -33,8 +49,10 @@ export class LanguageModelServiceImpl implements LanguageModelService {
     @inject(LanguageModelRegistry)
     protected languageModelRegistry: LanguageModelRegistry;

-    @inject(CommunicationRecordingService)
-    protected recordingService: CommunicationRecordingService;
+    sessions: LanguageModelSession[] = [];
+
+    protected sessionChangedEmitter = new Emitter<SessionEvent>();
+    onSessionChanged = this.sessionChangedEmitter.event;

     async sendRequest(
         languageModel: LanguageModel,
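
The `sessions` field introduced above is populated by `storeRequest` in the next hunk. As rough orientation, a sketch of the interaction-model shape that this usage implies; the authoritative interfaces live in `./language-model-interaction-model` and may differ in detail, so the names below carry a `Sketch` suffix to mark them as illustrative.

```ts
// Rough sketch of the interaction model assumed here, inferred from how
// storeRequest populates it below; not the real definitions.
interface LanguageModelExchangeRequestSketch {
    id: string;                 // subRequestId if present, otherwise requestId
    request: unknown;           // the original UserRequest
    languageModel: string;      // id of the model that served the request
    response: unknown;          // stream responses are stored as the collected parts
    metadata: { agent?: string; timestamp?: number; [key: string]: unknown };
}
interface LanguageModelExchangeSketch {
    id: string;                 // corresponds to UserRequest.requestId
    requests: LanguageModelExchangeRequestSketch[];
    metadata: { agent?: string; [key: string]: unknown };
}
interface LanguageModelSessionSketch {
    id: string;                 // corresponds to UserRequest.sessionId
    exchanges: LanguageModelExchangeSketch[];
}
```
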
@@ -53,7 +71,84 @@ export class LanguageModelServiceImpl implements LanguageModelService {
             return true;
         });

-        return languageModel.request(languageModelRequest, languageModelRequest.cancellationToken);
+        let response = await languageModel.request(languageModelRequest, languageModelRequest.cancellationToken);
+        let storedResponse: LanguageModelExchangeRequest['response'];
+        if (isLanguageModelStreamResponse(response)) {
+            const parts: LanguageModelStreamResponsePart[] = [];
+            response = {
+                ...response,
+                stream: createLoggingAsyncIterable(response.stream,
+                    parts,
+                    () => this.sessionChangedEmitter.fire({ type: 'responseCompleted', requestId: languageModelRequest.subRequestId ?? languageModelRequest.requestId }))
+            };
+            storedResponse = { parts };
+        } else {
+            storedResponse = response;
+        }
+        this.storeRequest(languageModel, languageModelRequest, storedResponse);
+
+        return response;
     }

+    protected storeRequest(languageModel: LanguageModel, languageModelRequest: UserRequest, response: LanguageModelExchangeRequest['response']): void {
+        // Find or create the session for this request
+        let session = this.sessions.find(s => s.id === languageModelRequest.sessionId);
+        if (!session) {
+            session = {
+                id: languageModelRequest.sessionId,
+                exchanges: []
+            };
+            this.sessions.push(session);
+        }
+
+        // Find or create the exchange for this request
+        let exchange = session.exchanges.find(r => r.id === languageModelRequest.requestId);
+        if (!exchange) {
+            exchange = {
+                id: languageModelRequest.requestId,
+                requests: [],
+                metadata: { agent: languageModelRequest.agentId }
+            };
+            session.exchanges.push(exchange);
+        }
+
+        // Create and add the LanguageModelExchangeRequest to the exchange
+        const exchangeRequest: LanguageModelExchangeRequest = {
+            id: languageModelRequest.subRequestId ?? languageModelRequest.requestId,
+            request: languageModelRequest,
+            languageModel: languageModel.id,
+            response: response,
+            metadata: {}
+        };
+
+        exchange.requests.push(exchangeRequest);
+
+        exchangeRequest.metadata.agent = languageModelRequest.agentId;
+        exchangeRequest.metadata.timestamp = Date.now();
+
+        this.sessionChangedEmitter.fire({ type: 'requestAdded', id: languageModelRequest.subRequestId ?? languageModelRequest.requestId });
+    }
+
+}
+
+/**
+ * Creates an AsyncIterable wrapper that stores each yielded item while preserving the
+ * original AsyncIterable behavior.
+ */
+async function* createLoggingAsyncIterable(
+    stream: AsyncIterable<LanguageModelStreamResponsePart>,
+    parts: LanguageModelStreamResponsePart[],
+    streamFinished: () => void
+): AsyncIterable<LanguageModelStreamResponsePart> {
+    try {
+        for await (const part of stream) {
+            parts.push(part);
+            yield part;
+        }
+    } catch (error) {
+        parts.push({ content: `[NOT FROM LLM] An error occurred: ${error.message}` });
+        throw error;
+    } finally {
+        streamFinished();
+    }
 }
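
To illustrate the wrapping technique, a small sketch of how `createLoggingAsyncIterable` behaves from a consumer's perspective: iteration is unchanged, every yielded part is also captured, and the completion callback fires once the stream ends. The mock stream and demo function are assumptions for illustration and would have to live in this module, since the helper is not exported.

```ts
// Illustration only: a fake stream standing in for a real LanguageModelStreamResponse.stream.
async function* mockStream(): AsyncIterable<LanguageModelStreamResponsePart> {
    yield { content: 'Hello' };
    yield { content: ' world' };
}

async function demoLoggingIterable(): Promise<void> {
    const recorded: LanguageModelStreamResponsePart[] = [];
    const wrapped = createLoggingAsyncIterable(mockStream(), recorded, () => console.log('stream finished'));
    for await (const part of wrapped) {
        // The consumer still receives every part unchanged...
        console.log('received:', part);
    }
    // ...while each part was also captured for the recorded session.
    console.log('captured parts:', recorded.length); // 2
}
```
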