
Commit 8784dee

ThomasVitale authored and tzolov committed
Update convention for chat finish reason in LLM observations
Signed-off-by: Thomas Vitale <ThomasVitale@users.noreply.github.com>
1 parent 689e458 commit 8784dee
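
As the diffs below show, the observation convention replaces the single-valued RESPONSE_FINISH_REASON key (gen_ai.response.finish_reason) with RESPONSE_FINISH_REASONS (gen_ai.response.finish_reasons), whose value lists one finish reason per generation, e.g. ["STOP"] for a single-generation response.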

5 files changed: +23 additions, -19 deletions

models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelObservationIT.java

Lines changed: 1 addition & 2 deletions
@@ -97,8 +97,7 @@ void observationForEmbeddingOperation() {
 			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.REQUEST_TOP_K.asString(), KeyValue.NONE_VALUE)
 			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.REQUEST_TOP_P.asString(), "1.0")
 			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.RESPONSE_ID.asString(), responseMetadata.getId())
-			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.RESPONSE_FINISH_REASON.asString(),
-					chatResponse.getResult().getMetadata().getFinishReason())
+			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.RESPONSE_FINISH_REASONS.asString(), "[\"STOP\"]")
 			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.USAGE_INPUT_TOKENS.asString(),
 					String.valueOf(responseMetadata.getUsage().getPromptTokens()))
 			.hasHighCardinalityKeyValue(HighCardinalityKeyNames.USAGE_OUTPUT_TOKENS.asString(),

spring-ai-core/src/main/java/org/springframework/ai/chat/observation/ChatModelObservationDocumentation.java

Lines changed: 4 additions & 3 deletions
@@ -177,12 +177,13 @@ public String asString() {
 		// Response
 
 		/**
-		 * Final reason the model stopped generating tokens.
+		 * Reasons the model stopped generating tokens, corresponding to each generation
+		 * received.
 		 */
-		RESPONSE_FINISH_REASON {
+		RESPONSE_FINISH_REASONS {
 			@Override
 			public String asString() {
-				return AiObservationAttributes.RESPONSE_FINISH_REASON.value();
+				return AiObservationAttributes.RESPONSE_FINISH_REASONS.value();
 			}
 		},

spring-ai-core/src/main/java/org/springframework/ai/chat/observation/DefaultChatModelObservationConvention.java

Lines changed: 14 additions & 10 deletions
@@ -17,6 +17,7 @@
 
 import io.micrometer.common.KeyValue;
 import io.micrometer.common.KeyValues;
+import org.springframework.util.CollectionUtils;
 
 import java.util.StringJoiner;
 
@@ -52,8 +53,8 @@ public class DefaultChatModelObservationConvention implements ChatModelObservati
 	private static final KeyValue REQUEST_TOP_P_NONE = KeyValue
 		.of(ChatModelObservationDocumentation.HighCardinalityKeyNames.REQUEST_TOP_P, KeyValue.NONE_VALUE);
 
-	private static final KeyValue RESPONSE_FINISH_REASON_NONE = KeyValue
-		.of(ChatModelObservationDocumentation.HighCardinalityKeyNames.RESPONSE_FINISH_REASON, KeyValue.NONE_VALUE);
+	private static final KeyValue RESPONSE_FINISH_REASONS_NONE = KeyValue
+		.of(ChatModelObservationDocumentation.HighCardinalityKeyNames.RESPONSE_FINISH_REASONS, KeyValue.NONE_VALUE);
 
 	private static final KeyValue RESPONSE_ID_NONE = KeyValue
 		.of(ChatModelObservationDocumentation.HighCardinalityKeyNames.RESPONSE_ID, KeyValue.NONE_VALUE);
@@ -114,7 +115,7 @@ protected KeyValue responseModel(ChatModelObservationContext context) {
 	public KeyValues getHighCardinalityKeyValues(ChatModelObservationContext context) {
 		return KeyValues.of(requestFrequencyPenalty(context), requestMaxTokens(context),
 				requestPresencePenalty(context), requestStopSequences(context), requestTemperature(context),
-				requestTopK(context), requestTopP(context), responseFinishReason(context), responseId(context),
+				requestTopK(context), requestTopP(context), responseFinishReasons(context), responseId(context),
 				usageInputTokens(context), usageOutputTokens(context), usageTotalTokens(context));
 	}
 
@@ -182,14 +183,17 @@ protected KeyValue requestTopP(ChatModelObservationContext context) {
 
 	// Response
 
-	protected KeyValue responseFinishReason(ChatModelObservationContext context) {
-		if (context.getResponse() != null && context.getResponse().getResult() != null
-				&& context.getResponse().getResult().getMetadata() != null
-				&& context.getResponse().getResult().getMetadata().getFinishReason() != null) {
-			return KeyValue.of(ChatModelObservationDocumentation.HighCardinalityKeyNames.RESPONSE_FINISH_REASON,
-					context.getResponse().getResult().getMetadata().getFinishReason());
+	protected KeyValue responseFinishReasons(ChatModelObservationContext context) {
+		if (context.getResponse() != null && !CollectionUtils.isEmpty(context.getResponse().getResults())) {
+			StringJoiner finishReasonsJoiner = new StringJoiner(", ", "[", "]");
+			context.getResponse()
+				.getResults()
+				.forEach(generation -> finishReasonsJoiner
+					.add("\"" + generation.getMetadata().getFinishReason() + "\""));
+			return KeyValue.of(ChatModelObservationDocumentation.HighCardinalityKeyNames.RESPONSE_FINISH_REASONS,
+					finishReasonsJoiner.toString());
 		}
-		return RESPONSE_FINISH_REASON_NONE;
+		return RESPONSE_FINISH_REASONS_NONE;
 	}
 
 	protected KeyValue responseId(ChatModelObservationContext context) {

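For reference, a minimal stand-alone sketch of the formatting that the new responseFinishReasons(...) method produces (hypothetical class and method names, with plain strings standing in for the finish reasons of each Generation in a ChatResponse):

// Minimal sketch, not part of the commit: mirrors the bracket-and-quote joining
// used by responseFinishReasons(...), applied to a plain list of strings.
import java.util.List;
import java.util.StringJoiner;

class FinishReasonsFormatSketch {

	static String format(List<String> finishReasons) {
		// One quoted entry per finish reason, comma-separated, wrapped in brackets.
		StringJoiner joiner = new StringJoiner(", ", "[", "]");
		finishReasons.forEach(reason -> joiner.add("\"" + reason + "\""));
		return joiner.toString();
	}

	public static void main(String[] args) {
		System.out.println(format(List.of("STOP")));           // ["STOP"]
		System.out.println(format(List.of("STOP", "LENGTH"))); // ["STOP", "LENGTH"]
	}
}

This is the same shape as the ["STOP"] value asserted in the updated OpenAI integration test and the ["this-is-the-end"] value in the convention unit test below.
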
spring-ai-core/src/main/java/org/springframework/ai/observation/conventions/AiObservationAttributes.java

Lines changed: 2 additions & 2 deletions
@@ -100,9 +100,9 @@ public enum AiObservationAttributes {
 	// GenAI Response
 
 	/**
-	 * Final reason the model stopped generating tokens.
+	 * Reasons the model stopped generating tokens, corresponding to each generation received.
 	 */
-	RESPONSE_FINISH_REASON("gen_ai.response.finish_reason"),
+	RESPONSE_FINISH_REASONS("gen_ai.response.finish_reasons"),
 	/**
 	 * The unique identifier for the AI response.
 	 */

spring-ai-core/src/test/java/org/springframework/ai/chat/observation/DefaultChatModelObservationConventionTests.java

Lines changed: 2 additions & 2 deletions
@@ -117,7 +117,7 @@ void shouldHaveOptionalKeyValues() {
 				KeyValue.of(HighCardinalityKeyNames.REQUEST_TEMPERATURE.asString(), "0.5"),
 				KeyValue.of(HighCardinalityKeyNames.REQUEST_TOP_K.asString(), "1"),
 				KeyValue.of(HighCardinalityKeyNames.REQUEST_TOP_P.asString(), "0.9"),
-				KeyValue.of(HighCardinalityKeyNames.RESPONSE_FINISH_REASON.asString(), "this-is-the-end"),
+				KeyValue.of(HighCardinalityKeyNames.RESPONSE_FINISH_REASONS.asString(), "[\"this-is-the-end\"]"),
 				KeyValue.of(HighCardinalityKeyNames.RESPONSE_ID.asString(), "say33"),
 				KeyValue.of(HighCardinalityKeyNames.USAGE_INPUT_TOKENS.asString(), "1000"),
 				KeyValue.of(HighCardinalityKeyNames.USAGE_OUTPUT_TOKENS.asString(), "500"),
@@ -141,7 +141,7 @@ void shouldHaveMissingKeyValues() {
 				KeyValue.of(HighCardinalityKeyNames.REQUEST_TEMPERATURE.asString(), KeyValue.NONE_VALUE),
 				KeyValue.of(HighCardinalityKeyNames.REQUEST_TOP_K.asString(), KeyValue.NONE_VALUE),
 				KeyValue.of(HighCardinalityKeyNames.REQUEST_TOP_P.asString(), KeyValue.NONE_VALUE),
-				KeyValue.of(HighCardinalityKeyNames.RESPONSE_FINISH_REASON.asString(), KeyValue.NONE_VALUE),
+				KeyValue.of(HighCardinalityKeyNames.RESPONSE_FINISH_REASONS.asString(), KeyValue.NONE_VALUE),
 				KeyValue.of(HighCardinalityKeyNames.RESPONSE_ID.asString(), KeyValue.NONE_VALUE),
 				KeyValue.of(HighCardinalityKeyNames.USAGE_INPUT_TOKENS.asString(), KeyValue.NONE_VALUE),
 				KeyValue.of(HighCardinalityKeyNames.USAGE_OUTPUT_TOKENS.asString(), KeyValue.NONE_VALUE),
