diff --git a/.github/workflows/typescript-CI.yaml b/.github/workflows/typescript-CI.yaml index 303cc1116..530912e2d 100644 --- a/.github/workflows/typescript-CI.yaml +++ b/.github/workflows/typescript-CI.yaml @@ -49,7 +49,7 @@ jobs: version: 9.8.0 - name: Install Dependencies working-directory: ./js - run: pnpm install --frozen-lockfile --no-optional -r + run: pnpm install --frozen-lockfile -r - name: Pre-Build working-directory: ./js run: pnpm run -r prebuild diff --git a/js/.changeset/fifty-shirts-design.md b/js/.changeset/fifty-shirts-design.md new file mode 100644 index 000000000..19211bc99 --- /dev/null +++ b/js/.changeset/fifty-shirts-design.md @@ -0,0 +1,8 @@ +--- +"@arizeai/openinference-mastra": major +"@arizeai/openinference-vercel": minor +--- + +feat: Mastra instrumentation + +Initial instrumentation for Mastra, adhering to OpenInference semantic conventions diff --git a/js/.changeset/many-needles-create.md b/js/.changeset/many-needles-create.md new file mode 100644 index 000000000..6a2030339 --- /dev/null +++ b/js/.changeset/many-needles-create.md @@ -0,0 +1,5 @@ +--- +"@arizeai/openinference-vercel": minor +--- + +feat: Instrument tool calls and results from multi-part content messages diff --git a/js/.prettierignore b/js/.prettierignore index ce8171869..4758877d8 100644 --- a/js/.prettierignore +++ b/js/.prettierignore @@ -1,3 +1,5 @@ pnpm-lock.yaml dist .next +__snapshots__ +__fixtures__ \ No newline at end of file diff --git a/js/packages/openinference-mastra/CHANGELOG.md b/js/packages/openinference-mastra/CHANGELOG.md new file mode 100644 index 000000000..e8bec9139 --- /dev/null +++ b/js/packages/openinference-mastra/CHANGELOG.md @@ -0,0 +1,110 @@ +# @arizeai/openinference-vercel + +## 2.0.3 + +### Patch Changes + +- Updated dependencies [ae5cd15] + - @arizeai/openinference-semantic-conventions@1.1.0 + - @arizeai/openinference-core@1.0.2 + +## 2.0.2 + +### Patch Changes + +- Updated dependencies [c4e2252] + - @arizeai/openinference-semantic-conventions@1.0.1 + - @arizeai/openinference-core@1.0.1 + +## 2.0.1 + +### Patch Changes + +- 365a3c2: Updated the OpenInference semantic convention mapping to account for changes to the Vercel AI SDK semantic conventions + +## 2.0.0 + +### Major Changes + +- 16a3815: ESM support + + Packages are now shipped as "Dual Package" meaning that ESM and CJS module resolution + should be supported for each package. + + Support is described as "experimental" because opentelemetry describes support for autoinstrumenting + ESM projects as "ongoing". 
See https://github.com/open-telemetry/opentelemetry-js/blob/61d5a0e291db26c2af638274947081b29db3f0ca/doc/esm-support.md + +### Patch Changes + +- Updated dependencies [16a3815] + - @arizeai/openinference-semantic-conventions@1.0.0 + - @arizeai/openinference-core@1.0.0 + +## 1.2.2 + +### Patch Changes + +- Updated dependencies [1188c6d] + - @arizeai/openinference-semantic-conventions@0.14.0 + - @arizeai/openinference-core@0.3.3 + +## 1.2.1 + +### Patch Changes + +- Updated dependencies [710d1d3] + - @arizeai/openinference-semantic-conventions@0.13.0 + - @arizeai/openinference-core@0.3.2 + +## 1.2.0 + +### Minor Changes + +- a0e6f30: Support tool_call_id and tool_call.id + +### Patch Changes + +- Updated dependencies [a0e6f30] + - @arizeai/openinference-semantic-conventions@0.12.0 + - @arizeai/openinference-core@0.3.1 + +## 1.1.0 + +### Minor Changes + +- a96fbd5: Add readme documentation + +### Patch Changes + +- Updated dependencies [f965410] +- Updated dependencies [712b9da] +- Updated dependencies [d200d85] + - @arizeai/openinference-semantic-conventions@0.11.0 + - @arizeai/openinference-core@0.3.0 + +## 1.0.0 + +### Major Changes + +- 4f9246f: migrate OpenInferenceSpanProcessor to OpenInferenceSimpleSpanProcessor and OpenInferenceBatchSpanProcessor to allow for filtering exported spans + +## 0.1.1 + +### Patch Changes + +- 3b8702a: remove generic log from withSafety and add onError callback +- ff2668c: caputre input and output for tools, fix double count of tokens on llm spans / chains +- Updated dependencies [3b8702a] + - @arizeai/openinference-core@0.2.0 + +## 0.1.0 + +### Minor Changes + +- 97ca03b: add OpenInferenceSpanProcessor to transform Vercel AI SDK Spans to conform to the OpenInference spec + +### Patch Changes + +- Updated dependencies [ba142d5] + - @arizeai/openinference-semantic-conventions@0.10.0 + - @arizeai/openinference-core@0.1.1 diff --git a/js/packages/openinference-mastra/README.md b/js/packages/openinference-mastra/README.md new file mode 100644 index 000000000..0a1caa0ed --- /dev/null +++ b/js/packages/openinference-mastra/README.md @@ -0,0 +1,134 @@ +# OpenInference Mastra + +[![npm version](https://badge.fury.io/js/@arizeai%2Fopeninference-mastra.svg)](https://badge.fury.io/js/@arizeai%2Fopeninference-mastra) + +This package provides a set of utilities to ingest [Mastra](https://github.com/mastra-ai/mastra) spans into platforms like [Arize](https://arize.com/) and [Arize Phoenix](https://phoenix.arize.com/). + +## Installation + +```shell +npm install --save @arizeai/openinference-mastra +``` + +A typical Mastra project will already have OpenTelemetry and related packages installed, so you will likely not need to install any additional packages. + +## Usage + +`@arizeai/openinference-mastra` provides a set of utilities to help you ingest Mastra spans into the Phoenix platform (and any other OpenInference-compatible platform) and works in conjunction with Mastra's OpenTelemetry support. To get started, you will need to add OpenTelemetry support to your Mastra project according to the [Mastra Observability guide](https://mastra.ai/en/reference/observability/providers), or, follow along with the rest of this README. + +To process your Mastra spans add an `OpenInferenceOTLPTraceExporter` to your `telemetry` configuration within your `Mastra` instance. 
+ +Here is an example of how to configure a `Mastra` instance to export spans to Arize Phoenix: + +```shell +# Set the Phoenix collector endpoint and API key in your environment +export PHOENIX_COLLECTOR_ENDPOINT="https://localhost:6006/v1/traces" +export PHOENIX_API_KEY="your-api-key" +``` + +```typescript +import { Mastra } from "@mastra/core"; +import { + OpenInferenceOTLPTraceExporter, + isOpenInferenceSpan, +} from "@arizeai/openinference-mastra"; + +export const mastra = new Mastra({ + // ... other config + telemetry: { + serviceName: "openinference-mastra-agent", // you can rename this to whatever you want to appear in the Phoenix UI + enabled: true, + export: { + type: "custom", + exporter: new OpenInferenceOTLPTraceExporter({ + url: process.env.PHOENIX_COLLECTOR_ENDPOINT, + headers: { + Authorization: `Bearer ${process.env.PHOENIX_API_KEY}`, + }, + // optional: filter out http, and other node service specific spans + // they will still be exported to Mastra, but not to the target of + // this exporter + spanFilter: isOpenInferenceSpan, + }), + }, + }, +}); +``` + +For general details on Mastra's OpenTelemetry support see the [Mastra Observability guide](https://mastra.ai/en/docs/observability/tracing). + +## Examples + +### Weather Agent + +To setup the canonical Mastra weather agent example, and then ingest the spans into Arize Phoenix (or any other OpenInference-compatible platform), follow the steps below. + +- Create a new Mastra project + +```shell +npm create mastra@latest +# answer the prompts, include agent, tools, and the example when asked +cd chosen-project-name +npm install --save @arizeai/openinference-mastra +# export some variables for mastra to use later on +export PHOENIX_COLLECTOR_ENDPOINT="https://localhost:6006/v1/traces" +export PHOENIX_API_KEY="your-api-key" +export OPENAI_API_KEY="your-openai-api-key" +``` + +- Add the OpenInferenceOTLPTraceExporter to your Mastra project + +```typescript +// chosen-project-name/src/index.ts +import { Mastra } from "@mastra/core/mastra"; +import { createLogger } from "@mastra/core/logger"; +import { LibSQLStore } from "@mastra/libsql"; +import { + isOpenInferenceSpan, + OpenInferenceOTLPTraceExporter, +} from "@arizeai/openinference-mastra"; + +import { weatherAgent } from "./agents"; + +export const mastra = new Mastra({ + agents: { weatherAgent }, + storage: new LibSQLStore({ + url: ":memory:", + }), + logger: createLogger({ + name: "Mastra", + level: "info", + }), + telemetry: { + enabled: true, + serviceName: "weather-agent", + export: { + type: "custom", + exporter: new OpenInferenceOTLPTraceExporter({ + url: process.env.PHOENIX_COLLECTOR_ENDPOINT, + headers: { + Authorization: `Bearer ${process.env.PHOENIX_API_KEY}`, + }, + spanFilter: isOpenInferenceSpan, + }), + }, + }, +}); +``` + +- Run the agent + +```shell +npm run dev +``` + +- Send a chat message to the agent in the playground [http://localhost:4111/agents/weatherAgent/chat/](http://localhost:4111/agents/weatherAgent/chat/) + +![weather agent chat](./docs/mastra-weather-agent.png) + +- After a few moments, you should see the spans for the agent's request and response in Phoenix. + - Not sure how to run the Phoenix collector? [Check out the Phoenix docs](https://docs.arize.com/phoenix/self-hosting/deployment-options/docker#docker). + +![weather agent spans](./docs/mastra-weather-agent-spans.png) + +You've done it! For next steps, check out the [Mastra docs](https://mastra.ai/en/docs) to learn how to add more agents, tools, and storage options to your project. 
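+
+If you want finer control over which spans are exported, you can wrap `isOpenInferenceSpan` in your own predicate. The sketch below is illustrative only; the extra span-name check is an assumption for demonstration, not part of this package's API.
+
+```typescript
+import type { ReadableSpan } from "@opentelemetry/sdk-trace-base";
+import {
+  isOpenInferenceSpan,
+  OpenInferenceOTLPTraceExporter,
+} from "@arizeai/openinference-mastra";
+
+// keep OpenInference spans, plus (hypothetically) the root HTTP span for the agent route
+const spanFilter = (span: ReadableSpan) =>
+  isOpenInferenceSpan(span) || span.name.startsWith("POST /api/agents");
+
+export const exporter = new OpenInferenceOTLPTraceExporter({
+  url: process.env.PHOENIX_COLLECTOR_ENDPOINT,
+  headers: {
+    Authorization: `Bearer ${process.env.PHOENIX_API_KEY}`,
+  },
+  spanFilter,
+});
+```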
diff --git a/js/packages/openinference-mastra/docs/mastra-weather-agent-spans.png b/js/packages/openinference-mastra/docs/mastra-weather-agent-spans.png new file mode 100644 index 000000000..e2dd62681 Binary files /dev/null and b/js/packages/openinference-mastra/docs/mastra-weather-agent-spans.png differ diff --git a/js/packages/openinference-mastra/docs/mastra-weather-agent.png b/js/packages/openinference-mastra/docs/mastra-weather-agent.png new file mode 100644 index 000000000..92af5851f Binary files /dev/null and b/js/packages/openinference-mastra/docs/mastra-weather-agent.png differ diff --git a/js/packages/openinference-mastra/package.json b/js/packages/openinference-mastra/package.json new file mode 100644 index 000000000..ec043ee6f --- /dev/null +++ b/js/packages/openinference-mastra/package.json @@ -0,0 +1,61 @@ +{ + "name": "@arizeai/openinference-mastra", + "version": "1.0.0", + "private": false, + "type": "module", + "types": "dist/esm/index.d.ts", + "description": "OpenInference utilities for ingesting Mastra spans", + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc --build tsconfig.esm.json && tsc-alias -p tsconfig.esm.json", + "postbuild": "echo '{\"type\": \"module\"}' > ./dist/esm/package.json && rimraf dist/test", + "type:check": "tsc --noEmit", + "test": "vitest" + }, + "exports": { + ".": { + "import": "./dist/esm/index.js" + }, + "./utils": { + "import": "./dist/esm/utils.js" + } + }, + "files": [ + "dist", + "src" + ], + "keywords": [ + "openinference", + "llm", + "opentelemetry", + "mastra", + "agent" + ], + "author": "oss-devs@arize.com", + "license": "Apache-2.0", + "homepage": "https://github.com/arize-ai/openinference/tree/main/js/packages/openinference-mastra", + "repository": { + "type": "git", + "url": "git+https://github.com/Arize-ai/openinference.git" + }, + "bugs": { + "url": "https://github.com/Arize-ai/openinference/issues" + }, + "dependencies": { + "@arizeai/openinference-core": "workspace:*", + "@arizeai/openinference-semantic-conventions": "workspace:*", + "@arizeai/openinference-vercel": "workspace:*" + }, + "devDependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/core": "^2.0.1", + "@opentelemetry/exporter-trace-otlp-proto": "^0.201.1", + "@opentelemetry/sdk-trace-base": "^2.0.1", + "@opentelemetry/semantic-conventions": "^1.34.0", + "vitest": "^3.1.3" + }, + "peerDependencies": { + "@opentelemetry/exporter-trace-otlp-proto": "^0.201.1", + "@opentelemetry/semantic-conventions": "^1.34.0" + } +} diff --git a/js/packages/openinference-mastra/src/OpenInferenceTraceExporter.ts b/js/packages/openinference-mastra/src/OpenInferenceTraceExporter.ts new file mode 100644 index 000000000..e27114af2 --- /dev/null +++ b/js/packages/openinference-mastra/src/OpenInferenceTraceExporter.ts @@ -0,0 +1,97 @@ +import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; +import type { ExportResult } from "@opentelemetry/core"; +import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto"; +import { addOpenInferenceAttributesToSpan } from "@arizeai/openinference-vercel/utils"; + +import { addOpenInferenceAttributesToMastraSpan } from "./attributes.js"; +import { addOpenInferenceProjectResourceAttributeSpan } from "./utils.js"; + +type ConstructorArgs = { + /** + * A function that filters the spans to be exported. + * If provided, the span will be exported if the function returns `true`. 
+ * + * @example + * ```ts + * import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; + * import { isOpenInferenceSpan, OpenInferenceOTLPTraceExporter } from "@arizeai/openinference-mastra"; + * const spanFilter = (span: ReadableSpan) => { + * // add more span filtering logic here if desired + * // or just use the default isOpenInferenceSpan filter directly + * return isOpenInferenceSpan(span); + * }; + * const exporter = new OpenInferenceOTLPTraceExporter({ + * url: "...", + * headers: "...", + * spanFilter, + * }); + * ``` + */ + spanFilter?: (span: ReadableSpan) => boolean; +} & NonNullable[0]>; + +/** + * A custom OpenTelemetry trace exporter that appends OpenInference semantic conventions to spans prior to export + * + * This class extends the `OTLPTraceExporter` and adds additional logic to the `export` method to augment the spans with OpenInference attributes. + * + * @example + * ```ts + * import { Mastra } from "@mastra/core/mastra"; + * import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; + * import { isOpenInferenceSpan, OpenInferenceOTLPTraceExporter } from "@arizeai/openinference-mastra"; + * const spanFilter = (span: ReadableSpan) => { + * // add more span filtering logic here if desired + * // or just use the default isOpenInferenceSpan filter directly + * return isOpenInferenceSpan(span); + * }; + * const exporter = new OpenInferenceOTLPTraceExporter({ + * apiKey: "api-key", + * collectorEndpoint: "http://localhost:6006/v1/traces", + * spanFilter, + * }); + * const mastra = new Mastra({ + * // ... other config + * telemetry: { + * export: { + * type: "custom", + * exporter, + * }, + * }, + * }) + * ``` + */ +export class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter { + private readonly spanFilter?: (span: ReadableSpan) => boolean; + + constructor({ spanFilter, ...args }: ConstructorArgs) { + super({ + ...args, + }); + this.spanFilter = spanFilter; + } + export( + spans: ReadableSpan[], + resultCallback: (result: ExportResult) => void, + ) { + let filteredSpans = spans.map((span) => { + // add OpenInference resource attributes to the span based on Mastra span attributes + addOpenInferenceProjectResourceAttributeSpan(span); + // add OpenInference attributes to the span based on Vercel span attributes + addOpenInferenceAttributesToSpan({ + ...span, + // backwards compatibility with older versions of sdk-trace-base + instrumentationLibrary: { + name: "@arizeai/openinference-mastra", + }, + }); + // add OpenInference attributes to the span based on Mastra span attributes + addOpenInferenceAttributesToMastraSpan(span); + return span; + }); + if (this.spanFilter) { + filteredSpans = filteredSpans.filter(this.spanFilter); + } + super.export(filteredSpans, resultCallback); + } +} diff --git a/js/packages/openinference-mastra/src/attributes.ts b/js/packages/openinference-mastra/src/attributes.ts new file mode 100644 index 000000000..0f2623aa5 --- /dev/null +++ b/js/packages/openinference-mastra/src/attributes.ts @@ -0,0 +1,76 @@ +import { + OpenInferenceSpanKind, + SemanticConventions, +} from "@arizeai/openinference-semantic-conventions"; +import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; + +const MASTRA_AGENT_SPAN_NAME_PREFIXES = [ + "agent", + "mastra.getAgent", + "post /api/agents", +]; + +/** + * Add the OpenInference span kind to the given Mastra span. + * + * This function will add the OpenInference span kind to the given Mastra span. 
+ */ +const addOpenInferenceSpanKind = ( + span: ReadableSpan, + kind: OpenInferenceSpanKind, +) => { + span.attributes[SemanticConventions.OPENINFERENCE_SPAN_KIND] = kind; +}; + +/** + * Get the OpenInference span kind for the given Mastra span. + * + * This function will return the OpenInference span kind for the given Mastra span, if it has already been set. + */ +const getOpenInferenceSpanKind = (span: ReadableSpan) => { + return span.attributes[SemanticConventions.OPENINFERENCE_SPAN_KIND] as + | OpenInferenceSpanKind + | undefined; +}; + +/** + * Get the closest OpenInference span kind for the given Mastra span. + * + * This function will attempt to detect the closest OpenInference span kind for the given Mastra span, + * based on the span's name and parent span ID. + */ +const getOpenInferenceSpanKindFromMastraSpan = ( + span: ReadableSpan, +): OpenInferenceSpanKind | null => { + const oiKind = getOpenInferenceSpanKind(span); + if (oiKind) { + return oiKind; + } + const spanName = span.name.toLowerCase(); + if ( + MASTRA_AGENT_SPAN_NAME_PREFIXES.some((prefix) => + spanName.startsWith(prefix), + ) + ) { + return OpenInferenceSpanKind.AGENT; + } + return null; +}; + +/** + * Enrich a Mastra span with OpenInference attributes. + * + * This function will add additional attributes to the span, based on the Mastra span's attributes. + * + * It will attempt to detect the closest OpenInference span kind for the given Mastra span, and then + * enrich the span with the appropriate attributes based on the span kind and current attributes. + * + * @param span - The Mastra span to enrich. + */ +export const addOpenInferenceAttributesToMastraSpan = (span: ReadableSpan) => { + const kind = getOpenInferenceSpanKindFromMastraSpan(span); + if (kind) { + addOpenInferenceSpanKind(span, kind); + } + // TODO: Further enrich the span with additional attributes based on the span kind +}; diff --git a/js/packages/openinference-mastra/src/debug.ts b/js/packages/openinference-mastra/src/debug.ts new file mode 100644 index 000000000..45e7a99a3 --- /dev/null +++ b/js/packages/openinference-mastra/src/debug.ts @@ -0,0 +1,55 @@ +import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; + +let debugSpans: Pick< + ReadableSpan, + | "name" + | "attributes" + | "parentSpanContext" + | "kind" + | "status" + | "resource" + | "startTime" + | "endTime" +>[] = []; + +/** + * Strictly for debugging purposes and not exposed as a package level export. + * + * You can place this in an exporter export function to capture mastra spans for debugging. 
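+ *
+ * @example
+ * ```ts
+ * // Sketch of a custom exporter's export method (assumes a subclass of OTLPTraceExporter):
+ * export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {
+ *   void debug(spans); // fire-and-forget; dumps accumulated spans once a root span arrives
+ *   super.export(spans, resultCallback);
+ * }
+ * ```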
+ * + * Accumulate items across invocations until the item has no parentId, then dump items to json file + * $HOME/debug-mastra-instrumentation/spans-{new Date().toISOString()}.json + */ +export const debug = async (spans: ReadableSpan[]) => { + // only import fs if we need it + // this allows the module to be used in environments that don't have fs + const fs = await import("node:fs"); + debugSpans.push( + // @ts-expect-error -just grabbing incomplete fields for testing + ...spans + .map((span) => ({ + name: span.name, + attributes: span.attributes, + parentSpanId: span.parentSpanContext?.spanId, + kind: span.kind, + status: span.status, + resource: {}, + startTime: span.startTime, + endTime: span.endTime, + })) + .filter((span) => + ["post", "agent", "ai"].some((prefix) => + span.name.toLocaleLowerCase().startsWith(prefix), + ), + ), + ); + const root = spans.find((span) => span.parentSpanContext?.spanId == null); + if (root) { + fs.mkdirSync("debug-mastra-instrumentation", { recursive: true }); + fs.writeFileSync( + `debug-mastra-instrumentation/${encodeURIComponent(root.name)}-${new Date().toISOString()}.json`, + JSON.stringify(debugSpans, null, 2), + ); + debugSpans = []; + } +}; diff --git a/js/packages/openinference-mastra/src/index.ts b/js/packages/openinference-mastra/src/index.ts new file mode 100644 index 000000000..d38091d1f --- /dev/null +++ b/js/packages/openinference-mastra/src/index.ts @@ -0,0 +1,2 @@ +export * from "./OpenInferenceTraceExporter.js"; +export { isOpenInferenceSpan } from "./utils.js"; diff --git a/js/packages/openinference-mastra/src/types.ts b/js/packages/openinference-mastra/src/types.ts new file mode 100644 index 000000000..314a76873 --- /dev/null +++ b/js/packages/openinference-mastra/src/types.ts @@ -0,0 +1,7 @@ +export type Mutable = { + -readonly [P in keyof T]: T[P]; +}; + +export type DeeplyMutable = { + -readonly [P in keyof T]: T[P] extends object ? DeeplyMutable : T[P]; +}; diff --git a/js/packages/openinference-mastra/src/utils.ts b/js/packages/openinference-mastra/src/utils.ts new file mode 100644 index 000000000..098901541 --- /dev/null +++ b/js/packages/openinference-mastra/src/utils.ts @@ -0,0 +1,34 @@ +import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; +import { + SemanticConventions, + SEMRESATTRS_PROJECT_NAME, +} from "@arizeai/openinference-semantic-conventions"; +import { ATTR_SERVICE_NAME } from "@opentelemetry/semantic-conventions"; + +/** + * Augments a span with OpenInference project resource attribute. + * + * This function will add additional attributes to the span, based on the span's resource attributes. + * + * @param span - The span to augment. + */ +export const addOpenInferenceProjectResourceAttributeSpan = ( + span: ReadableSpan, +) => { + const attributes = span.resource.attributes; + if (ATTR_SERVICE_NAME in attributes) { + attributes[SEMRESATTRS_PROJECT_NAME] = attributes[ATTR_SERVICE_NAME]; + } +}; + +/** + * Determines whether a span is an OpenInference span. + * + * @param span - The span to check. + * @returns `true` if the span is an OpenInference span, `false` otherwise. 
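+ *
+ * @example
+ * ```ts
+ * // Sketch: pass this predicate as the exporter's spanFilter so only
+ * // OpenInference spans are forwarded (the url below is a placeholder).
+ * const exporter = new OpenInferenceOTLPTraceExporter({
+ *   url: "http://localhost:6006/v1/traces",
+ *   spanFilter: isOpenInferenceSpan,
+ * });
+ * ```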
+ */ +export const isOpenInferenceSpan = (span: ReadableSpan) => { + const maybeOpenInferenceSpanKind = + span.attributes[SemanticConventions.OPENINFERENCE_SPAN_KIND]; + return typeof maybeOpenInferenceSpanKind === "string"; +}; diff --git a/js/packages/openinference-mastra/test/OpenInferenceTraceExporter.test.ts b/js/packages/openinference-mastra/test/OpenInferenceTraceExporter.test.ts new file mode 100644 index 000000000..e72a32a7a --- /dev/null +++ b/js/packages/openinference-mastra/test/OpenInferenceTraceExporter.test.ts @@ -0,0 +1,112 @@ +import type { ReadableSpan } from "@opentelemetry/sdk-trace-base"; +import { OpenInferenceOTLPTraceExporter } from "../src/OpenInferenceTraceExporter.js"; +import weatherAgentSpans from "./__fixtures__/weatherAgentSpans.json"; +import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto"; + +vi.mock(import("@opentelemetry/exporter-trace-otlp-proto"), () => { + const mockedClass = vi.fn(); + mockedClass.prototype.export = vi.fn(); + return { + OTLPTraceExporter: mockedClass, + }; +}); + +describe("OpenInferenceTraceExporter", () => { + afterEach(() => { + vi.resetAllMocks(); + }); + + it("should initialize without throwing an error", () => { + new OpenInferenceOTLPTraceExporter({ + url: "http://example.com/v1/traces", + headers: { + Authorization: "Bearer test-api-key", + }, + }); + }); + + // Quickly capture a known working state of the instrumentation to ensure + // we don't regress. + // TODO: Replace with a more fine-grained test that is easier to update over + // time with the changes in the instrumentation. + it("(snapshot) should export spans with openinference properties", async () => { + const exporter = new OpenInferenceOTLPTraceExporter({ + url: "http://example.com/v1/traces", + headers: { + Authorization: "Bearer test-api-key", + }, + }); + exporter.export(weatherAgentSpans as unknown as ReadableSpan[], () => {}); + await expect( + // @ts-expect-error - mock.calls is provided by vitest + OTLPTraceExporter.prototype.export.mock.calls, + ).toMatchFileSnapshot( + `./__snapshots__/OpenInferenceTraceExporter.test.ts.export.json`, + ); + }); + + it("should filter spans based on the spanFilter function", async () => { + const exporter = new OpenInferenceOTLPTraceExporter({ + url: "http://example.com/v1/traces", + headers: { + Authorization: "Bearer test-api-key", + }, + spanFilter: (span) => + span.name === "POST /api/agents/weatherAgent/stream", + }); + exporter.export(weatherAgentSpans as unknown as ReadableSpan[], () => {}); + expect( + // @ts-expect-error - mock.calls is provided by vitest + OTLPTraceExporter.prototype.export.mock.calls, + ).toMatchInlineSnapshot(` + [ + [ + [ + { + "attributes": { + "http.flavor": "1.1", + "http.host": "localhost:4111", + "http.method": "POST", + "http.request_content_length_uncompressed": 251, + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "http.scheme": "http", + "http.status_code": 200, + "http.status_text": "OK", + "http.target": "/api/agents/weatherAgent/stream", + "http.url": "http://localhost:4111/api/agents/weatherAgent/stream", + "http.user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36", + "net.host.ip": "::1", + "net.host.name": "localhost", + "net.host.port": 4111, + "net.peer.ip": "::1", + "net.peer.port": 51258, + "net.transport": "ip_tcp", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754797, + 193654459, + ], + "kind": 1, + "name": "POST 
/api/agents/weatherAgent/stream", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 713000000, + ], + "status": { + "code": 0, + }, + }, + ], + [Function], + ], + ] + `); + }); +}); diff --git a/js/packages/openinference-mastra/test/__fixtures__/weatherAgentSpans.json b/js/packages/openinference-mastra/test/__fixtures__/weatherAgentSpans.json new file mode 100644 index 000000000..2f2794b80 --- /dev/null +++ b/js/packages/openinference-mastra/test/__fixtures__/weatherAgentSpans.json @@ -0,0 +1,444 @@ +[ + { + "name": "agent.sanitizeResponseMessages", + "attributes": { + "agent.sanitizeResponseMessages.argument.0": "[]", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.sanitizeResponseMessages.result": "[]" + }, + "parentSpanId": "4b67da60e06e2e95", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 723000000], + "endTime": [1747754793, 723094166] + }, + { + "name": "agent.fetchMemory", + "attributes": { + "agent.fetchMemory.argument.0": "{\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"thread\":{\"id\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"title\":\"New Thread 2025-05-20T15:26:33.719Z\",\"resourceId\":\"weatherAgent\",\"createdAt\":\"2025-05-20T15:26:33.719Z\",\"updatedAt\":\"2025-05-20T15:26:33.719Z\"},\"resourceId\":\"weatherAgent\",\"userMessages\":[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}],\"systemMessage\":{\"role\":\"system\",\"content\":\"\\n You are a helpful weather assistant that provides accurate weather information.\\n\\n Your primary function is to help users get weather details for specific locations. When responding:\\n - Always ask for a location if none is provided\\n - If the location name isn’t in English, please translate it\\n - If giving a location with multiple parts (e.g. \\\"New York, NY\\\"), use the most relevant part (e.g. \\\"New York\\\")\\n - Include relevant details like humidity, wind conditions, and precipitation\\n - Keep responses concise but informative\\n\\n Use the weatherTool to fetch current weather data.\\n\"}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.fetchMemory.result": "{\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"messages\":[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}]}" + }, + "parentSpanId": "9afc1ee8e1e9d56d", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 721000000], + "endTime": [1747754793, 722524208] + }, + { + "name": "agent.preExecute", + "attributes": { + "agent.preExecute.argument.0": "{\"resourceId\":\"weatherAgent\",\"runId\":\"weatherAgent\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"thread\":{\"id\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"title\":\"New Thread 2025-05-20T15:26:33.719Z\",\"resourceId\":\"weatherAgent\",\"createdAt\":\"2025-05-20T15:26:33.719Z\",\"updatedAt\":\"2025-05-20T15:26:33.719Z\"},\"messages\":[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}],\"systemMessage\":{\"role\":\"system\",\"content\":\"\\n You are a helpful weather assistant that provides accurate weather information.\\n\\n Your primary function is to help users get weather details for specific locations. 
When responding:\\n - Always ask for a location if none is provided\\n - If the location name isn’t in English, please translate it\\n - If giving a location with multiple parts (e.g. \\\"New York, NY\\\"), use the most relevant part (e.g. \\\"New York\\\")\\n - Include relevant details like humidity, wind conditions, and precipitation\\n - Keep responses concise but informative\\n\\n Use the weatherTool to fetch current weather data.\\n\"}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.preExecute.result": "{\"coreMessages\":[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}],\"threadIdToUse\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\"}" + }, + "parentSpanId": "236712a8cd85d5c5", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 721000000], + "endTime": [1747754793, 722734583] + }, + { + "name": "agent.getAssignedTools", + "attributes": { + "agent.getAssignedTools.argument.0": "{\"runId\":\"weatherAgent\",\"resourceId\":\"weatherAgent\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"runtimeContext\":{\"registry\":{}},\"mastraProxy\":{}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.getAssignedTools.result": "{\"weatherTool\":{\"type\":\"function\",\"description\":\"Get current weather for a location\",\"parameters\":{\"jsonSchema\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\",\"description\":\"City name\"}},\"required\":[\"location\"],\"additionalProperties\":false,\"$schema\":\"http://json-schema.org/draft-07/schema#\"}}}}" + }, + "parentSpanId": "5f16b350a179b53e", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 723000000], + "endTime": [1747754793, 725576292] + }, + { + "name": "agent.getMemoryTools", + "attributes": { + "agent.getMemoryTools.argument.0": "{\"runId\":\"weatherAgent\",\"resourceId\":\"weatherAgent\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"runtimeContext\":{\"registry\":{}},\"mastraProxy\":{}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.getMemoryTools.result": "{}" + }, + "parentSpanId": "5f16b350a179b53e", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 726000000], + "endTime": [1747754793, 726049125] + }, + { + "name": "agent.getToolsets", + "attributes": { + "agent.getToolsets.argument.0": "{\"runId\":\"weatherAgent\",\"resourceId\":\"weatherAgent\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"runtimeContext\":{\"registry\":{}},\"mastraProxy\":{}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.getToolsets.result": "{}" + }, + "parentSpanId": "5f16b350a179b53e", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 726000000], + "endTime": [1747754793, 726033875] + }, + { + "name": "agent.getClientTools", + "attributes": { + "agent.getClientTools.argument.0": "{\"runId\":\"weatherAgent\",\"resourceId\":\"weatherAgent\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"runtimeContext\":{\"registry\":{}},\"mastraProxy\":{}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + 
"agent.getClientTools.result": "{}" + }, + "parentSpanId": "5f16b350a179b53e", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 726000000], + "endTime": [1747754793, 726017833] + }, + { + "name": "agent.getWorkflowTools", + "attributes": { + "agent.getWorkflowTools.argument.0": "{\"runId\":\"weatherAgent\",\"resourceId\":\"weatherAgent\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"runtimeContext\":{\"registry\":{}}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.getWorkflowTools.result": "{}" + }, + "parentSpanId": "5f16b350a179b53e", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 726000000], + "endTime": [1747754793, 726022375] + }, + { + "name": "agent.convertTools", + "attributes": { + "agent.convertTools.argument.0": "{\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"resourceId\":\"weatherAgent\",\"runId\":\"weatherAgent\",\"runtimeContext\":{\"registry\":{}}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.convertTools.result": "{\"weatherTool\":{\"type\":\"function\",\"description\":\"Get current weather for a location\",\"parameters\":{\"jsonSchema\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\",\"description\":\"City name\"}},\"required\":[\"location\"],\"additionalProperties\":false,\"$schema\":\"http://json-schema.org/draft-07/schema#\"}}}}" + }, + "parentSpanId": "236712a8cd85d5c5", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 723000000], + "endTime": [1747754793, 725862125] + }, + { + "name": "agent.stream", + "attributes": { + "agent.stream.argument.0": "[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}]", + "agent.stream.argument.1": "{\"runId\":\"weatherAgent\",\"maxRetries\":2,\"maxSteps\":5,\"temperature\":0.5,\"topP\":1,\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"resourceId\":\"weatherAgent\",\"runtimeContext\":{\"registry\":{}}}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.stream.result": "{\"warningsPromise\":{\"status\":{\"type\":\"pending\"}},\"usagePromise\":{\"status\":{\"type\":\"pending\"}},\"finishReasonPromise\":{\"status\":{\"type\":\"pending\"}},\"providerMetadataPromise\":{\"status\":{\"type\":\"pending\"}},\"textPromise\":{\"status\":{\"type\":\"pending\"}},\"reasoningPromise\":{\"status\":{\"type\":\"pending\"}},\"reasoningDetailsPromise\":{\"status\":{\"type\":\"pending\"}},\"sourcesPromise\":{\"status\":{\"type\":\"pending\"}},\"filesPromise\":{\"status\":{\"type\":\"pending\"}},\"toolCallsPromise\":{\"status\":{\"type\":\"pending\"}},\"toolResultsPromise\":{\"status\":{\"type\":\"pending\"}},\"requestPromise\":{\"status\":{\"type\":\"pending\"}},\"responsePromise\":{\"status\":{\"type\":\"pending\"}},\"stepsPromise\":{\"status\":{\"type\":\"pending\"}},\"baseStream\":{}}" + }, + "parentSpanId": "39266a50f6d02d91", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 717000000], + "endTime": [1747754793, 729243708] + }, + { + "name": "POST", + "attributes": { + "http.request.method": "POST", + "http.request.method_original": "POST" + }, + "parentSpanId": 
"b252df254059695b", + "kind": 2, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 727000000], + "endTime": [1747754794, 874990917] + }, + { + "name": "ai.toolCall", + "attributes": { + "operation.name": "ai.toolCall", + "ai.operationId": "ai.toolCall", + "ai.toolCall.name": "weatherTool", + "ai.toolCall.id": "call_jNucVMVoqwhzfOZ1wfx92lXo", + "ai.toolCall.args": "{\"location\":\"Ann Arbor\"}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "ai.toolCall.result": "{\"temperature\":12.2,\"feelsLike\":8.8,\"humidity\":61,\"windSpeed\":21.6,\"windGust\":35.3,\"conditions\":\"Overcast\",\"location\":\"Ann Arbor\"}" + }, + "parentSpanId": "e754528d3b71ea75", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754794, 873000000], + "endTime": [1747754795, 788932042] + }, + { + "name": "ai.streamText.doStream", + "attributes": { + "operation.name": "ai.streamText.doStream", + "ai.operationId": "ai.streamText.doStream", + "ai.model.provider": "openai.chat", + "ai.model.id": "gpt-4o", + "ai.settings.temperature": 0.5, + "ai.settings.topP": 1, + "ai.settings.maxRetries": 2, + "ai.prompt.format": "messages", + "ai.prompt.messages": "[{\"role\":\"system\",\"content\":\"\\n You are a helpful weather assistant that provides accurate weather information.\\n\\n Your primary function is to help users get weather details for specific locations. When responding:\\n - Always ask for a location if none is provided\\n - If the location name isn’t in English, please translate it\\n - If giving a location with multiple parts (e.g. \\\"New York, NY\\\"), use the most relevant part (e.g. 
\\\"New York\\\")\\n - Include relevant details like humidity, wind conditions, and precipitation\\n - Keep responses concise but informative\\n\\n Use the weatherTool to fetch current weather data.\\n\"},{\"role\":\"user\",\"content\":[{\"type\":\"text\",\"text\":\"what is the weather in ann arbor\"}]}]", + "ai.prompt.tools": [ + "{\"type\":\"function\",\"name\":\"weatherTool\",\"description\":\"Get current weather for a location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\",\"description\":\"City name\"}},\"required\":[\"location\"],\"additionalProperties\":false,\"$schema\":\"http://json-schema.org/draft-07/schema#\"}}" + ], + "ai.prompt.toolChoice": "{\"type\":\"auto\"}", + "gen_ai.system": "openai.chat", + "gen_ai.request.model": "gpt-4o", + "gen_ai.request.temperature": 0.5, + "gen_ai.request.top_p": 1, + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "ai.response.msToFirstChunk": 1084.9757499999832, + "ai.response.msToFinish": 2062.2397920000076, + "ai.response.avgCompletionTokensPerSecond": 7.273644926351001, + "ai.response.finishReason": "tool-calls", + "ai.response.text": "", + "ai.response.toolCalls": "[{\"type\":\"tool-call\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"args\":{\"location\":\"Ann Arbor\"}}]", + "ai.response.id": "chatcmpl-BZJ9Oz5K5ewqBH2PvYVKyj5qZE8AS", + "ai.response.model": "gpt-4o-2024-08-06", + "ai.response.timestamp": "2025-05-20T15:26:34.000Z", + "ai.usage.promptTokens": 179, + "ai.usage.completionTokens": 15, + "gen_ai.response.finish_reasons": ["tool-calls"], + "gen_ai.response.id": "chatcmpl-BZJ9Oz5K5ewqBH2PvYVKyj5qZE8AS", + "gen_ai.response.model": "gpt-4o-2024-08-06", + "gen_ai.usage.input_tokens": 179, + "gen_ai.usage.output_tokens": 15 + }, + "parentSpanId": "e754528d3b71ea75", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 727000000], + "endTime": [1747754795, 789569958] + }, + { + "name": "POST", + "attributes": { + "http.request.method": "POST", + "http.request.method_original": "POST" + }, + "parentSpanId": "510ea0778c66256d", + "kind": 2, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754795, 792000000], + "endTime": [1747754797, 187530666] + }, + { + "name": "ai.streamText.doStream", + "attributes": { + "operation.name": "ai.streamText.doStream", + "ai.operationId": "ai.streamText.doStream", + "ai.model.provider": "openai.chat", + "ai.model.id": "gpt-4o", + "ai.settings.temperature": 0.5, + "ai.settings.topP": 1, + "ai.settings.maxRetries": 2, + "ai.prompt.format": "messages", + "ai.prompt.messages": "[{\"role\":\"system\",\"content\":\"\\n You are a helpful weather assistant that provides accurate weather information.\\n\\n Your primary function is to help users get weather details for specific locations. When responding:\\n - Always ask for a location if none is provided\\n - If the location name isn’t in English, please translate it\\n - If giving a location with multiple parts (e.g. \\\"New York, NY\\\"), use the most relevant part (e.g. 
\\\"New York\\\")\\n - Include relevant details like humidity, wind conditions, and precipitation\\n - Keep responses concise but informative\\n\\n Use the weatherTool to fetch current weather data.\\n\"},{\"role\":\"user\",\"content\":[{\"type\":\"text\",\"text\":\"what is the weather in ann arbor\"}]},{\"role\":\"assistant\",\"content\":[{\"type\":\"tool-call\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"args\":{\"location\":\"Ann Arbor\"}}]},{\"role\":\"tool\",\"content\":[{\"type\":\"tool-result\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"result\":{\"temperature\":12.2,\"feelsLike\":8.8,\"humidity\":61,\"windSpeed\":21.6,\"windGust\":35.3,\"conditions\":\"Overcast\",\"location\":\"Ann Arbor\"}}]}]", + "ai.prompt.tools": [ + "{\"type\":\"function\",\"name\":\"weatherTool\",\"description\":\"Get current weather for a location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\",\"description\":\"City name\"}},\"required\":[\"location\"],\"additionalProperties\":false,\"$schema\":\"http://json-schema.org/draft-07/schema#\"}}" + ], + "ai.prompt.toolChoice": "{\"type\":\"auto\"}", + "gen_ai.system": "openai.chat", + "gen_ai.request.model": "gpt-4o", + "gen_ai.request.temperature": 0.5, + "gen_ai.request.top_p": 1, + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "ai.response.msToFirstChunk": 756.9352089999884, + "ai.response.msToFinish": 1397.3403749999998, + "ai.response.avgCompletionTokensPerSecond": 45.801295908307246, + "ai.response.finishReason": "stop", + "ai.response.text": "The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.", + "ai.response.id": "chatcmpl-BZJ9Qu6EW2xU8oEQAZ7JC2vlUNxFb", + "ai.response.model": "gpt-4o-2024-08-06", + "ai.response.timestamp": "2025-05-20T15:26:36.000Z", + "ai.usage.promptTokens": 246, + "ai.usage.completionTokens": 64, + "gen_ai.response.finish_reasons": ["stop"], + "gen_ai.response.id": "chatcmpl-BZJ9Qu6EW2xU8oEQAZ7JC2vlUNxFb", + "gen_ai.response.model": "gpt-4o-2024-08-06", + "gen_ai.usage.input_tokens": 246, + "gen_ai.usage.output_tokens": 64 + }, + "parentSpanId": "e754528d3b71ea75", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754795, 790000000], + "endTime": [1747754797, 187845583] + }, + { + "name": "agent.getMostRecentUserMessage", + "attributes": { + "agent.getMostRecentUserMessage.argument.0": "[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}]", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.getMostRecentUserMessage.result": "{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}" + }, + "parentSpanId": "236712a8cd85d5c5", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754797, 189000000], + "endTime": [1747754797, 189135208] + }, + { + "name": "agent.sanitizeResponseMessages", + "attributes": { + "agent.sanitizeResponseMessages.argument.0": "[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}]", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.sanitizeResponseMessages.result": 
"[{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}]" + }, + "parentSpanId": "236712a8cd85d5c5", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754797, 189000000], + "endTime": [1747754797, 189173375] + }, + { + "name": "agent.sanitizeResponseMessages", + "attributes": { + "agent.sanitizeResponseMessages.argument.0": "[{\"role\":\"assistant\",\"content\":[{\"type\":\"tool-call\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"args\":{\"location\":\"Ann Arbor\"}}],\"id\":\"msg-NR1VI5rKK1BH6BWwFUTyA2UN\"},{\"role\":\"tool\",\"id\":\"msg-AaK5V3CygaWNC9Qcu8MEfW0Q\",\"content\":[{\"type\":\"tool-result\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"result\":{\"temperature\":12.2,\"feelsLike\":8.8,\"humidity\":61,\"windSpeed\":21.6,\"windGust\":35.3,\"conditions\":\"Overcast\",\"location\":\"Ann Arbor\"}}]},{\"role\":\"assistant\",\"content\":[{\"type\":\"text\",\"text\":\"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.\"}],\"id\":\"msg-5mfT3Rz7bKnssiEcn235nQta\"}]", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.sanitizeResponseMessages.result": "[{\"role\":\"assistant\",\"content\":[{\"type\":\"tool-call\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"args\":{\"location\":\"Ann Arbor\"}}],\"id\":\"msg-NR1VI5rKK1BH6BWwFUTyA2UN\"},{\"role\":\"tool\",\"id\":\"msg-AaK5V3CygaWNC9Qcu8MEfW0Q\",\"content\":[{\"type\":\"tool-result\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"result\":{\"temperature\":12.2,\"feelsLike\":8.8,\"humidity\":61,\"windSpeed\":21.6,\"windGust\":35.3,\"conditions\":\"Overcast\",\"location\":\"Ann Arbor\"}}]},{\"role\":\"assistant\",\"content\":[{\"type\":\"text\",\"text\":\"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.\"}],\"id\":\"msg-5mfT3Rz7bKnssiEcn235nQta\"}]" + }, + "parentSpanId": "654be41a9d75936f", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754797, 190000000], + "endTime": [1747754797, 190055541] + }, + { + "name": "agent.getResponseMessages", + "attributes": { + "agent.getResponseMessages.argument.0": "{\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"resourceId\":\"weatherAgent\",\"messages\":[{\"role\":\"assistant\",\"content\":[{\"type\":\"tool-call\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"args\":{\"location\":\"Ann Arbor\"}}],\"id\":\"msg-NR1VI5rKK1BH6BWwFUTyA2UN\"},{\"role\":\"tool\",\"id\":\"msg-AaK5V3CygaWNC9Qcu8MEfW0Q\",\"content\":[{\"type\":\"tool-result\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"result\":{\"temperature\":12.2,\"feelsLike\":8.8,\"humidity\":61,\"windSpeed\":21.6,\"windGust\":35.3,\"conditions\":\"Overcast\",\"location\":\"Ann Arbor\"}}]},{\"role\":\"assistant\",\"content\":[{\"type\":\"text\",\"text\":\"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. 
The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.\"}],\"id\":\"msg-5mfT3Rz7bKnssiEcn235nQta\"}],\"now\":1747754797190}", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "componentName": "Weather Agent", + "agent.getResponseMessages.result": "[{\"id\":\"msg-NR1VI5rKK1BH6BWwFUTyA2UN\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"resourceId\":\"weatherAgent\",\"role\":\"assistant\",\"content\":[{\"type\":\"tool-call\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"args\":{\"location\":\"Ann Arbor\"}}],\"createdAt\":\"2025-05-20T15:26:37.190Z\",\"toolCallIds\":[\"call_jNucVMVoqwhzfOZ1wfx92lXo\"],\"toolCallArgs\":[{\"location\":\"Ann Arbor\"}],\"toolNames\":[\"weatherTool\"],\"type\":\"tool-call\"},{\"id\":\"msg-AaK5V3CygaWNC9Qcu8MEfW0Q\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"resourceId\":\"weatherAgent\",\"role\":\"tool\",\"content\":[{\"type\":\"tool-result\",\"toolCallId\":\"call_jNucVMVoqwhzfOZ1wfx92lXo\",\"toolName\":\"weatherTool\",\"result\":{\"temperature\":12.2,\"feelsLike\":8.8,\"humidity\":61,\"windSpeed\":21.6,\"windGust\":35.3,\"conditions\":\"Overcast\",\"location\":\"Ann Arbor\"}}],\"createdAt\":\"2025-05-20T15:26:37.191Z\",\"toolCallIds\":[\"call_jNucVMVoqwhzfOZ1wfx92lXo\"],\"type\":\"tool-result\"},{\"id\":\"msg-5mfT3Rz7bKnssiEcn235nQta\",\"threadId\":\"93dcfcda-a269-4fb3-99be-8e03e432db24\",\"resourceId\":\"weatherAgent\",\"role\":\"assistant\",\"content\":[{\"type\":\"text\",\"text\":\"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.\"}],\"createdAt\":\"2025-05-20T15:26:37.192Z\",\"type\":\"text\"}]" + }, + "parentSpanId": "236712a8cd85d5c5", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754797, 190000000], + "endTime": [1747754797, 190200291] + }, + { + "name": "ai.streamText", + "attributes": { + "operation.name": "ai.streamText", + "ai.operationId": "ai.streamText", + "ai.model.provider": "openai.chat", + "ai.model.id": "gpt-4o", + "ai.settings.temperature": 0.5, + "ai.settings.topP": 1, + "ai.settings.maxRetries": 2, + "ai.prompt": "{\"messages\":[{\"role\":\"system\",\"content\":\"\\n You are a helpful weather assistant that provides accurate weather information.\\n\\n Your primary function is to help users get weather details for specific locations. When responding:\\n - Always ask for a location if none is provided\\n - If the location name isn’t in English, please translate it\\n - If giving a location with multiple parts (e.g. \\\"New York, NY\\\"), use the most relevant part (e.g. \\\"New York\\\")\\n - Include relevant details like humidity, wind conditions, and precipitation\\n - Keep responses concise but informative\\n\\n Use the weatherTool to fetch current weather data.\\n\"},{\"role\":\"user\",\"content\":\"what is the weather in ann arbor\"}]}", + "ai.settings.maxSteps": 5, + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "ai.response.finishReason": "stop", + "ai.response.text": "The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. 
The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.", + "ai.usage.promptTokens": 425, + "ai.usage.completionTokens": 79 + }, + "parentSpanId": "236712a8cd85d5c5", + "kind": 0, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 727000000], + "endTime": [1747754797, 190427917] + }, + { + "name": "POST /api/agents/weatherAgent/stream", + "attributes": { + "http.url": "http://localhost:4111/api/agents/weatherAgent/stream", + "http.host": "localhost:4111", + "net.host.name": "localhost", + "http.method": "POST", + "http.scheme": "http", + "http.target": "/api/agents/weatherAgent/stream", + "http.user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36", + "http.request_content_length_uncompressed": 251, + "http.flavor": "1.1", + "net.transport": "ip_tcp", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "net.host.ip": "::1", + "net.host.port": 4111, + "net.peer.ip": "::1", + "net.peer.port": 51258, + "http.status_code": 200, + "http.status_text": "OK" + }, + "kind": 1, + "status": { + "code": 0 + }, + "resource": { "attributes": { "service.name": "mock" } }, + "startTime": [1747754793, 713000000], + "endTime": [1747754797, 193654459] + } +] diff --git a/js/packages/openinference-mastra/test/__snapshots__/OpenInferenceTraceExporter.test.ts.export.json b/js/packages/openinference-mastra/test/__snapshots__/OpenInferenceTraceExporter.test.ts.export.json new file mode 100644 index 000000000..66f39d1c0 --- /dev/null +++ b/js/packages/openinference-mastra/test/__snapshots__/OpenInferenceTraceExporter.test.ts.export.json @@ -0,0 +1,801 @@ +[ + [ + [ + { + "attributes": { + "agent.sanitizeResponseMessages.argument.0": "[]", + "agent.sanitizeResponseMessages.result": "[]", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 723094166, + ], + "kind": 0, + "name": "agent.sanitizeResponseMessages", + "parentSpanId": "4b67da60e06e2e95", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 723000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.fetchMemory.argument.0": "{"threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","thread":{"id":"93dcfcda-a269-4fb3-99be-8e03e432db24","title":"New Thread 2025-05-20T15:26:33.719Z","resourceId":"weatherAgent","createdAt":"2025-05-20T15:26:33.719Z","updatedAt":"2025-05-20T15:26:33.719Z"},"resourceId":"weatherAgent","userMessages":[{"role":"user","content":"what is the weather in ann arbor"}],"systemMessage":{"role":"system","content":"\n You are a helpful weather assistant that provides accurate weather information.\n\n Your primary function is to help users get weather details for specific locations. When responding:\n - Always ask for a location if none is provided\n - If the location name isn’t in English, please translate it\n - If giving a location with multiple parts (e.g. \"New York, NY\"), use the most relevant part (e.g. 
\"New York\")\n - Include relevant details like humidity, wind conditions, and precipitation\n - Keep responses concise but informative\n\n Use the weatherTool to fetch current weather data.\n"}}", + "agent.fetchMemory.result": "{"threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","messages":[{"role":"user","content":"what is the weather in ann arbor"}]}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 722524208, + ], + "kind": 0, + "name": "agent.fetchMemory", + "parentSpanId": "9afc1ee8e1e9d56d", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 721000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.preExecute.argument.0": "{"resourceId":"weatherAgent","runId":"weatherAgent","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","thread":{"id":"93dcfcda-a269-4fb3-99be-8e03e432db24","title":"New Thread 2025-05-20T15:26:33.719Z","resourceId":"weatherAgent","createdAt":"2025-05-20T15:26:33.719Z","updatedAt":"2025-05-20T15:26:33.719Z"},"messages":[{"role":"user","content":"what is the weather in ann arbor"}],"systemMessage":{"role":"system","content":"\n You are a helpful weather assistant that provides accurate weather information.\n\n Your primary function is to help users get weather details for specific locations. When responding:\n - Always ask for a location if none is provided\n - If the location name isn’t in English, please translate it\n - If giving a location with multiple parts (e.g. \"New York, NY\"), use the most relevant part (e.g. \"New York\")\n - Include relevant details like humidity, wind conditions, and precipitation\n - Keep responses concise but informative\n\n Use the weatherTool to fetch current weather data.\n"}}", + "agent.preExecute.result": "{"coreMessages":[{"role":"user","content":"what is the weather in ann arbor"}],"threadIdToUse":"93dcfcda-a269-4fb3-99be-8e03e432db24"}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 722734583, + ], + "kind": 0, + "name": "agent.preExecute", + "parentSpanId": "236712a8cd85d5c5", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 721000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.getAssignedTools.argument.0": "{"runId":"weatherAgent","resourceId":"weatherAgent","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","runtimeContext":{"registry":{}},"mastraProxy":{}}", + "agent.getAssignedTools.result": "{"weatherTool":{"type":"function","description":"Get current weather for a location","parameters":{"jsonSchema":{"type":"object","properties":{"location":{"type":"string","description":"City name"}},"required":["location"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}}}}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 725576292, + ], + "kind": 0, + "name": "agent.getAssignedTools", + "parentSpanId": "5f16b350a179b53e", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 723000000, + ], + "status": { + 
"code": 0, + }, + }, + { + "attributes": { + "agent.getMemoryTools.argument.0": "{"runId":"weatherAgent","resourceId":"weatherAgent","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","runtimeContext":{"registry":{}},"mastraProxy":{}}", + "agent.getMemoryTools.result": "{}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 726049125, + ], + "kind": 0, + "name": "agent.getMemoryTools", + "parentSpanId": "5f16b350a179b53e", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 726000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.getToolsets.argument.0": "{"runId":"weatherAgent","resourceId":"weatherAgent","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","runtimeContext":{"registry":{}},"mastraProxy":{}}", + "agent.getToolsets.result": "{}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 726033875, + ], + "kind": 0, + "name": "agent.getToolsets", + "parentSpanId": "5f16b350a179b53e", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 726000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.getClientTools.argument.0": "{"runId":"weatherAgent","resourceId":"weatherAgent","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","runtimeContext":{"registry":{}},"mastraProxy":{}}", + "agent.getClientTools.result": "{}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 726017833, + ], + "kind": 0, + "name": "agent.getClientTools", + "parentSpanId": "5f16b350a179b53e", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 726000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.getWorkflowTools.argument.0": "{"runId":"weatherAgent","resourceId":"weatherAgent","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","runtimeContext":{"registry":{}}}", + "agent.getWorkflowTools.result": "{}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 726022375, + ], + "kind": 0, + "name": "agent.getWorkflowTools", + "parentSpanId": "5f16b350a179b53e", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 726000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.convertTools.argument.0": "{"threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","resourceId":"weatherAgent","runId":"weatherAgent","runtimeContext":{"registry":{}}}", + "agent.convertTools.result": "{"weatherTool":{"type":"function","description":"Get current weather for a location","parameters":{"jsonSchema":{"type":"object","properties":{"location":{"type":"string","description":"City name"}},"required":["location"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}}}}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": 
"AGENT", + }, + "endTime": [ + 1747754793, + 725862125, + ], + "kind": 0, + "name": "agent.convertTools", + "parentSpanId": "236712a8cd85d5c5", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 723000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.stream.argument.0": "[{"role":"user","content":"what is the weather in ann arbor"}]", + "agent.stream.argument.1": "{"runId":"weatherAgent","maxRetries":2,"maxSteps":5,"temperature":0.5,"topP":1,"threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","resourceId":"weatherAgent","runtimeContext":{"registry":{}}}", + "agent.stream.result": "{"warningsPromise":{"status":{"type":"pending"}},"usagePromise":{"status":{"type":"pending"}},"finishReasonPromise":{"status":{"type":"pending"}},"providerMetadataPromise":{"status":{"type":"pending"}},"textPromise":{"status":{"type":"pending"}},"reasoningPromise":{"status":{"type":"pending"}},"reasoningDetailsPromise":{"status":{"type":"pending"}},"sourcesPromise":{"status":{"type":"pending"}},"filesPromise":{"status":{"type":"pending"}},"toolCallsPromise":{"status":{"type":"pending"}},"toolResultsPromise":{"status":{"type":"pending"}},"requestPromise":{"status":{"type":"pending"}},"responsePromise":{"status":{"type":"pending"}},"stepsPromise":{"status":{"type":"pending"}},"baseStream":{}}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754793, + 729243708, + ], + "kind": 0, + "name": "agent.stream", + "parentSpanId": "39266a50f6d02d91", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 717000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "http.request.method": "POST", + "http.request.method_original": "POST", + "openinference.span.kind": undefined, + }, + "endTime": [ + 1747754794, + 874990917, + ], + "kind": 2, + "name": "POST", + "parentSpanId": "b252df254059695b", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 727000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "ai.operationId": "ai.toolCall", + "ai.toolCall.args": "{"location":"Ann Arbor"}", + "ai.toolCall.id": "call_jNucVMVoqwhzfOZ1wfx92lXo", + "ai.toolCall.name": "weatherTool", + "ai.toolCall.result": "{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "input.mime_type": "application/json", + "input.value": "{"location":"Ann Arbor"}", + "openinference.span.kind": "TOOL", + "operation.name": "ai.toolCall", + "output.mime_type": "application/json", + "output.value": "{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}", + "tool.name": "weatherTool", + "tool.parameters": "{"location":"Ann Arbor"}", + "tool_call.id": "call_jNucVMVoqwhzfOZ1wfx92lXo", + }, + "endTime": [ + 1747754795, + 788932042, + ], + "kind": 0, + "name": "ai.toolCall", + "parentSpanId": "e754528d3b71ea75", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754794, + 873000000, + ], + "status": 
{ + "code": 0, + }, + }, + { + "attributes": { + "ai.model.id": "gpt-4o", + "ai.model.provider": "openai.chat", + "ai.operationId": "ai.streamText.doStream", + "ai.prompt.format": "messages", + "ai.prompt.messages": "[{"role":"system","content":"\n You are a helpful weather assistant that provides accurate weather information.\n\n Your primary function is to help users get weather details for specific locations. When responding:\n - Always ask for a location if none is provided\n - If the location name isn’t in English, please translate it\n - If giving a location with multiple parts (e.g. \"New York, NY\"), use the most relevant part (e.g. \"New York\")\n - Include relevant details like humidity, wind conditions, and precipitation\n - Keep responses concise but informative\n\n Use the weatherTool to fetch current weather data.\n"},{"role":"user","content":[{"type":"text","text":"what is the weather in ann arbor"}]}]", + "ai.prompt.toolChoice": "{"type":"auto"}", + "ai.prompt.tools": [ + "{"type":"function","name":"weatherTool","description":"Get current weather for a location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"City name"}},"required":["location"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}}", + ], + "ai.response.avgCompletionTokensPerSecond": 7.273644926351001, + "ai.response.finishReason": "tool-calls", + "ai.response.id": "chatcmpl-BZJ9Oz5K5ewqBH2PvYVKyj5qZE8AS", + "ai.response.model": "gpt-4o-2024-08-06", + "ai.response.msToFinish": 2062.2397920000076, + "ai.response.msToFirstChunk": 1084.9757499999832, + "ai.response.text": "", + "ai.response.timestamp": "2025-05-20T15:26:34.000Z", + "ai.response.toolCalls": "[{"type":"tool-call","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","args":{"location":"Ann Arbor"}}]", + "ai.settings.maxRetries": 2, + "ai.settings.temperature": 0.5, + "ai.settings.topP": 1, + "ai.usage.completionTokens": 15, + "ai.usage.promptTokens": 179, + "componentName": "Weather Agent", + "gen_ai.request.model": "gpt-4o", + "gen_ai.request.temperature": 0.5, + "gen_ai.request.top_p": 1, + "gen_ai.response.finish_reasons": [ + "tool-calls", + ], + "gen_ai.response.id": "chatcmpl-BZJ9Oz5K5ewqBH2PvYVKyj5qZE8AS", + "gen_ai.response.model": "gpt-4o-2024-08-06", + "gen_ai.system": "openai.chat", + "gen_ai.usage.input_tokens": 179, + "gen_ai.usage.output_tokens": 15, + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "llm.input_messages.0.message.content": " + You are a helpful weather assistant that provides accurate weather information. + + Your primary function is to help users get weather details for specific locations. When responding: + - Always ask for a location if none is provided + - If the location name isn’t in English, please translate it + - If giving a location with multiple parts (e.g. "New York, NY"), use the most relevant part (e.g. "New York") + - Include relevant details like humidity, wind conditions, and precipitation + - Keep responses concise but informative + + Use the weatherTool to fetch current weather data. 
+", + "llm.input_messages.0.message.role": "system", + "llm.input_messages.1.message.contents.0.message_content.image": undefined, + "llm.input_messages.1.message.contents.0.message_content.text": "what is the weather in ann arbor", + "llm.input_messages.1.message.contents.0.message_content.type": "text", + "llm.input_messages.1.message.role": "user", + "llm.input_messages.1.message.tool_calls.0.tool_call.function.arguments": undefined, + "llm.input_messages.1.message.tool_calls.0.tool_call.function.name": undefined, + "llm.input_messages.1.message.tool_calls.0.tool_call.id": undefined, + "llm.invocation_parameters": "{"temperature":0.5,"topP":1,"maxRetries":2}", + "llm.model_name": "gpt-4o", + "llm.output_messages.0.message.role": "assistant", + "llm.output_messages.0.message.tool_calls.0.tool_call.function.arguments": "{"location":"Ann Arbor"}", + "llm.output_messages.0.message.tool_calls.0.tool_call.function.name": "weatherTool", + "llm.token_count.completion": 15, + "llm.token_count.prompt": 179, + "openinference.span.kind": "LLM", + "operation.name": "ai.streamText.doStream", + "output.mime_type": "text/plain", + "output.value": "", + }, + "endTime": [ + 1747754795, + 789569958, + ], + "kind": 0, + "name": "ai.streamText.doStream", + "parentSpanId": "e754528d3b71ea75", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 727000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "http.request.method": "POST", + "http.request.method_original": "POST", + "openinference.span.kind": undefined, + }, + "endTime": [ + 1747754797, + 187530666, + ], + "kind": 2, + "name": "POST", + "parentSpanId": "510ea0778c66256d", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754795, + 792000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "ai.model.id": "gpt-4o", + "ai.model.provider": "openai.chat", + "ai.operationId": "ai.streamText.doStream", + "ai.prompt.format": "messages", + "ai.prompt.messages": "[{"role":"system","content":"\n You are a helpful weather assistant that provides accurate weather information.\n\n Your primary function is to help users get weather details for specific locations. When responding:\n - Always ask for a location if none is provided\n - If the location name isn’t in English, please translate it\n - If giving a location with multiple parts (e.g. \"New York, NY\"), use the most relevant part (e.g. 
\"New York\")\n - Include relevant details like humidity, wind conditions, and precipitation\n - Keep responses concise but informative\n\n Use the weatherTool to fetch current weather data.\n"},{"role":"user","content":[{"type":"text","text":"what is the weather in ann arbor"}]},{"role":"assistant","content":[{"type":"tool-call","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","args":{"location":"Ann Arbor"}}]},{"role":"tool","content":[{"type":"tool-result","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","result":{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}}]}]", + "ai.prompt.toolChoice": "{"type":"auto"}", + "ai.prompt.tools": [ + "{"type":"function","name":"weatherTool","description":"Get current weather for a location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"City name"}},"required":["location"],"additionalProperties":false,"$schema":"http://json-schema.org/draft-07/schema#"}}", + ], + "ai.response.avgCompletionTokensPerSecond": 45.801295908307246, + "ai.response.finishReason": "stop", + "ai.response.id": "chatcmpl-BZJ9Qu6EW2xU8oEQAZ7JC2vlUNxFb", + "ai.response.model": "gpt-4o-2024-08-06", + "ai.response.msToFinish": 1397.3403749999998, + "ai.response.msToFirstChunk": 756.9352089999884, + "ai.response.text": "The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.", + "ai.response.timestamp": "2025-05-20T15:26:36.000Z", + "ai.settings.maxRetries": 2, + "ai.settings.temperature": 0.5, + "ai.settings.topP": 1, + "ai.usage.completionTokens": 64, + "ai.usage.promptTokens": 246, + "componentName": "Weather Agent", + "content": [ + { + "result": { + "conditions": "Overcast", + "feelsLike": 8.8, + "humidity": 61, + "location": "Ann Arbor", + "temperature": 12.2, + "windGust": 35.3, + "windSpeed": 21.6, + }, + "toolCallId": "call_jNucVMVoqwhzfOZ1wfx92lXo", + "toolName": "weatherTool", + "type": "tool-result", + }, + ], + "gen_ai.request.model": "gpt-4o", + "gen_ai.request.temperature": 0.5, + "gen_ai.request.top_p": 1, + "gen_ai.response.finish_reasons": [ + "stop", + ], + "gen_ai.response.id": "chatcmpl-BZJ9Qu6EW2xU8oEQAZ7JC2vlUNxFb", + "gen_ai.response.model": "gpt-4o-2024-08-06", + "gen_ai.system": "openai.chat", + "gen_ai.usage.input_tokens": 246, + "gen_ai.usage.output_tokens": 64, + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "llm.input_messages.0.message.content": " + You are a helpful weather assistant that provides accurate weather information. + + Your primary function is to help users get weather details for specific locations. When responding: + - Always ask for a location if none is provided + - If the location name isn’t in English, please translate it + - If giving a location with multiple parts (e.g. "New York, NY"), use the most relevant part (e.g. "New York") + - Include relevant details like humidity, wind conditions, and precipitation + - Keep responses concise but informative + + Use the weatherTool to fetch current weather data. 
+", + "llm.input_messages.0.message.role": "system", + "llm.input_messages.1.message.contents.0.message_content.image": undefined, + "llm.input_messages.1.message.contents.0.message_content.text": "what is the weather in ann arbor", + "llm.input_messages.1.message.contents.0.message_content.type": "text", + "llm.input_messages.1.message.role": "user", + "llm.input_messages.1.message.tool_calls.0.tool_call.function.arguments": undefined, + "llm.input_messages.1.message.tool_calls.0.tool_call.function.name": undefined, + "llm.input_messages.1.message.tool_calls.0.tool_call.id": undefined, + "llm.input_messages.2.message.contents.0.message_content.image": undefined, + "llm.input_messages.2.message.contents.0.message_content.text": undefined, + "llm.input_messages.2.message.contents.0.message_content.type": "tool-call", + "llm.input_messages.2.message.role": "assistant", + "llm.input_messages.2.message.tool_calls.0.tool_call.function.arguments": "{"location":"Ann Arbor"}", + "llm.input_messages.2.message.tool_calls.0.tool_call.function.name": "weatherTool", + "llm.input_messages.2.message.tool_calls.0.tool_call.id": "call_jNucVMVoqwhzfOZ1wfx92lXo", + "llm.input_messages.3.message.content": "{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}", + "llm.input_messages.3.message.role": "tool", + "llm.input_messages.3.message.tool_call_id": "call_jNucVMVoqwhzfOZ1wfx92lXo", + "llm.input_messages.3.tool.name": "weatherTool", + "llm.invocation_parameters": "{"temperature":0.5,"topP":1,"maxRetries":2}", + "llm.model_name": "gpt-4o", + "llm.token_count.completion": 64, + "llm.token_count.prompt": 246, + "openinference.span.kind": "LLM", + "operation.name": "ai.streamText.doStream", + "output.mime_type": "text/plain", + "output.value": "The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. 
The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.", + "role": "tool", + }, + "endTime": [ + 1747754797, + 187845583, + ], + "kind": 0, + "name": "ai.streamText.doStream", + "parentSpanId": "e754528d3b71ea75", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754795, + 790000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.getMostRecentUserMessage.argument.0": "[{"role":"user","content":"what is the weather in ann arbor"}]", + "agent.getMostRecentUserMessage.result": "{"role":"user","content":"what is the weather in ann arbor"}", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754797, + 189135208, + ], + "kind": 0, + "name": "agent.getMostRecentUserMessage", + "parentSpanId": "236712a8cd85d5c5", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754797, + 189000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.sanitizeResponseMessages.argument.0": "[{"role":"user","content":"what is the weather in ann arbor"}]", + "agent.sanitizeResponseMessages.result": "[{"role":"user","content":"what is the weather in ann arbor"}]", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754797, + 189173375, + ], + "kind": 0, + "name": "agent.sanitizeResponseMessages", + "parentSpanId": "236712a8cd85d5c5", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754797, + 189000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.sanitizeResponseMessages.argument.0": "[{"role":"assistant","content":[{"type":"tool-call","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","args":{"location":"Ann Arbor"}}],"id":"msg-NR1VI5rKK1BH6BWwFUTyA2UN"},{"role":"tool","id":"msg-AaK5V3CygaWNC9Qcu8MEfW0Q","content":[{"type":"tool-result","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","result":{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}}]},{"role":"assistant","content":[{"type":"text","text":"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h."}],"id":"msg-5mfT3Rz7bKnssiEcn235nQta"}]", + "agent.sanitizeResponseMessages.result": "[{"role":"assistant","content":[{"type":"tool-call","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","args":{"location":"Ann Arbor"}}],"id":"msg-NR1VI5rKK1BH6BWwFUTyA2UN"},{"role":"tool","id":"msg-AaK5V3CygaWNC9Qcu8MEfW0Q","content":[{"type":"tool-result","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","result":{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}}]},{"role":"assistant","content":[{"type":"text","text":"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. 
The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h."}],"id":"msg-5mfT3Rz7bKnssiEcn235nQta"}]", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754797, + 190055541, + ], + "kind": 0, + "name": "agent.sanitizeResponseMessages", + "parentSpanId": "654be41a9d75936f", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754797, + 190000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "agent.getResponseMessages.argument.0": "{"threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","resourceId":"weatherAgent","messages":[{"role":"assistant","content":[{"type":"tool-call","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","args":{"location":"Ann Arbor"}}],"id":"msg-NR1VI5rKK1BH6BWwFUTyA2UN"},{"role":"tool","id":"msg-AaK5V3CygaWNC9Qcu8MEfW0Q","content":[{"type":"tool-result","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","result":{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}}]},{"role":"assistant","content":[{"type":"text","text":"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h."}],"id":"msg-5mfT3Rz7bKnssiEcn235nQta"}],"now":1747754797190}", + "agent.getResponseMessages.result": "[{"id":"msg-NR1VI5rKK1BH6BWwFUTyA2UN","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","resourceId":"weatherAgent","role":"assistant","content":[{"type":"tool-call","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","args":{"location":"Ann Arbor"}}],"createdAt":"2025-05-20T15:26:37.190Z","toolCallIds":["call_jNucVMVoqwhzfOZ1wfx92lXo"],"toolCallArgs":[{"location":"Ann Arbor"}],"toolNames":["weatherTool"],"type":"tool-call"},{"id":"msg-AaK5V3CygaWNC9Qcu8MEfW0Q","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","resourceId":"weatherAgent","role":"tool","content":[{"type":"tool-result","toolCallId":"call_jNucVMVoqwhzfOZ1wfx92lXo","toolName":"weatherTool","result":{"temperature":12.2,"feelsLike":8.8,"humidity":61,"windSpeed":21.6,"windGust":35.3,"conditions":"Overcast","location":"Ann Arbor"}}],"createdAt":"2025-05-20T15:26:37.191Z","toolCallIds":["call_jNucVMVoqwhzfOZ1wfx92lXo"],"type":"tool-result"},{"id":"msg-5mfT3Rz7bKnssiEcn235nQta","threadId":"93dcfcda-a269-4fb3-99be-8e03e432db24","resourceId":"weatherAgent","role":"assistant","content":[{"type":"text","text":"The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. 
The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h."}],"createdAt":"2025-05-20T15:26:37.192Z","type":"text"}]", + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754797, + 190200291, + ], + "kind": 0, + "name": "agent.getResponseMessages", + "parentSpanId": "236712a8cd85d5c5", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754797, + 190000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "ai.model.id": "gpt-4o", + "ai.model.provider": "openai.chat", + "ai.operationId": "ai.streamText", + "ai.prompt": "{"messages":[{"role":"system","content":"\n You are a helpful weather assistant that provides accurate weather information.\n\n Your primary function is to help users get weather details for specific locations. When responding:\n - Always ask for a location if none is provided\n - If the location name isn’t in English, please translate it\n - If giving a location with multiple parts (e.g. \"New York, NY\"), use the most relevant part (e.g. \"New York\")\n - Include relevant details like humidity, wind conditions, and precipitation\n - Keep responses concise but informative\n\n Use the weatherTool to fetch current weather data.\n"},{"role":"user","content":"what is the weather in ann arbor"}]}", + "ai.response.finishReason": "stop", + "ai.response.text": "The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.", + "ai.settings.maxRetries": 2, + "ai.settings.maxSteps": 5, + "ai.settings.temperature": 0.5, + "ai.settings.topP": 1, + "ai.usage.completionTokens": 79, + "ai.usage.promptTokens": 425, + "componentName": "Weather Agent", + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "input.mime_type": "application/json", + "input.value": "{"messages":[{"role":"system","content":"\n You are a helpful weather assistant that provides accurate weather information.\n\n Your primary function is to help users get weather details for specific locations. When responding:\n - Always ask for a location if none is provided\n - If the location name isn’t in English, please translate it\n - If giving a location with multiple parts (e.g. \"New York, NY\"), use the most relevant part (e.g. \"New York\")\n - Include relevant details like humidity, wind conditions, and precipitation\n - Keep responses concise but informative\n\n Use the weatherTool to fetch current weather data.\n"},{"role":"user","content":"what is the weather in ann arbor"}]}", + "llm.invocation_parameters": "{"temperature":0.5,"topP":1,"maxRetries":2,"maxSteps":5}", + "llm.model_name": "gpt-4o", + "openinference.span.kind": "CHAIN", + "operation.name": "ai.streamText", + "output.mime_type": "text/plain", + "output.value": "The current weather in Ann Arbor is overcast with a temperature of 12.2°C, though it feels like 8.8°C. 
The humidity level is at 61%, and there is a wind speed of 21.6 km/h with gusts reaching up to 35.3 km/h.", + }, + "endTime": [ + 1747754797, + 190427917, + ], + "kind": 0, + "name": "ai.streamText", + "parentSpanId": "236712a8cd85d5c5", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 727000000, + ], + "status": { + "code": 0, + }, + }, + { + "attributes": { + "http.flavor": "1.1", + "http.host": "localhost:4111", + "http.method": "POST", + "http.request_content_length_uncompressed": 251, + "http.request_id": "98823c5f-b2ec-4a4e-a056-f22f7bcb53ae", + "http.scheme": "http", + "http.status_code": 200, + "http.status_text": "OK", + "http.target": "/api/agents/weatherAgent/stream", + "http.url": "http://localhost:4111/api/agents/weatherAgent/stream", + "http.user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36", + "net.host.ip": "::1", + "net.host.name": "localhost", + "net.host.port": 4111, + "net.peer.ip": "::1", + "net.peer.port": 51258, + "net.transport": "ip_tcp", + "openinference.span.kind": "AGENT", + }, + "endTime": [ + 1747754797, + 193654459, + ], + "kind": 1, + "name": "POST /api/agents/weatherAgent/stream", + "resource": { + "attributes": { + "openinference.project.name": "mock", + "service.name": "mock", + }, + }, + "startTime": [ + 1747754793, + 713000000, + ], + "status": { + "code": 0, + }, + }, + ], + [Function], + ], +] \ No newline at end of file diff --git a/js/packages/openinference-mastra/tsconfig.esm.json b/js/packages/openinference-mastra/tsconfig.esm.json new file mode 100644 index 000000000..d194cd261 --- /dev/null +++ b/js/packages/openinference-mastra/tsconfig.esm.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.base.esm.json", + "compilerOptions": { + "outDir": "dist/esm", + "rootDir": "src", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "tsBuildInfoFile": "dist/esm/tsconfig.esm.tsbuildinfo" + }, + "include": ["src/**/*.ts"], + "references": [] +} diff --git a/js/packages/openinference-mastra/tsconfig.json b/js/packages/openinference-mastra/tsconfig.json new file mode 100644 index 000000000..78ff1e2f4 --- /dev/null +++ b/js/packages/openinference-mastra/tsconfig.json @@ -0,0 +1,16 @@ +{ + "extends": "../../tsconfig.base.esm.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": ".", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "types": ["vitest/globals"], + "lib": ["ESNext"], + "resolveJsonModule": true + }, + "files": [], + "include": ["src/**/*.ts", "test/**/*.ts", "test/__fixtures__/**/*.json"], + "exclude": ["test/__snapshots__/**/*.ts"], + "references": [] +} diff --git a/js/packages/openinference-mastra/vitest.config.ts b/js/packages/openinference-mastra/vitest.config.ts new file mode 100644 index 000000000..1876c41fc --- /dev/null +++ b/js/packages/openinference-mastra/vitest.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + dir: "test", + globals: true, + }, +}); diff --git a/js/packages/openinference-vercel/package.json b/js/packages/openinference-vercel/package.json index e0d4fdaf3..5e8f3dec4 100644 --- a/js/packages/openinference-vercel/package.json +++ b/js/packages/openinference-vercel/package.json @@ -18,6 +18,16 @@ ".": { "import": "./dist/esm/index.js", "require": "./dist/src/index.js" + }, + "./utils": { + "import": "./dist/esm/utils.js", + "require": 
"./dist/src/utils.js", + "types": "./dist/src/utils.d.ts" + }, + "./types": { + "import": "./dist/esm/types.js", + "require": "./dist/src/types.js", + "types": "./dist/src/types.d.ts" } }, "files": [ diff --git a/js/packages/openinference-vercel/src/utils.ts b/js/packages/openinference-vercel/src/utils.ts index 1e8e2625c..e2208c146 100644 --- a/js/packages/openinference-vercel/src/utils.ts +++ b/js/packages/openinference-vercel/src/utils.ts @@ -15,7 +15,6 @@ import { import { OpenInferenceIOConventionKey, OpenInferenceSemanticConventionKey, - ReadWriteSpan, SpanFilter, } from "./types"; import { @@ -242,10 +241,44 @@ const getInputMessageAttributes = (promptMessages?: AttributeValue) => { return messages.reduce((acc: Attributes, message, index) => { const MESSAGE_PREFIX = `${SemanticConventions.LLM_INPUT_MESSAGES}.${index}`; - if (isArrayOfObjects(message.content)) { + if (message.role === "tool") { + return { + ...acc, + ...message, + [`${MESSAGE_PREFIX}.${SemanticConventions.MESSAGE_ROLE}`]: message.role, + [`${MESSAGE_PREFIX}.${SemanticConventions.MESSAGE_TOOL_CALL_ID}`]: + Array.isArray(message.content) + ? typeof message.content[0]?.toolCallId === "string" + ? message.content[0].toolCallId + : undefined + : typeof message.toolCallId === "string" + ? message.toolCallId + : undefined, + [`${MESSAGE_PREFIX}.${SemanticConventions.TOOL_NAME}`]: Array.isArray( + message.content, + ) + ? typeof message.content[0]?.toolName === "string" + ? message.content[0].toolName + : undefined + : typeof message.toolName === "string" + ? message.toolName + : undefined, + [`${MESSAGE_PREFIX}.${SemanticConventions.MESSAGE_CONTENT}`]: + Array.isArray(message.content) + ? typeof message.content[0]?.result === "string" + ? message.content[0].result + : message.content[0]?.result + ? JSON.stringify(message.content[0].result) + : undefined + : typeof message.content === "string" + ? message.content + : undefined, + }; + } else if (isArrayOfObjects(message.content)) { const messageAttributes = message.content.reduce( (acc: Attributes, content, contentIndex) => { const CONTENTS_PREFIX = `${MESSAGE_PREFIX}.${SemanticConventions.MESSAGE_CONTENTS}.${contentIndex}`; + const TOOL_CALL_PREFIX = `${MESSAGE_PREFIX}.${SemanticConventions.MESSAGE_TOOL_CALLS}.${contentIndex}`; return { ...acc, [`${CONTENTS_PREFIX}.${SemanticConventions.MESSAGE_CONTENT_TYPE}`]: @@ -254,6 +287,20 @@ const getInputMessageAttributes = (promptMessages?: AttributeValue) => { typeof content.text === "string" ? content.text : undefined, [`${CONTENTS_PREFIX}.${SemanticConventions.MESSAGE_CONTENT_IMAGE}`]: typeof content.image === "string" ? content.image : undefined, + [`${TOOL_CALL_PREFIX}.${SemanticConventions.TOOL_CALL_ID}`]: + typeof content.toolCallId === "string" + ? content.toolCallId + : undefined, + [`${TOOL_CALL_PREFIX}.${SemanticConventions.TOOL_CALL_FUNCTION_NAME}`]: + typeof content.toolName === "string" + ? content.toolName + : undefined, + [`${TOOL_CALL_PREFIX}.${SemanticConventions.TOOL_CALL_FUNCTION_ARGUMENTS_JSON}`]: + typeof content.args === "string" + ? content.args + : typeof content.args === "object" + ? JSON.stringify(content.args) + : undefined, }; }, {}, @@ -536,10 +583,13 @@ export const shouldExportSpan = ({ * @param span - The span to add OpenInference attributes to. 
*/ export const addOpenInferenceAttributesToSpan = (span: ReadableSpan): void => { - const attributes = { ...span.attributes }; - - (span as ReadWriteSpan).attributes = { - ...span.attributes, - ...safelyGetOpenInferenceAttributes(attributes), + const newAttributes = { + ...safelyGetOpenInferenceAttributes(span.attributes), }; + + // newer versions of opentelemetry will not allow you to reassign + // the attributes object, so you must edit it by keyname instead + Object.entries(newAttributes).forEach(([key, value]) => { + span.attributes[key] = value; + }); }; diff --git a/js/packages/openinference-vercel/test/OpenInferenceSpanProcessor.test.ts b/js/packages/openinference-vercel/test/OpenInferenceSpanProcessor.test.ts index 189556cbb..b0f645789 100644 --- a/js/packages/openinference-vercel/test/OpenInferenceSpanProcessor.test.ts +++ b/js/packages/openinference-vercel/test/OpenInferenceSpanProcessor.test.ts @@ -322,6 +322,18 @@ const generateVercelAttributeTestCases = (): SpanProcessorTestCase[] => { "image.com", [`${firstInputMessageContentsPrefix}.1.${SemanticConventions.MESSAGE_CONTENT_TEXT}`]: undefined, + [`${SemanticConventions.LLM_INPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}.0.${SemanticConventions.TOOL_CALL_FUNCTION_ARGUMENTS_JSON}`]: + undefined, + [`${SemanticConventions.LLM_INPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}.0.${SemanticConventions.TOOL_CALL_FUNCTION_NAME}`]: + undefined, + [`${SemanticConventions.LLM_INPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}.0.${SemanticConventions.TOOL_CALL_ID}`]: + undefined, + [`${SemanticConventions.LLM_INPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}.1.${SemanticConventions.TOOL_CALL_FUNCTION_ARGUMENTS_JSON}`]: + undefined, + [`${SemanticConventions.LLM_INPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}.1.${SemanticConventions.TOOL_CALL_FUNCTION_NAME}`]: + undefined, + [`${SemanticConventions.LLM_INPUT_MESSAGES}.0.${SemanticConventions.MESSAGE_TOOL_CALLS}.1.${SemanticConventions.TOOL_CALL_ID}`]: + undefined, [SemanticConventions.OPENINFERENCE_SPAN_KIND]: OpenInferenceSpanKind.LLM, }, diff --git a/js/pnpm-lock.yaml b/js/pnpm-lock.yaml index 3f539bdb5..c804a0dbb 100644 --- a/js/pnpm-lock.yaml +++ b/js/pnpm-lock.yaml @@ -42,7 +42,7 @@ importers: version: 5.0.10 ts-jest: specifier: ^29.2.2 - version: 29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0)(typescript@5.5.4) + version: 29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0(@types/node@20.14.11)(ts-node@10.9.2(@types/node@20.14.11)(typescript@5.5.4)))(typescript@5.5.4) tsc-alias: specifier: ^1.8.10 version: 1.8.10 @@ -311,6 +311,37 @@ importers: specifier: ^4.95.0 version: 4.95.1(zod@3.24.3) + packages/openinference-mastra: + dependencies: + '@arizeai/openinference-core': + specifier: workspace:* + version: link:../openinference-core + '@arizeai/openinference-semantic-conventions': + specifier: workspace:* + version: link:../openinference-semantic-conventions + '@arizeai/openinference-vercel': + specifier: workspace:* + version: link:../openinference-vercel + devDependencies: + '@opentelemetry/api': + specifier: ^1.9.0 + version: 1.9.0 + '@opentelemetry/core': + specifier: ^2.0.1 + version: 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': + specifier: ^0.201.1 + version: 0.201.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': + specifier: ^2.0.1 + 
version: 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': + specifier: ^1.34.0 + version: 1.34.0 + vitest: + specifier: ^3.1.3 + version: 3.1.4(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0) + packages/openinference-semantic-conventions: {} packages/openinference-vercel: @@ -1354,6 +1385,10 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} + '@opentelemetry/api-logs@0.201.1': + resolution: {integrity: sha512-IxcFDP1IGMDemVFG2by/AMK+/o6EuBQ8idUq3xZ6MxgQGeumYZuX5OwR0h9HuvcUc/JPjQGfU5OHKIKYDJcXeA==} + engines: {node: '>=8.0.0'} + '@opentelemetry/api-logs@0.50.0': resolution: {integrity: sha512-JdZuKrhOYggqOpUljAq4WWNi5nB10PmgoF0y2CvedLGXd0kSawb/UBnWT8gg1ND3bHCNHStAIVT0ELlxJJRqrA==} engines: {node: '>=14'} @@ -1404,6 +1439,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/core@2.0.1': + resolution: {integrity: sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/exporter-logs-otlp-grpc@0.57.1': resolution: {integrity: sha512-RL8qmZH1H/H7Hbj0xKxF0Gg8kX9ic0aoMS3Kv5kj864lWxlpuR5YtGGn5OjGYwCmq6nYbsNy257fFp1U63pABw==} engines: {node: '>=14'} @@ -1464,6 +1505,12 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-proto@0.201.1': + resolution: {integrity: sha512-wMxdDDyW+lmmenYGBp0evCoKzajXqIw6SSaZtaF/uqKR9/POhC/9vudnc+kf8W49hYFyIEutPrc1hA0exe3UwQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-proto@0.50.0': resolution: {integrity: sha512-vavD9Ow6yOLiD+ocuS/oeciCsXNdsN41aYUrEljNaLXogvnkfMhJ+JLAhOnRSpzlVtRp7Ciw2BYGdYSebR0OsA==} engines: {node: '>=14'} @@ -1494,6 +1541,12 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-exporter-base@0.201.1': + resolution: {integrity: sha512-FiS/mIWmZXyRxYGyXPHY+I/4+XrYVTD7Fz/zwOHkVPQsA1JTakAOP9fAi6trXMio0dIpzvQujLNiBqGM7ExrQw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-exporter-base@0.50.0': resolution: {integrity: sha512-JUmjmrCmE1/fc4LjCQMqLfudgSl5OpUkzx7iA94b4jgeODM7zWxUoVXL7/CT7fWf47Cn+pmKjMvTCSESqZZ3mA==} engines: {node: '>=14'} @@ -1524,6 +1577,12 @@ packages: peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/otlp-transformer@0.201.1': + resolution: {integrity: sha512-+q/8Yuhtu9QxCcjEAXEO8fXLjlSnrnVwfzi9jiWaMAppQp69MoagHHomQj02V2WnGjvBod5ajgkbK4IoWab50A==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-transformer@0.50.0': resolution: {integrity: sha512-s0sl1Yfqd5q1Kjrf6DqXPWzErL+XHhrXOfejh4Vc/SMTNqC902xDsC8JQxbjuramWt/+hibfguIvi7Ns8VLolA==} engines: {node: '>=14'} @@ -1584,6 +1643,18 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/resources@2.0.1': + resolution: {integrity: sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.201.1': + resolution: {integrity: sha512-Ug8gtpssUNUnfpotB9ZhnSsPSGDu+7LngTMgKl31mmVJwLAKyl6jC8diZrMcGkSgBh0o5dbg9puvLyR25buZfw==} + engines: {node: ^18.19.0 || >=20.6.0} + 
peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + '@opentelemetry/sdk-logs@0.50.0': resolution: {integrity: sha512-PeUEupBB29p9nlPNqXoa1PUWNLsZnxG0DCDj3sHqzae+8y76B/A5hvZjg03ulWdnvBLYpnJslqzylG9E0IL87g==} engines: {node: '>=14'} @@ -1615,6 +1686,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/sdk-metrics@2.0.1': + resolution: {integrity: sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.9.0 <1.10.0' + '@opentelemetry/sdk-node@0.57.1': resolution: {integrity: sha512-0i25YQCpNiE1RDiaZ6ECO3Hgd6DIJeyHyA2AY9C4szMdZV5cM2m8/nrwK6fyNZdOEjRd54D/FkyP3aqZVIPGvg==} engines: {node: '>=14'} @@ -1639,6 +1716,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/sdk-trace-base@2.0.1': + resolution: {integrity: sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/sdk-trace-node@1.25.1': resolution: {integrity: sha512-nMcjFIKxnFqoez4gUmihdBrbpsEnAX/Xj16sGvZm+guceYE0NE00vLhpDVK6f3q8Q4VFI5xG8JjlXKMB/SkTTQ==} engines: {node: '>=14'} @@ -1671,6 +1754,10 @@ packages: resolution: {integrity: sha512-s0OpmpQFSfMrmedAn9Lhg4KWJELHCU6uU9dtIJ28N8UGhf9Y55im5X8fEzwhwDwiSqN+ZPSNrDJF7ivf/AuRPQ==} engines: {node: '>=14'} + '@opentelemetry/semantic-conventions@1.34.0': + resolution: {integrity: sha512-aKcOkyrorBGlajjRdVoJWHTxfxO1vCNHLJVlSDaRHDIdjU+pX8IYQPvPDkYiujKLbRnWU+1TBwEt0QRgSm4SGA==} + engines: {node: '>=14'} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -1705,6 +1792,106 @@ packages: '@protobufjs/utf8@1.1.0': resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + '@rollup/rollup-android-arm-eabi@4.41.0': + resolution: {integrity: sha512-KxN+zCjOYHGwCl4UCtSfZ6jrq/qi88JDUtiEFk8LELEHq2Egfc/FgW+jItZiOLRuQfb/3xJSgFuNPC9jzggX+A==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.41.0': + resolution: {integrity: sha512-yDvqx3lWlcugozax3DItKJI5j05B0d4Kvnjx+5mwiUpWramVvmAByYigMplaoAQ3pvdprGCTCE03eduqE/8mPQ==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.41.0': + resolution: {integrity: sha512-2KOU574vD3gzcPSjxO0eyR5iWlnxxtmW1F5CkNOHmMlueKNCQkxR6+ekgWyVnz6zaZihpUNkGxjsYrkTJKhkaw==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.41.0': + resolution: {integrity: sha512-gE5ACNSxHcEZyP2BA9TuTakfZvULEW4YAOtxl/A/YDbIir/wPKukde0BNPlnBiP88ecaN4BJI2TtAd+HKuZPQQ==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.41.0': + resolution: {integrity: sha512-GSxU6r5HnWij7FoSo7cZg3l5GPg4HFLkzsFFh0N/b16q5buW1NAWuCJ+HMtIdUEi6XF0qH+hN0TEd78laRp7Dg==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.41.0': + resolution: {integrity: sha512-KGiGKGDg8qLRyOWmk6IeiHJzsN/OYxO6nSbT0Vj4MwjS2XQy/5emsmtoqLAabqrohbgLWJ5GV3s/ljdrIr8Qjg==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.41.0': + resolution: {integrity: sha512-46OzWeqEVQyX3N2/QdiU/CMXYDH/lSHpgfBkuhl3igpZiaB3ZIfSjKuOnybFVBQzjsLwkus2mjaESy8H41SzvA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.41.0': + resolution: {integrity: 
sha512-lfgW3KtQP4YauqdPpcUZHPcqQXmTmH4nYU0cplNeW583CMkAGjtImw4PKli09NFi2iQgChk4e9erkwlfYem6Lg==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.41.0': + resolution: {integrity: sha512-nn8mEyzMbdEJzT7cwxgObuwviMx6kPRxzYiOl6o/o+ChQq23gfdlZcUNnt89lPhhz3BYsZ72rp0rxNqBSfqlqw==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.41.0': + resolution: {integrity: sha512-l+QK99je2zUKGd31Gh+45c4pGDAqZSuWQiuRFCdHYC2CSiO47qUWsCcenrI6p22hvHZrDje9QjwSMAFL3iwXwQ==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loongarch64-gnu@4.41.0': + resolution: {integrity: sha512-WbnJaxPv1gPIm6S8O/Wg+wfE/OzGSXlBMbOe4ie+zMyykMOeqmgD1BhPxZQuDqwUN+0T/xOFtL2RUWBspnZj3w==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-powerpc64le-gnu@4.41.0': + resolution: {integrity: sha512-eRDWR5t67/b2g8Q/S8XPi0YdbKcCs4WQ8vklNnUYLaSWF+Cbv2axZsp4jni6/j7eKvMLYCYdcsv8dcU+a6QNFg==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.41.0': + resolution: {integrity: sha512-TWrZb6GF5jsEKG7T1IHwlLMDRy2f3DPqYldmIhnA2DVqvvhY2Ai184vZGgahRrg8k9UBWoSlHv+suRfTN7Ua4A==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.41.0': + resolution: {integrity: sha512-ieQljaZKuJpmWvd8gW87ZmSFwid6AxMDk5bhONJ57U8zT77zpZ/TPKkU9HpnnFrM4zsgr4kiGuzbIbZTGi7u9A==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.41.0': + resolution: {integrity: sha512-/L3pW48SxrWAlVsKCN0dGLB2bi8Nv8pr5S5ocSM+S0XCn5RCVCXqi8GVtHFsOBBCSeR+u9brV2zno5+mg3S4Aw==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.41.0': + resolution: {integrity: sha512-XMLeKjyH8NsEDCRptf6LO8lJk23o9wvB+dJwcXMaH6ZQbbkHu2dbGIUindbMtRN6ux1xKi16iXWu6q9mu7gDhQ==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.41.0': + resolution: {integrity: sha512-m/P7LycHZTvSQeXhFmgmdqEiTqSV80zn6xHaQ1JSqwCtD1YGtwEK515Qmy9DcB2HK4dOUVypQxvhVSy06cJPEg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-win32-arm64-msvc@4.41.0': + resolution: {integrity: sha512-4yodtcOrFHpbomJGVEqZ8fzD4kfBeCbpsUy5Pqk4RluXOdsWdjLnjhiKy2w3qzcASWd04fp52Xz7JKarVJ5BTg==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.41.0': + resolution: {integrity: sha512-tmazCrAsKzdkXssEc65zIE1oC6xPHwfy9d5Ta25SRCDOZS+I6RypVVShWALNuU9bxIfGA0aqrmzlzoM5wO5SPQ==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.41.0': + resolution: {integrity: sha512-h1J+Yzjo/X+0EAvR2kIXJDuTuyT7drc+t2ALY0nIcGPbTatNOf0VWdhEA2Z4AAjv6X1NJV7SYo5oCTYRJhSlVA==} + cpu: [x64] + os: [win32] + '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} @@ -1942,6 +2129,9 @@ packages: '@types/diff-match-patch@1.0.36': resolution: {integrity: sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==} + '@types/estree@1.0.7': + resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + '@types/express-serve-static-core@5.0.6': resolution: {integrity: sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==} @@ -2093,6 +2283,35 @@ packages: '@ungap/structured-clone@1.2.0': resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} + '@vitest/expect@3.1.4': + resolution: {integrity: sha512-xkD/ljeliyaClDYqHPNCiJ0plY5YIcM0OlRiZizLhlPmpXWpxnGMyTZXOHFhFeG7w9P5PBeL4IdtJ/HeQwTbQA==} + + '@vitest/mocker@3.1.4': + resolution: {integrity: 
sha512-8IJ3CvwtSw/EFXqWFL8aCMu+YyYXG2WUSrQbViOZkWTKTVicVwZ/YiEZDSqD00kX+v/+W+OnxhNWoeVKorHygA==} + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 || ^6.0.0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@3.1.4': + resolution: {integrity: sha512-cqv9H9GvAEoTaoq+cYqUTCGscUjKqlJZC7PRwY5FMySVj5J+xOm1KQcCiYHJOEzOKRUhLH4R2pTwvFlWCEScsg==} + + '@vitest/runner@3.1.4': + resolution: {integrity: sha512-djTeF1/vt985I/wpKVFBMWUlk/I7mb5hmD5oP8K9ACRmVXgKTae3TUOtXAEBfslNKPzUQvnKhNd34nnRSYgLNQ==} + + '@vitest/snapshot@3.1.4': + resolution: {integrity: sha512-JPHf68DvuO7vilmvwdPr9TS0SuuIzHvxeaCkxYcCD4jTk67XwL45ZhEHFKIuCm8CYstgI6LZ4XbwD6ANrwMpFg==} + + '@vitest/spy@3.1.4': + resolution: {integrity: sha512-Xg1bXhu+vtPXIodYN369M86K8shGLouNjoVI78g8iAq2rFoHFdajNvJJ5A/9bPMFcfQqdaCpOgWKEoMQg/s0Yg==} + + '@vitest/utils@3.1.4': + resolution: {integrity: sha512-yriMuO1cfFhmiGc8ataN51+9ooHRuURdfAZfwFd3usWynjzpLslZdYnRegTv32qdgtJTsj15FoeZe2g15fY1gg==} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} @@ -2220,6 +2439,10 @@ packages: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + async@3.2.5: resolution: {integrity: sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==} @@ -2369,6 +2592,10 @@ packages: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + call-bind-apply-helpers@1.0.2: resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} engines: {node: '>= 0.4'} @@ -2396,6 +2623,10 @@ packages: caniuse-lite@1.0.30001642: resolution: {integrity: sha512-3XQ0DoRgLijXJErLSl+bLnJ+Et4KqV1PY6JJBGAFlsNsz31zeAIncyeZfLCabHK/jtSh+671RM9YMldxjUPZtA==} + chai@5.2.0: + resolution: {integrity: sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==} + engines: {node: '>=12'} + chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} @@ -2418,6 +2649,10 @@ packages: charenc@0.0.2: resolution: {integrity: sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==} + check-error@2.1.1: + resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} + engines: {node: '>= 16'} + chokidar@3.6.0: resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} engines: {node: '>= 8.10.0'} @@ -2549,6 +2784,10 @@ packages: babel-plugin-macros: optional: true + deep-eql@5.0.2: + resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} + engines: {node: '>=6'} + deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} 
@@ -2656,6 +2895,9 @@ packages: resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} engines: {node: '>= 0.4'} + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} @@ -2726,6 +2968,9 @@ packages: resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + esutils@2.0.3: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} @@ -2760,6 +3005,10 @@ packages: resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} engines: {node: '>= 0.8.0'} + expect-type@1.2.1: + resolution: {integrity: sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==} + engines: {node: '>=12.0.0'} + expect@29.7.0: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -2818,6 +3067,14 @@ packages: fb-watchman@2.0.2: resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + fdir@6.4.4: + resolution: {integrity: sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + file-entry-cache@6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} engines: {node: ^10.12.0 || >=12.0.0} @@ -3832,12 +4089,18 @@ packages: long@5.2.3: resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==} + loupe@3.1.3: + resolution: {integrity: sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==} + lru-cache@10.4.3: resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + magic-string@0.30.17: + resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + make-dir@4.0.0: resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} engines: {node: '>=10'} @@ -4196,6 +4459,13 @@ packages: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + + pathval@2.0.0: + resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==} + engines: {node: '>= 14.16'} + picocolors@1.0.1: resolution: {integrity: 
sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} @@ -4206,6 +4476,10 @@ packages: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} + picomatch@4.0.2: + resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + engines: {node: '>=12'} + pify@4.0.1: resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} engines: {node: '>=6'} @@ -4236,6 +4510,10 @@ packages: resolution: {integrity: sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==} engines: {node: '>=12'} + postcss@8.5.3: + resolution: {integrity: sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==} + engines: {node: ^10 || ^12 || >=14} + prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -4406,6 +4684,11 @@ packages: resolution: {integrity: sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==} hasBin: true + rollup@4.41.0: + resolution: {integrity: sha512-HqMFpUbWlf/tvcxBFNKnJyzc7Lk+XO3FGc3pbNBLqEbOz0gPLRgcrlS3UF4MfUrVlstOaP/q0kM6GVvi+LrLRg==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + router@2.2.0: resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} engines: {node: '>= 18'} @@ -4482,6 +4765,9 @@ packages: resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} engines: {node: '>= 0.4'} + siginfo@2.0.0: + resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} @@ -4499,6 +4785,10 @@ packages: sonic-boom@4.2.0: resolution: {integrity: sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==} + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + source-map-support@0.5.13: resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} @@ -4523,10 +4813,16 @@ packages: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + statuses@2.0.1: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} + std-env@3.9.0: + resolution: {integrity: sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==} + string-collapse-leading-whitespace@7.0.9: resolution: {integrity: sha512-lEuTHlogBT9PWipfk0FOyvoMKX8syiE03QoFk5MDh8oS0AJ2C07IlstR5cGkxz48nKkOIuvkC28w9Rx/cVRNDg==} engines: {node: '>=14.18.0'} @@ -4637,6 +4933,28 @@ packages: tiny-invariant@1.3.3: resolution: {integrity: 
sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + + tinyglobby@0.2.13: + resolution: {integrity: sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==} + engines: {node: '>=12.0.0'} + + tinypool@1.0.2: + resolution: {integrity: sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==} + engines: {node: ^18.0.0 || >=20.0.0} + + tinyrainbow@2.0.0: + resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} + engines: {node: '>=14.0.0'} + + tinyspy@3.0.2: + resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==} + engines: {node: '>=14.0.0'} + tmp@0.0.33: resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} engines: {node: '>=0.6.0'} @@ -4803,6 +5121,79 @@ packages: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} + vite-node@3.1.4: + resolution: {integrity: sha512-6enNwYnpyDo4hEgytbmc6mYWHXDHYEn0D1/rw4Q+tnHUGtKTJsn8T1YkX6Q18wI5LCrS8CTYlBaiCqxOy2kvUA==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + + vite@6.3.5: + resolution: {integrity: sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + jiti: '>=1.21.0' + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + + vitest@3.1.4: + resolution: {integrity: sha512-Ta56rT7uWxCSJXlBtKgIlApJnT6e6IGmTYxYcmxjJ4ujuZDI59GUQgVDObXXJujOmPDBYXHK1qmaGtneu6TNIQ==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/debug': ^4.1.12 + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + '@vitest/browser': 3.1.4 + '@vitest/ui': 3.1.4 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/debug': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + walker@1.0.8: resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} @@ -4828,6 +5219,11 @@ packages: engines: {node: '>= 8'} hasBin: true + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + wikipedia@2.1.2: resolution: {integrity: 
sha512-RAYaMpXC9/E873RaSEtlEa8dXK4e0p5k98GKOd210MtkE5emm6fcnwD+N6ZA4cuffjDWagvhaQKtp/mGp2BOVQ==} engines: {node: '>=10'} @@ -6209,6 +6605,10 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.17.1 + '@opentelemetry/api-logs@0.201.1': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs@0.50.0': dependencies: '@opentelemetry/api': 1.9.0 @@ -6253,6 +6653,11 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.28.0 + '@opentelemetry/core@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.34.0 + '@opentelemetry/exporter-logs-otlp-grpc@0.57.1(@opentelemetry/api@1.9.0)': dependencies: '@grpc/grpc-js': 1.12.5 @@ -6348,6 +6753,15 @@ snapshots: '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto@0.201.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.201.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.201.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto@0.50.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6398,6 +6812,12 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/otlp-exporter-base@0.201.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.201.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base@0.50.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6430,6 +6850,17 @@ snapshots: '@opentelemetry/otlp-exporter-base': 0.50.0(@opentelemetry/api@1.9.0) protobufjs: 7.4.0 + '@opentelemetry/otlp-transformer@0.201.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.201.1 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.201.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + protobufjs: 7.4.0 + '@opentelemetry/otlp-transformer@0.50.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6506,6 +6937,19 @@ snapshots: '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.28.0 + '@opentelemetry/resources@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.34.0 + + '@opentelemetry/sdk-logs@0.201.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.201.1 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs@0.50.0(@opentelemetry/api-logs@0.50.0)(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6540,6 +6984,12 @@ snapshots: '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) + 
'@opentelemetry/sdk-metrics@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-node@0.57.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6594,6 +7044,13 @@ snapshots: '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.28.0 + '@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.34.0 + '@opentelemetry/sdk-trace-node@1.25.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6624,6 +7081,8 @@ snapshots: '@opentelemetry/semantic-conventions@1.32.0': {} + '@opentelemetry/semantic-conventions@1.34.0': {} + '@pkgjs/parseargs@0.11.0': optional: true @@ -6650,6 +7109,66 @@ snapshots: '@protobufjs/utf8@1.1.0': {} + '@rollup/rollup-android-arm-eabi@4.41.0': + optional: true + + '@rollup/rollup-android-arm64@4.41.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.41.0': + optional: true + + '@rollup/rollup-darwin-x64@4.41.0': + optional: true + + '@rollup/rollup-freebsd-arm64@4.41.0': + optional: true + + '@rollup/rollup-freebsd-x64@4.41.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.41.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.41.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.41.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.41.0': + optional: true + + '@rollup/rollup-linux-loongarch64-gnu@4.41.0': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.41.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.41.0': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.41.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.41.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.41.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.41.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.41.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.41.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.41.0': + optional: true + '@sinclair/typebox@0.27.8': {} '@sindresorhus/is@4.6.0': {} @@ -7001,6 +7520,8 @@ snapshots: '@types/diff-match-patch@1.0.36': {} + '@types/estree@1.0.7': {} + '@types/express-serve-static-core@5.0.6': dependencies: '@types/node': 20.14.11 @@ -7190,6 +7711,46 @@ snapshots: '@ungap/structured-clone@1.2.0': {} + '@vitest/expect@3.1.4': + dependencies: + '@vitest/spy': 3.1.4 + '@vitest/utils': 3.1.4 + chai: 5.2.0 + tinyrainbow: 2.0.0 + + '@vitest/mocker@3.1.4(vite@6.3.5(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0))': + dependencies: + '@vitest/spy': 3.1.4 + estree-walker: 3.0.3 + magic-string: 0.30.17 + optionalDependencies: + vite: 6.3.5(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0) + + '@vitest/pretty-format@3.1.4': + dependencies: + tinyrainbow: 2.0.0 + + '@vitest/runner@3.1.4': + dependencies: + '@vitest/utils': 3.1.4 + pathe: 2.0.3 + + '@vitest/snapshot@3.1.4': + dependencies: + '@vitest/pretty-format': 3.1.4 + magic-string: 0.30.17 + pathe: 2.0.3 + + '@vitest/spy@3.1.4': + dependencies: + tinyspy: 3.0.2 + + '@vitest/utils@3.1.4': + dependencies: + '@vitest/pretty-format': 3.1.4 + loupe: 3.1.3 + tinyrainbow: 2.0.0 + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 @@ 
-7302,6 +7863,8 @@ snapshots: array-union@2.1.0: {} + assertion-error@2.0.1: {} + async@3.2.5: {} asynckit@0.4.0: {} @@ -7468,6 +8031,8 @@ snapshots: bytes@3.1.2: {} + cac@6.7.14: {} + call-bind-apply-helpers@1.0.2: dependencies: es-errors: 1.3.0 @@ -7488,6 +8053,14 @@ snapshots: caniuse-lite@1.0.30001642: {} + chai@5.2.0: + dependencies: + assertion-error: 2.0.1 + check-error: 2.1.1 + deep-eql: 5.0.2 + loupe: 3.1.3 + pathval: 2.0.0 + chalk@2.4.2: dependencies: ansi-styles: 3.2.1 @@ -7507,6 +8080,8 @@ snapshots: charenc@0.0.2: {} + check-error@2.1.1: {} + chokidar@3.6.0: dependencies: anymatch: 3.1.3 @@ -7623,6 +8198,8 @@ snapshots: dedent@1.5.3: {} + deep-eql@5.0.2: {} + deep-is@0.1.4: {} deepmerge@4.3.1: {} @@ -7704,6 +8281,8 @@ snapshots: es-errors@1.3.0: {} + es-module-lexer@1.7.0: {} + es-object-atoms@1.1.1: dependencies: es-errors: 1.3.0 @@ -7823,6 +8402,10 @@ snapshots: estraverse@5.3.0: {} + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.7 + esutils@2.0.3: {} etag@1.8.1: {} @@ -7853,6 +8436,8 @@ snapshots: exit@0.1.2: {} + expect-type@1.2.1: {} + expect@29.7.0: dependencies: '@jest/expect-utils': 29.7.0 @@ -7941,6 +8526,10 @@ snapshots: dependencies: bser: 2.1.1 + fdir@6.4.4(picomatch@4.0.2): + optionalDependencies: + picomatch: 4.0.2 + file-entry-cache@6.0.1: dependencies: flat-cache: 3.2.0 @@ -8927,12 +9516,18 @@ snapshots: long@5.2.3: {} + loupe@3.1.3: {} + lru-cache@10.4.3: {} lru-cache@5.1.1: dependencies: yallist: 3.1.1 + magic-string@0.30.17: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.0 + make-dir@4.0.0: dependencies: semver: 7.7.1 @@ -9274,12 +9869,18 @@ snapshots: path-type@4.0.0: {} + pathe@2.0.3: {} + + pathval@2.0.0: {} + picocolors@1.0.1: {} picocolors@1.1.1: {} picomatch@2.3.1: {} + picomatch@4.0.2: {} + pify@4.0.1: {} pino-abstract-transport@2.0.0: @@ -9314,6 +9915,12 @@ snapshots: dependencies: queue-lit: 1.5.2 + postcss@8.5.3: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + prelude-ls@1.2.1: {} prettier@2.8.8: {} @@ -9472,6 +10079,32 @@ snapshots: dependencies: glob: 10.4.5 + rollup@4.41.0: + dependencies: + '@types/estree': 1.0.7 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.41.0 + '@rollup/rollup-android-arm64': 4.41.0 + '@rollup/rollup-darwin-arm64': 4.41.0 + '@rollup/rollup-darwin-x64': 4.41.0 + '@rollup/rollup-freebsd-arm64': 4.41.0 + '@rollup/rollup-freebsd-x64': 4.41.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.41.0 + '@rollup/rollup-linux-arm-musleabihf': 4.41.0 + '@rollup/rollup-linux-arm64-gnu': 4.41.0 + '@rollup/rollup-linux-arm64-musl': 4.41.0 + '@rollup/rollup-linux-loongarch64-gnu': 4.41.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.41.0 + '@rollup/rollup-linux-riscv64-gnu': 4.41.0 + '@rollup/rollup-linux-riscv64-musl': 4.41.0 + '@rollup/rollup-linux-s390x-gnu': 4.41.0 + '@rollup/rollup-linux-x64-gnu': 4.41.0 + '@rollup/rollup-linux-x64-musl': 4.41.0 + '@rollup/rollup-win32-arm64-msvc': 4.41.0 + '@rollup/rollup-win32-ia32-msvc': 4.41.0 + '@rollup/rollup-win32-x64-msvc': 4.41.0 + fsevents: 2.3.3 + router@2.2.0: dependencies: debug: 4.4.0 @@ -9567,6 +10200,8 @@ snapshots: side-channel-map: 1.0.1 side-channel-weakmap: 1.0.2 + siginfo@2.0.0: {} + signal-exit@3.0.7: {} signal-exit@4.1.0: {} @@ -9579,6 +10214,8 @@ snapshots: dependencies: atomic-sleep: 1.0.0 + source-map-js@1.2.1: {} + source-map-support@0.5.13: dependencies: buffer-from: 1.1.2 @@ -9603,8 +10240,12 @@ snapshots: dependencies: escape-string-regexp: 2.0.0 + stackback@0.0.2: {} + statuses@2.0.1: {} + std-env@3.9.0: {} + 
string-collapse-leading-whitespace@7.0.9: {} string-comparison@1.3.0: {} @@ -9705,6 +10346,21 @@ snapshots: tiny-invariant@1.3.3: {} + tinybench@2.9.0: {} + + tinyexec@0.3.2: {} + + tinyglobby@0.2.13: + dependencies: + fdir: 6.4.4(picomatch@4.0.2) + picomatch: 4.0.2 + + tinypool@1.0.2: {} + + tinyrainbow@2.0.0: {} + + tinyspy@3.0.2: {} + tmp@0.0.33: dependencies: os-tmpdir: 1.0.2 @@ -9725,7 +10381,7 @@ snapshots: dependencies: typescript: 5.5.4 - ts-jest@29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0)(typescript@5.5.4): + ts-jest@29.2.4(@babel/core@7.24.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.9))(jest@29.7.0(@types/node@20.14.11)(ts-node@10.9.2(@types/node@20.14.11)(typescript@5.5.4)))(typescript@5.5.4): dependencies: bs-logger: 0.2.6 ejs: 3.1.10 @@ -9844,6 +10500,80 @@ snapshots: vary@1.1.2: {} + vite-node@3.1.4(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0): + dependencies: + cac: 6.7.14 + debug: 4.4.0 + es-module-lexer: 1.7.0 + pathe: 2.0.3 + vite: 6.3.5(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0) + transitivePeerDependencies: + - '@types/node' + - jiti + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + + vite@6.3.5(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0): + dependencies: + esbuild: 0.25.2 + fdir: 6.4.4(picomatch@4.0.2) + picomatch: 4.0.2 + postcss: 8.5.3 + rollup: 4.41.0 + tinyglobby: 0.2.13 + optionalDependencies: + '@types/node': 20.14.11 + fsevents: 2.3.3 + tsx: 4.19.3 + yaml: 2.5.0 + + vitest@3.1.4(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0): + dependencies: + '@vitest/expect': 3.1.4 + '@vitest/mocker': 3.1.4(vite@6.3.5(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0)) + '@vitest/pretty-format': 3.1.4 + '@vitest/runner': 3.1.4 + '@vitest/snapshot': 3.1.4 + '@vitest/spy': 3.1.4 + '@vitest/utils': 3.1.4 + chai: 5.2.0 + debug: 4.4.0 + expect-type: 1.2.1 + magic-string: 0.30.17 + pathe: 2.0.3 + std-env: 3.9.0 + tinybench: 2.9.0 + tinyexec: 0.3.2 + tinyglobby: 0.2.13 + tinypool: 1.0.2 + tinyrainbow: 2.0.0 + vite: 6.3.5(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0) + vite-node: 3.1.4(@types/node@20.14.11)(tsx@4.19.3)(yaml@2.5.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 20.14.11 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + walker@1.0.8: dependencies: makeerror: 1.0.12 @@ -9866,6 +10596,11 @@ snapshots: dependencies: isexe: 2.0.0 + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + wikipedia@2.1.2: dependencies: axios: 1.7.9