navigate(
- `${row.original.trace.traceId}?selectedSpanNodeId=${row.original.id}`
+ `${row.original.trace.traceId}?${SELECTED_SPAN_NODE_ID_PARAM}=${row.original.id}`
)
}
>
{row.getVisibleCells().map((cell) => {
+ const colSizeVar = `--col-${cell.column.id}-size`;
return (
{
+ if (!setTraceSequence) {
+ return;
+ }
+ setTraceSequence(
+ data.spans.edges.map(({ span }) => ({
+ traceId: span.trace.traceId,
+ spanId: span.id,
+ }))
+ );
+ return () => {
+ setTraceSequence([]);
+ };
+ }, [data.spans.edges, setTraceSequence]);
+
const annotationColumnVisibility = useTracingContext(
(state) => state.annotationColumnVisibility
);
@@ -254,29 +286,34 @@ export function SpansTable(props: SpansTableProps) {
header: name,
columns: [
{
- header: `label`,
- accessorKey: `${ANNOTATIONS_COLUMN_PREFIX}${ANNOTATIONS_KEY_SEPARATOR}label${ANNOTATIONS_KEY_SEPARATOR}${name}`,
+ header: `labels`,
+ accessorKey: makeAnnotationColumnId(name, "label"),
cell: ({ row }) => {
- const annotation = row.original.spanAnnotations.find(
+ const annotation = row.original.spanAnnotationSummaries.find(
(annotation) => annotation.name === name
);
if (!annotation) {
return null;
}
- return annotation.label;
+ return (
+
+ );
},
} as ColumnDef,
{
- header: `score`,
- accessorKey: `${ANNOTATIONS_COLUMN_PREFIX}${ANNOTATIONS_KEY_SEPARATOR}score${ANNOTATIONS_KEY_SEPARATOR}${name}`,
+ header: `mean score`,
+ accessorKey: makeAnnotationColumnId(name, "score"),
cell: ({ row }) => {
- const annotation = row.original.spanAnnotations.find(
+ const annotation = row.original.spanAnnotationSummaries.find(
(annotation) => annotation.name === name
);
if (!annotation) {
return null;
}
- return annotation.score;
+ return ;
},
} as ColumnDef,
],
@@ -306,24 +343,10 @@ export function SpansTable(props: SpansTableProps) {
cell: ({ row }) => {
return (
- {row.original.spanAnnotations.map((annotation) => {
- return (
-
- }
- >
-
-
- );
- })}
+
{row.original.documentRetrievalMetrics.map((retrievalMetric) => {
return (
<>
@@ -409,7 +432,7 @@ export function SpansTable(props: SpansTableProps) {
const span = row.original;
const { traceId } = span.trace;
return (
-
+
{getValue() as string}
);
@@ -646,6 +669,7 @@ export function SpansTable(props: SpansTableProps) {
{headerGroup.headers.map((header) => (
- {flexRender(
- header.column.columnDef.header,
- header.getContext()
- )}
+
+ {flexRender(
+ header.column.columnDef.header,
+ header.getContext()
+ )}
+
{header.column.getIsSorted() ? (
;
}) => {
const navigate = useNavigate();
+ const { traceId } = useParams();
return (
{table.getRowModel().rows.map((row) => {
+ const isSelected = row.original.trace.traceId === traceId;
return (
navigate(`${row.original.trace.traceId}`)}
data-is-additional-row={row.original.__additionalRow}
- css={trCSS}
+ data-selected={isSelected}
+ css={css(trCSS)}
>
{row.getVisibleCells().map((cell) => {
+ const colSizeVar = `--col-${cell.column.id}-size`;
return (
{
- const data = row.original;
- const annotation = data.spanAnnotations.find(
- (annotation) => annotation.name === name
- );
+ const annotation = (
+ row.original
+ .spanAnnotationSummaries as TracesTable_spans$data["rootSpans"]["edges"][number]["rootSpan"]["spanAnnotationSummaries"]
+ )?.find((annotation) => annotation.name === name);
if (!annotation) {
return null;
}
- return annotation.label;
+ return (
+
+ );
},
} as ColumnDef,
{
- header: `score`,
- accessorKey: `${ANNOTATIONS_COLUMN_PREFIX}${ANNOTATIONS_KEY_SEPARATOR}score${ANNOTATIONS_KEY_SEPARATOR}${name}`,
+ header: `mean score`,
+ accessorKey: makeAnnotationColumnId(name, "score"),
cell: ({ row }) => {
- const annotation = row.original.spanAnnotations.find(
- (annotation) => annotation.name === name
- );
+ const annotation = (
+ row.original
+ .spanAnnotationSummaries as TracesTable_spans$data["rootSpans"]["edges"][number]["rootSpan"]["spanAnnotationSummaries"]
+ )?.find((annotation) => annotation.name === name);
if (!annotation) {
return null;
}
- return annotation.score;
+ return (
+
+ );
},
} as ColumnDef,
],
@@ -415,24 +436,10 @@ export function TracesTable(props: TracesTableProps) {
row.original.documentRetrievalMetrics.length === 0;
return (
- {row.original.spanAnnotations.map((annotation) => {
- return (
-
- }
- >
-
-
- );
- })}
+
{row.original.documentRetrievalMetrics.map((retrievalMetric) => {
return (
@@ -578,7 +585,7 @@ export function TracesTable(props: TracesTableProps) {
: row.original.id;
return (
{getValue() as string}
@@ -695,6 +702,24 @@ export function TracesTable(props: TracesTableProps) {
},
[hasNext, isLoadingNext, loadNext]
);
+
+ const pagination = useTracePagination();
+ const setTraceSequence = pagination?.setTraceSequence;
+ useEffect(() => {
+ if (!setTraceSequence) {
+ return;
+ }
+ setTraceSequence(
+ data.rootSpans.edges.map(({ rootSpan }) => ({
+ traceId: rootSpan.trace.traceId,
+ spanId: rootSpan.id,
+ }))
+ );
+ return () => {
+ setTraceSequence([]);
+ };
+ }, [data.rootSpans.edges, setTraceSequence]);
+
const [expanded, setExpanded] = useState({});
const columnVisibility = useTracingContext((state) => state.columnVisibility);
const setColumnSizing = useTracingContext((state) => state.setColumnSizing);
@@ -801,6 +826,7 @@ export function TracesTable(props: TracesTableProps) {
style={{
width: `calc(var(--header-${header.id}-size) * 1px)`,
}}
+ colSpan={header.colSpan}
key={header.id}
>
{header.isPlaceholder ? null : (
@@ -818,10 +844,12 @@ export function TracesTable(props: TracesTableProps) {
},
}}
>
- {flexRender(
- header.column.columnDef.header,
- header.getContext()
- )}
+
+ {flexRender(
+ header.column.columnDef.header,
+ header.getContext()
+ )}
+
{header.column.getIsSorted() ? (
>
+ * @generated SignedSource<<57eb739471039bb86a540f8d4a005454>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -63,7 +63,42 @@ v4 = [
"name": "timeRange",
"variableName": "timeRange"
}
-];
+],
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v9 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+};
return {
"fragment": {
"argumentDefinitions": [
@@ -113,27 +148,101 @@ return {
"name": "node",
"plural": false,
"selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "__typename",
- "storageKey": null
- },
+ (v5/*: any*/),
{
"kind": "TypeDiscriminator",
"abstractKey": "__isNode"
},
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "id",
- "storageKey": null
- },
+ (v6/*: any*/),
{
"kind": "InlineFragment",
"selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ (v8/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v9/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
{
"alias": null,
"args": (v4/*: any*/),
@@ -142,6 +251,7 @@ return {
"name": "spanAnnotationSummary",
"plural": false,
"selections": [
+ (v8/*: any*/),
{
"alias": null,
"args": null,
@@ -150,13 +260,7 @@ return {
"name": "labelFractions",
"plural": true,
"selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
+ (v9/*: any*/),
{
"alias": null,
"args": null,
@@ -187,12 +291,12 @@ return {
]
},
"params": {
- "cacheID": "88dfad5d25db7e03286a789f3ba37c75",
+ "cacheID": "1309abd7d655ab71ac00caaf47a59d44",
"id": null,
"metadata": {},
"name": "AnnotationSummaryQuery",
"operationKind": "query",
- "text": "query AnnotationSummaryQuery(\n $id: GlobalID!\n $annotationName: String!\n $timeRange: TimeRange!\n) {\n project: node(id: $id) {\n __typename\n ...AnnotationSummaryValueFragment_4BTVrq\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryValueFragment_4BTVrq on Project {\n spanAnnotationSummary(annotationName: $annotationName, timeRange: $timeRange) {\n labelFractions {\n label\n fraction\n }\n meanScore\n }\n id\n}\n"
+ "text": "query AnnotationSummaryQuery(\n $id: GlobalID!\n $annotationName: String!\n $timeRange: TimeRange!\n) {\n project: node(id: $id) {\n __typename\n ...AnnotationSummaryValueFragment_4BTVrq\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryValueFragment_4BTVrq on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n annotationType\n id\n optimizationDirection\n name\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n spanAnnotationSummary(annotationName: $annotationName, timeRange: $timeRange) {\n name\n labelFractions {\n label\n fraction\n }\n meanScore\n }\n id\n}\n"
}
};
})();
diff --git a/app/src/pages/project/__generated__/AnnotationSummaryValueFragment.graphql.ts b/app/src/pages/project/__generated__/AnnotationSummaryValueFragment.graphql.ts
index fe3b6ee892..d257623ed7 100644
--- a/app/src/pages/project/__generated__/AnnotationSummaryValueFragment.graphql.ts
+++ b/app/src/pages/project/__generated__/AnnotationSummaryValueFragment.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<>
+ * @generated SignedSource<<1f85ce67fb66c9e1966df963bd6a0427>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,8 +9,24 @@
// @ts-nocheck
import { ReaderFragment } from 'relay-runtime';
+export type AnnotationType = "CATEGORICAL" | "CONTINUOUS" | "FREEFORM";
+export type OptimizationDirection = "MAXIMIZE" | "MINIMIZE" | "NONE";
import { FragmentRefs } from "relay-runtime";
export type AnnotationSummaryValueFragment$data = {
+ readonly annotationConfigs: {
+ readonly edges: ReadonlyArray<{
+ readonly node: {
+ readonly annotationType?: AnnotationType;
+ readonly id?: string;
+ readonly name?: string;
+ readonly optimizationDirection?: OptimizationDirection;
+ readonly values?: ReadonlyArray<{
+ readonly label: string;
+ readonly score: number | null;
+ }>;
+ };
+ }>;
+ };
readonly id: string;
readonly spanAnnotationSummary: {
readonly labelFractions: ReadonlyArray<{
@@ -18,6 +34,7 @@ export type AnnotationSummaryValueFragment$data = {
readonly label: string;
}>;
readonly meanScore: number | null;
+ readonly name: string;
} | null;
readonly " $fragmentType": "AnnotationSummaryValueFragment";
};
@@ -28,7 +45,36 @@ export type AnnotationSummaryValueFragment$key = {
import AnnotationSummaryValueQuery_graphql from './AnnotationSummaryValueQuery.graphql';
-const node: ReaderFragment = {
+const node: ReaderFragment = (function(){
+var v0 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+};
+return {
"argumentDefinitions": [
{
"defaultValue": null,
@@ -57,6 +103,83 @@ const node: ReaderFragment = {
},
"name": "AnnotationSummaryValueFragment",
"selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ (v2/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v3/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
{
"alias": null,
"args": [
@@ -76,6 +199,7 @@ const node: ReaderFragment = {
"name": "spanAnnotationSummary",
"plural": false,
"selections": [
+ (v2/*: any*/),
{
"alias": null,
"args": null,
@@ -84,13 +208,7 @@ const node: ReaderFragment = {
"name": "labelFractions",
"plural": true,
"selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
+ (v3/*: any*/),
{
"alias": null,
"args": null,
@@ -111,18 +229,13 @@ const node: ReaderFragment = {
],
"storageKey": null
},
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "id",
- "storageKey": null
- }
+ (v1/*: any*/)
],
"type": "Project",
"abstractKey": null
};
+})();
-(node as any).hash = "0a8e190e4d87acc87c161366f330e3af";
+(node as any).hash = "5d114f6c3ea85467bd6c915907c07d87";
export default node;
diff --git a/app/src/pages/project/__generated__/AnnotationSummaryValueQuery.graphql.ts b/app/src/pages/project/__generated__/AnnotationSummaryValueQuery.graphql.ts
index ce56a6f284..14810c5dd6 100644
--- a/app/src/pages/project/__generated__/AnnotationSummaryValueQuery.graphql.ts
+++ b/app/src/pages/project/__generated__/AnnotationSummaryValueQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<0c1bfb21a9f6ddd23f9a7b2b391b6873>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -63,7 +63,42 @@ v4 = [
"name": "timeRange",
"variableName": "timeRange"
}
-];
+],
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v9 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+};
return {
"fragment": {
"argumentDefinitions": [
@@ -113,27 +148,101 @@ return {
"name": "node",
"plural": false,
"selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "__typename",
- "storageKey": null
- },
+ (v5/*: any*/),
{
"kind": "TypeDiscriminator",
"abstractKey": "__isNode"
},
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "id",
- "storageKey": null
- },
+ (v6/*: any*/),
{
"kind": "InlineFragment",
"selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ (v8/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v9/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
{
"alias": null,
"args": (v4/*: any*/),
@@ -142,6 +251,7 @@ return {
"name": "spanAnnotationSummary",
"plural": false,
"selections": [
+ (v8/*: any*/),
{
"alias": null,
"args": null,
@@ -150,13 +260,7 @@ return {
"name": "labelFractions",
"plural": true,
"selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
+ (v9/*: any*/),
{
"alias": null,
"args": null,
@@ -187,16 +291,16 @@ return {
]
},
"params": {
- "cacheID": "bae90b7000aa37d37050070d8504b41c",
+ "cacheID": "2c3626e6f163549362727204bc50686b",
"id": null,
"metadata": {},
"name": "AnnotationSummaryValueQuery",
"operationKind": "query",
- "text": "query AnnotationSummaryValueQuery(\n $annotationName: String!\n $timeRange: TimeRange!\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...AnnotationSummaryValueFragment_4BTVrq\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryValueFragment_4BTVrq on Project {\n spanAnnotationSummary(annotationName: $annotationName, timeRange: $timeRange) {\n labelFractions {\n label\n fraction\n }\n meanScore\n }\n id\n}\n"
+ "text": "query AnnotationSummaryValueQuery(\n $annotationName: String!\n $timeRange: TimeRange!\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...AnnotationSummaryValueFragment_4BTVrq\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryValueFragment_4BTVrq on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n annotationType\n id\n optimizationDirection\n name\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n spanAnnotationSummary(annotationName: $annotationName, timeRange: $timeRange) {\n name\n labelFractions {\n label\n fraction\n }\n meanScore\n }\n id\n}\n"
}
};
})();
-(node as any).hash = "0a8e190e4d87acc87c161366f330e3af";
+(node as any).hash = "5d114f6c3ea85467bd6c915907c07d87";
export default node;
diff --git a/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation.graphql.ts b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation.graphql.ts
new file mode 100644
index 0000000000..4ba23cf311
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation.graphql.ts
@@ -0,0 +1,215 @@
+/**
+ * @generated SignedSource<<9f4ee12212f3b8c6d6e2164536ddb4a9>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation$variables = {
+ annotationConfigId: string;
+ projectId: string;
+};
+export type ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation$data = {
+ readonly addAnnotationConfigToProject: {
+ readonly project: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectAnnotationConfigCardContent_project_annotations">;
+ };
+ };
+};
+export type ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation = {
+ response: ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation$data;
+ variables: ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "annotationConfigId"
+},
+v1 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "projectId"
+},
+v2 = [
+ {
+ "fields": [
+ {
+ "kind": "Variable",
+ "name": "annotationConfigId",
+ "variableName": "annotationConfigId"
+ },
+ {
+ "kind": "Variable",
+ "name": "projectId",
+ "variableName": "projectId"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "input"
+ }
+],
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": [
+ (v0/*: any*/),
+ (v1/*: any*/)
+ ],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "AddAnnotationConfigToProjectPayload",
+ "kind": "LinkedField",
+ "name": "addAnnotationConfigToProject",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectAnnotationConfigCardContent_project_annotations"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": [
+ (v1/*: any*/),
+ (v0/*: any*/)
+ ],
+ "kind": "Operation",
+ "name": "ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "AddAnnotationConfigToProjectPayload",
+ "kind": "LinkedField",
+ "name": "addAnnotationConfigToProject",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "2058f72875702238cf08b43a463f5d41",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation",
+ "operationKind": "mutation",
+ "text": "mutation ProjectAnnotationConfigCardContentAddAnnotationConfigToProjectMutation(\n $projectId: GlobalID!\n $annotationConfigId: GlobalID!\n) {\n addAnnotationConfigToProject(input: {projectId: $projectId, annotationConfigId: $annotationConfigId}) {\n project {\n ...ProjectAnnotationConfigCardContent_project_annotations\n }\n }\n}\n\nfragment ProjectAnnotationConfigCardContent_project_annotations on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n id\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "c366a73bfa60a9621a61a1c7eebf0271";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentProjectAnnotationsQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentProjectAnnotationsQuery.graphql.ts
new file mode 100644
index 0000000000..a86c2bea1e
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentProjectAnnotationsQuery.graphql.ts
@@ -0,0 +1,183 @@
+/**
+ * @generated SignedSource<<61a77afc2c44d5c28e070a8b90857b88>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectAnnotationConfigCardContentProjectAnnotationsQuery$variables = {
+ id: string;
+};
+export type ProjectAnnotationConfigCardContentProjectAnnotationsQuery$data = {
+ readonly node: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectAnnotationConfigCardContent_project_annotations">;
+ };
+};
+export type ProjectAnnotationConfigCardContentProjectAnnotationsQuery = {
+ response: ProjectAnnotationConfigCardContentProjectAnnotationsQuery$data;
+ variables: ProjectAnnotationConfigCardContentProjectAnnotationsQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "id"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "id"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectAnnotationConfigCardContentProjectAnnotationsQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectAnnotationConfigCardContent_project_annotations"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "ProjectAnnotationConfigCardContentProjectAnnotationsQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v3/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "0430655c8d0f76f4f7e080bca9e31315",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectAnnotationConfigCardContentProjectAnnotationsQuery",
+ "operationKind": "query",
+ "text": "query ProjectAnnotationConfigCardContentProjectAnnotationsQuery(\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...ProjectAnnotationConfigCardContent_project_annotations\n __isNode: __typename\n id\n }\n}\n\nfragment ProjectAnnotationConfigCardContent_project_annotations on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n id\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "f7049daf44d5f6a2e085c04cc187e86e";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentQuery.graphql.ts
new file mode 100644
index 0000000000..c8cbfba85c
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentQuery.graphql.ts
@@ -0,0 +1,288 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type AnnotationType = "CATEGORICAL" | "CONTINUOUS" | "FREEFORM";
+export type ProjectAnnotationConfigCardContentQuery$variables = {
+ projectId: string;
+};
+export type ProjectAnnotationConfigCardContentQuery$data = {
+ readonly allAnnotationConfigs: {
+ readonly edges: ReadonlyArray<{
+ readonly node: {
+ readonly annotationType?: AnnotationType;
+ readonly id?: string;
+ readonly name?: string;
+ };
+ }>;
+ };
+ readonly project: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectAnnotationConfigCardContent_project_annotations">;
+ };
+};
+export type ProjectAnnotationConfigCardContentQuery = {
+ response: ProjectAnnotationConfigCardContentQuery$data;
+ variables: ProjectAnnotationConfigCardContentQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "projectId"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "projectId"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v3 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectAnnotationConfigCardContentQuery",
+ "selections": [
+ {
+ "alias": "project",
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectAnnotationConfigCardContent_project_annotations"
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": "allAnnotationConfigs",
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v5/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "ProjectAnnotationConfigCardContentQuery",
+ "selections": [
+ {
+ "alias": "project",
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": "allAnnotationConfigs",
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ (v3/*: any*/),
+ (v5/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "0c6ef22f77939481932f07f0e45a3154",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectAnnotationConfigCardContentQuery",
+ "operationKind": "query",
+ "text": "query ProjectAnnotationConfigCardContentQuery(\n $projectId: GlobalID!\n) {\n project: node(id: $projectId) {\n __typename\n ... on Project {\n ...ProjectAnnotationConfigCardContent_project_annotations\n }\n __isNode: __typename\n id\n }\n allAnnotationConfigs: annotationConfigs {\n edges {\n node {\n __typename\n ... on Node {\n __isNode: __typename\n id\n }\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n annotationType\n }\n }\n }\n }\n}\n\nfragment ProjectAnnotationConfigCardContent_project_annotations on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n id\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "ed210ab30e8d40d669d27c7f83ada5b8";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation.graphql.ts b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation.graphql.ts
new file mode 100644
index 0000000000..0eb9cacb2d
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation.graphql.ts
@@ -0,0 +1,215 @@
+/**
+ * @generated SignedSource<<8776cd35fbc8a1af057dffe77f290c4f>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation$variables = {
+ annotationConfigId: string;
+ projectId: string;
+};
+export type ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation$data = {
+ readonly removeAnnotationConfigFromProject: {
+ readonly project: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectAnnotationConfigCardContent_project_annotations">;
+ };
+ };
+};
+export type ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation = {
+ response: ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation$data;
+ variables: ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "annotationConfigId"
+},
+v1 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "projectId"
+},
+v2 = [
+ {
+ "fields": [
+ {
+ "kind": "Variable",
+ "name": "annotationConfigId",
+ "variableName": "annotationConfigId"
+ },
+ {
+ "kind": "Variable",
+ "name": "projectId",
+ "variableName": "projectId"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "input"
+ }
+],
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": [
+ (v0/*: any*/),
+ (v1/*: any*/)
+ ],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "RemoveAnnotationConfigFromProjectPayload",
+ "kind": "LinkedField",
+ "name": "removeAnnotationConfigFromProject",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectAnnotationConfigCardContent_project_annotations"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": [
+ (v1/*: any*/),
+ (v0/*: any*/)
+ ],
+ "kind": "Operation",
+ "name": "ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "RemoveAnnotationConfigFromProjectPayload",
+ "kind": "LinkedField",
+ "name": "removeAnnotationConfigFromProject",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "ead28b4d3393d0091aee72f30bcb3550",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation",
+ "operationKind": "mutation",
+ "text": "mutation ProjectAnnotationConfigCardContentRemoveAnnotationConfigFromProjectMutation(\n $projectId: GlobalID!\n $annotationConfigId: GlobalID!\n) {\n removeAnnotationConfigFromProject(input: {projectId: $projectId, annotationConfigId: $annotationConfigId}) {\n project {\n ...ProjectAnnotationConfigCardContent_project_annotations\n }\n }\n}\n\nfragment ProjectAnnotationConfigCardContent_project_annotations on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n id\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "0ec2dc54a9750a2f8f31bece26a75b7f";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContent_project_annotations.graphql.ts b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContent_project_annotations.graphql.ts
new file mode 100644
index 0000000000..87636a9dd7
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectAnnotationConfigCardContent_project_annotations.graphql.ts
@@ -0,0 +1,110 @@
+/**
+ * @generated SignedSource<<93264ea748ae73c84ac6b50cd12c6eb7>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectAnnotationConfigCardContent_project_annotations$data = {
+ readonly annotationConfigs: {
+ readonly edges: ReadonlyArray<{
+ readonly node: {
+ readonly name?: string;
+ };
+ }>;
+ };
+ readonly id: string;
+ readonly " $fragmentType": "ProjectAnnotationConfigCardContent_project_annotations";
+};
+export type ProjectAnnotationConfigCardContent_project_annotations$key = {
+ readonly " $data"?: ProjectAnnotationConfigCardContent_project_annotations$data;
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectAnnotationConfigCardContent_project_annotations">;
+};
+
+import ProjectAnnotationConfigCardContentProjectAnnotationsQuery_graphql from './ProjectAnnotationConfigCardContentProjectAnnotationsQuery.graphql';
+
+const node: ReaderFragment = {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": {
+ "refetch": {
+ "connection": null,
+ "fragmentPathInResult": [
+ "node"
+ ],
+ "operation": ProjectAnnotationConfigCardContentProjectAnnotationsQuery_graphql,
+ "identifierInfo": {
+ "identifierField": "id",
+ "identifierQueryVariableName": "id"
+ }
+ }
+ },
+ "name": "ProjectAnnotationConfigCardContent_project_annotations",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+};
+
+(node as any).hash = "f7049daf44d5f6a2e085c04cc187e86e";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectConfigPageProjectRetentionPolicyCardQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectConfigPageProjectRetentionPolicyCardQuery.graphql.ts
new file mode 100644
index 0000000000..d47799d179
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectConfigPageProjectRetentionPolicyCardQuery.graphql.ts
@@ -0,0 +1,206 @@
+/**
+ * @generated SignedSource<<104fcd9d6a5ce09881a6124599ef673e>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectConfigPageProjectRetentionPolicyCardQuery$variables = {
+ id: string;
+};
+export type ProjectConfigPageProjectRetentionPolicyCardQuery$data = {
+ readonly node: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectConfigPage_projectRetentionPolicyCard">;
+ };
+};
+export type ProjectConfigPageProjectRetentionPolicyCardQuery = {
+ response: ProjectConfigPageProjectRetentionPolicyCardQuery$data;
+ variables: ProjectConfigPageProjectRetentionPolicyCardQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "id"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "id"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectConfigPageProjectRetentionPolicyCardQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectConfigPage_projectRetentionPolicyCard"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "ProjectConfigPageProjectRetentionPolicyCardQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v3/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "traceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/),
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "49b369ab7ad84cf211bbecaf35401515",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectConfigPageProjectRetentionPolicyCardQuery",
+ "operationKind": "query",
+ "text": "query ProjectConfigPageProjectRetentionPolicyCardQuery(\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...ProjectConfigPage_projectRetentionPolicyCard\n __isNode: __typename\n id\n }\n}\n\nfragment ProjectConfigPage_projectRetentionPolicyCard on Project {\n id\n name\n traceRetentionPolicy {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "be41839ade0a17a6d549249abad5d6ab";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectConfigPage_projectRetentionPolicyCard.graphql.ts b/app/src/pages/project/__generated__/ProjectConfigPage_projectRetentionPolicyCard.graphql.ts
new file mode 100644
index 0000000000..0d165bb9f4
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectConfigPage_projectRetentionPolicyCard.graphql.ts
@@ -0,0 +1,147 @@
+/**
+ * @generated SignedSource<<7defc23224a16332de310129565e111e>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectConfigPage_projectRetentionPolicyCard$data = {
+ readonly id: string;
+ readonly name: string;
+ readonly traceRetentionPolicy: {
+ readonly cronExpression: string;
+ readonly id: string;
+ readonly name: string;
+ readonly rule: {
+ readonly maxCount?: number;
+ readonly maxDays?: number;
+ };
+ };
+ readonly " $fragmentType": "ProjectConfigPage_projectRetentionPolicyCard";
+};
+export type ProjectConfigPage_projectRetentionPolicyCard$key = {
+ readonly " $data"?: ProjectConfigPage_projectRetentionPolicyCard$data;
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectConfigPage_projectRetentionPolicyCard">;
+};
+
+import ProjectConfigPageProjectRetentionPolicyCardQuery_graphql from './ProjectConfigPageProjectRetentionPolicyCardQuery.graphql';
+
+const node: ReaderFragment = (function(){
+var v0 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+};
+return {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": {
+ "refetch": {
+ "connection": null,
+ "fragmentPathInResult": [
+ "node"
+ ],
+ "operation": ProjectConfigPageProjectRetentionPolicyCardQuery_graphql,
+ "identifierInfo": {
+ "identifierField": "id",
+ "identifierQueryVariableName": "id"
+ }
+ }
+ },
+ "name": "ProjectConfigPage_projectRetentionPolicyCard",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "traceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v3/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+};
+})();
+
+(node as any).hash = "be41839ade0a17a6d549249abad5d6ab";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectPageQueriesProjectConfigQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectPageQueriesProjectConfigQuery.graphql.ts
index 1105a4d413..df06730bd4 100644
--- a/app/src/pages/project/__generated__/ProjectPageQueriesProjectConfigQuery.graphql.ts
+++ b/app/src/pages/project/__generated__/ProjectPageQueriesProjectConfigQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<7a440cbc319fc3ad5ca2a7acdcefa9a9>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -15,8 +15,10 @@ export type ProjectPageQueriesProjectConfigQuery$variables = {
};
export type ProjectPageQueriesProjectConfigQuery$data = {
readonly project: {
- readonly " $fragmentSpreads": FragmentRefs<"ProjectConfigPage_projectConfigCard">;
+ readonly id: string;
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectConfigPage_projectConfigCard" | "ProjectRetentionPolicyCard_policy">;
};
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectRetentionPolicyCard_query">;
};
export type ProjectPageQueriesProjectConfigQuery = {
response: ProjectPageQueriesProjectConfigQuery$data;
@@ -37,7 +39,74 @@ v1 = [
"name": "id",
"variableName": "id"
}
-];
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v7 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v9 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v8/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+},
+v10 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/),
+ (v8/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+};
return {
"fragment": {
"argumentDefinitions": (v0/*: any*/),
@@ -53,13 +122,24 @@ return {
"name": "node",
"plural": false,
"selections": [
+ (v2/*: any*/),
{
"args": null,
"kind": "FragmentSpread",
"name": "ProjectConfigPage_projectConfigCard"
+ },
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectRetentionPolicyCard_policy"
}
],
"storageKey": null
+ },
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectRetentionPolicyCard_query"
}
],
"type": "Query",
@@ -79,46 +159,53 @@ return {
"name": "node",
"plural": false,
"selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "__typename",
- "storageKey": null
- },
- {
- "kind": "TypeDiscriminator",
- "abstractKey": "__isNode"
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "id",
- "storageKey": null
- },
+ (v3/*: any*/),
+ (v2/*: any*/),
{
"kind": "InlineFragment",
"selections": [
+ (v4/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "name",
+ "name": "gradientStartColor",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "gradientStartColor",
+ "name": "gradientEndColor",
"storageKey": null
},
{
"alias": null,
"args": null,
- "kind": "ScalarField",
- "name": "gradientEndColor",
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "traceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v7/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
"storageKey": null
}
],
@@ -127,20 +214,71 @@ return {
}
],
"storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "projectTraceRetentionPolicies",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v9/*: any*/),
+ (v7/*: any*/),
+ (v10/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
}
]
},
"params": {
- "cacheID": "92a104050258aeb9404066cf783b1888",
+ "cacheID": "4ae7b5450bfc4c428bd6560495cf1f01",
"id": null,
"metadata": {},
"name": "ProjectPageQueriesProjectConfigQuery",
"operationKind": "query",
- "text": "query ProjectPageQueriesProjectConfigQuery(\n $id: GlobalID!\n) {\n project: node(id: $id) {\n __typename\n ...ProjectConfigPage_projectConfigCard\n __isNode: __typename\n id\n }\n}\n\nfragment ProjectConfigPage_projectConfigCard on Project {\n id\n name\n gradientStartColor\n gradientEndColor\n}\n"
+ "text": "query ProjectPageQueriesProjectConfigQuery(\n $id: GlobalID!\n) {\n project: node(id: $id) {\n __typename\n id\n ...ProjectConfigPage_projectConfigCard\n ...ProjectRetentionPolicyCard_policy\n }\n ...ProjectRetentionPolicyCard_query\n}\n\nfragment ProjectConfigPage_projectConfigCard on Project {\n id\n name\n gradientStartColor\n gradientEndColor\n}\n\nfragment ProjectRetentionPolicyCard_policy on Project {\n id\n name\n traceRetentionPolicy {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n }\n}\n\nfragment ProjectRetentionPolicyCard_query on Query {\n ...ProjectTraceRetentionPolicySelectFragment\n}\n\nfragment ProjectTraceRetentionPolicySelectFragment on Query {\n projectTraceRetentionPolicies {\n edges {\n node {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n }\n }\n }\n}\n"
}
};
})();
-(node as any).hash = "fe300a92a7895152ba48a9cf2ed207ee";
+(node as any).hash = "b1036aea3db1dd0348858521f91637ef";
export default node;
diff --git a/app/src/pages/project/__generated__/ProjectPageQueriesSpansQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectPageQueriesSpansQuery.graphql.ts
index 204d41f0f8..e2aa92e925 100644
--- a/app/src/pages/project/__generated__/ProjectPageQueriesSpansQuery.graphql.ts
+++ b/app/src/pages/project/__generated__/ProjectPageQueriesSpansQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<53d6af2dbb8876539b9018fb27d405c8>>
+ * @generated SignedSource<<98e1b88dd7f2ed309db46a86f8df8e14>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -111,7 +111,21 @@ v8 = [
"name": "truncatedValue",
"storageKey": null
}
-];
+],
+v9 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v10 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+};
return {
"fragment": {
"argumentDefinitions": [
@@ -335,31 +349,90 @@ return {
"name": "spanAnnotations",
"plural": true,
"selections": [
+ (v5/*: any*/),
(v6/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "label",
+ "name": "annotatorKind",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "score",
+ "name": "createdAt",
"storageKey": null
},
{
"alias": null,
"args": null,
- "kind": "ScalarField",
- "name": "annotatorKind",
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
"storageKey": null
}
],
"storageKey": null
},
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v9/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v6/*: any*/)
+ ],
+ "storageKey": null
+ },
{
"alias": null,
"args": null,
@@ -398,6 +471,103 @@ return {
}
],
"storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/),
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v9/*: any*/),
+ (v10/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
}
],
"storageKey": null
@@ -477,12 +647,12 @@ return {
]
},
"params": {
- "cacheID": "05f5f4fe0a7acdcdf65c5f56c7eb693a",
+ "cacheID": "15140ce84c0e78f00122eef0e92f7aed",
"id": null,
"metadata": {},
"name": "ProjectPageQueriesSpansQuery",
"operationKind": "query",
- "text": "query ProjectPageQueriesSpansQuery(\n $id: GlobalID!\n $timeRange: TimeRange!\n $orphanSpanAsRootSpan: Boolean!\n) {\n project: node(id: $id) {\n __typename\n ...SpansTable_spans\n __isNode: __typename\n id\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment SpansTable_spans on Project {\n name\n ...SpanColumnSelector_annotations\n spans(first: 30, sort: {col: startTime, dir: desc}, rootSpansOnly: true, orphanSpanAsRootSpan: $orphanSpanAsRootSpan, timeRange: $timeRange) {\n edges {\n span: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n spanId\n trace {\n id\n traceId\n }\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanAnnotations {\n name\n label\n score\n annotatorKind\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
+ "text": "query ProjectPageQueriesSpansQuery(\n $id: GlobalID!\n $timeRange: TimeRange!\n $orphanSpanAsRootSpan: Boolean!\n) {\n project: node(id: $id) {\n __typename\n ...SpansTable_spans\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment SpansTable_spans on Project {\n name\n ...SpanColumnSelector_annotations\n spans(first: 30, sort: {col: startTime, dir: desc}, rootSpansOnly: true, orphanSpanAsRootSpan: $orphanSpanAsRootSpan, timeRange: $timeRange) {\n edges {\n span: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n spanId\n trace {\n id\n traceId\n }\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n ...AnnotationSummaryGroup\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
}
};
})();
diff --git a/app/src/pages/project/__generated__/ProjectPageQueriesTracesQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectPageQueriesTracesQuery.graphql.ts
index 9349c1a1cb..d39595d8ca 100644
--- a/app/src/pages/project/__generated__/ProjectPageQueriesTracesQuery.graphql.ts
+++ b/app/src/pages/project/__generated__/ProjectPageQueriesTracesQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<3e656eab138674e745760a35499e72a3>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -166,6 +166,20 @@ v14 = {
"storageKey": null
},
v15 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v16 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+},
+v17 = {
"alias": null,
"args": null,
"concreteType": "SpanAnnotation",
@@ -175,31 +189,186 @@ v15 = {
"selections": [
(v3/*: any*/),
(v4/*: any*/),
+ (v15/*: any*/),
+ (v16/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "label",
+ "name": "annotatorKind",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "score",
+ "name": "createdAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+},
+v18 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v15/*: any*/)
+ ],
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "annotatorKind",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v4/*: any*/)
+ ],
+ "storageKey": null
+},
+v19 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v15/*: any*/),
+ (v16/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
"storageKey": null
}
],
"storageKey": null
},
-v16 = {
+v20 = {
"alias": null,
"args": null,
"concreteType": "DocumentRetrievalMetrics",
@@ -384,8 +553,10 @@ return {
],
"storageKey": null
},
- (v15/*: any*/),
- (v16/*: any*/),
+ (v17/*: any*/),
+ (v18/*: any*/),
+ (v19/*: any*/),
+ (v20/*: any*/),
{
"alias": null,
"args": [
@@ -466,8 +637,10 @@ return {
],
"storageKey": null
},
- (v15/*: any*/),
- (v16/*: any*/)
+ (v17/*: any*/),
+ (v19/*: any*/),
+ (v18/*: any*/),
+ (v20/*: any*/)
],
"storageKey": null
}
@@ -554,12 +727,12 @@ return {
]
},
"params": {
- "cacheID": "64916c7696b304920074bd5ab383b8a0",
+ "cacheID": "fc43b4929e794d4643add9f36f1c45fa",
"id": null,
"metadata": {},
"name": "ProjectPageQueriesTracesQuery",
"operationKind": "query",
- "text": "query ProjectPageQueriesTracesQuery(\n $id: GlobalID!\n $timeRange: TimeRange!\n) {\n project: node(id: $id) {\n __typename\n ...TracesTable_spans\n __isNode: __typename\n id\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment TracesTable_spans on Project {\n name\n ...SpanColumnSelector_annotations\n rootSpans: spans(first: 30, sort: {col: startTime, dir: desc}, rootSpansOnly: true, timeRange: $timeRange) {\n edges {\n rootSpan: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n parentId\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n numSpans\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n descendants(first: 50) {\n edges {\n node {\n id\n spanKind\n name\n statusCode: propagatedStatusCode\n startTime\n latencyMs\n parentId\n cumulativeTokenCountTotal: tokenCountTotal\n cumulativeTokenCountPrompt: tokenCountPrompt\n cumulativeTokenCountCompletion: tokenCountCompletion\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n }\n }\n }\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
+ "text": "query ProjectPageQueriesTracesQuery(\n $id: GlobalID!\n $timeRange: TimeRange!\n) {\n project: node(id: $id) {\n __typename\n ...TracesTable_spans\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment TraceHeaderRootSpanAnnotationsFragment on Span {\n ...AnnotationSummaryGroup\n}\n\nfragment TracesTable_spans on Project {\n name\n ...SpanColumnSelector_annotations\n rootSpans: spans(first: 30, sort: {col: startTime, dir: desc}, rootSpansOnly: true, timeRange: $timeRange) {\n edges {\n rootSpan: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n parentId\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n numSpans\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n ...AnnotationSummaryGroup\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n descendants(first: 50) {\n edges {\n node {\n id\n spanKind\n name\n statusCode: propagatedStatusCode\n startTime\n latencyMs\n parentId\n cumulativeTokenCountTotal: tokenCountTotal\n cumulativeTokenCountPrompt: 
tokenCountPrompt\n cumulativeTokenCountCompletion: tokenCountCompletion\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n }\n ...AnnotationSummaryGroup\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n ...TraceHeaderRootSpanAnnotationsFragment\n }\n }\n }\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
}
};
})();
diff --git a/app/src/pages/project/__generated__/ProjectRetentionPolicyCardQuery.graphql.ts b/app/src/pages/project/__generated__/ProjectRetentionPolicyCardQuery.graphql.ts
new file mode 100644
index 0000000000..b54c49d008
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectRetentionPolicyCardQuery.graphql.ts
@@ -0,0 +1,206 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectRetentionPolicyCardQuery$variables = {
+ id: string;
+};
+export type ProjectRetentionPolicyCardQuery$data = {
+ readonly node: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectRetentionPolicyCard_policy">;
+ };
+};
+export type ProjectRetentionPolicyCardQuery = {
+ response: ProjectRetentionPolicyCardQuery$data;
+ variables: ProjectRetentionPolicyCardQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "id"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "id"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectRetentionPolicyCardQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectRetentionPolicyCard_policy"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "ProjectRetentionPolicyCardQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v3/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "traceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/),
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "77c202af7295368d5d136811c06adc96",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectRetentionPolicyCardQuery",
+ "operationKind": "query",
+ "text": "query ProjectRetentionPolicyCardQuery(\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...ProjectRetentionPolicyCard_policy\n __isNode: __typename\n id\n }\n}\n\nfragment ProjectRetentionPolicyCard_policy on Project {\n id\n name\n traceRetentionPolicy {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "4fa3fab109a0fda0beab65fa1fcfb590";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation.graphql.ts b/app/src/pages/project/__generated__/ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation.graphql.ts
new file mode 100644
index 0000000000..263cec3b59
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation.graphql.ts
@@ -0,0 +1,295 @@
+/**
+ * @generated SignedSource<<885f696f3a3e2b81f989090d4afaf721>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation$variables = {
+ policyId: string;
+ projectId: string;
+};
+export type ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation$data = {
+ readonly patchProjectTraceRetentionPolicy: {
+ readonly query: {
+ readonly node: {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectRetentionPolicyCard_policy">;
+ };
+ };
+ };
+};
+export type ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation = {
+ response: ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation$data;
+ variables: ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "policyId"
+},
+v1 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "projectId"
+},
+v2 = [
+ {
+ "fields": [
+ {
+ "items": [
+ {
+ "kind": "Variable",
+ "name": "addProjects.0",
+ "variableName": "projectId"
+ }
+ ],
+ "kind": "ListValue",
+ "name": "addProjects"
+ },
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "policyId"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "input"
+ }
+],
+v3 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "projectId"
+ }
+],
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": [
+ (v0/*: any*/),
+ (v1/*: any*/)
+ ],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "patchProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": (v3/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectRetentionPolicyCard_policy"
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": [
+ (v1/*: any*/),
+ (v0/*: any*/)
+ ],
+ "kind": "Operation",
+ "name": "ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "patchProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": (v3/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v5/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "traceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v8/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v8/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "9cf8dd42d8ca1f6810e9e07a682d708e",
+ "id": null,
+ "metadata": {},
+ "name": "ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation",
+ "operationKind": "mutation",
+ "text": "mutation ProjectRetentionPolicyCardSetProjectRetentionPolicyMutation(\n $projectId: GlobalID!\n $policyId: GlobalID!\n) {\n patchProjectTraceRetentionPolicy(input: {id: $policyId, addProjects: [$projectId]}) {\n query {\n node(id: $projectId) {\n __typename\n ... on Project {\n ...ProjectRetentionPolicyCard_policy\n }\n __isNode: __typename\n id\n }\n }\n }\n}\n\nfragment ProjectRetentionPolicyCard_policy on Project {\n id\n name\n traceRetentionPolicy {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "c462ad8c60c830b4188a3d1f7f1cfa0e";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectRetentionPolicyCard_policy.graphql.ts b/app/src/pages/project/__generated__/ProjectRetentionPolicyCard_policy.graphql.ts
new file mode 100644
index 0000000000..a9abecc986
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectRetentionPolicyCard_policy.graphql.ts
@@ -0,0 +1,133 @@
+/**
+ * @generated SignedSource<<0e33c5bf0a44913d025f26ec42d68b1f>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectRetentionPolicyCard_policy$data = {
+ readonly id: string;
+ readonly name: string;
+ readonly traceRetentionPolicy: {
+ readonly cronExpression: string;
+ readonly id: string;
+ readonly name: string;
+ readonly rule: {
+ readonly maxCount?: number;
+ readonly maxDays?: number;
+ };
+ };
+ readonly " $fragmentType": "ProjectRetentionPolicyCard_policy";
+};
+export type ProjectRetentionPolicyCard_policy$key = {
+ readonly " $data"?: ProjectRetentionPolicyCard_policy$data;
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectRetentionPolicyCard_policy">;
+};
+
+const node: ReaderFragment = (function(){
+var v0 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+};
+return {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectRetentionPolicyCard_policy",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "traceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v3/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Project",
+ "abstractKey": null
+};
+})();
+
+(node as any).hash = "6069a92b50f5083a8c8fbeee6982b6b5";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/ProjectRetentionPolicyCard_query.graphql.ts b/app/src/pages/project/__generated__/ProjectRetentionPolicyCard_query.graphql.ts
new file mode 100644
index 0000000000..9142ece071
--- /dev/null
+++ b/app/src/pages/project/__generated__/ProjectRetentionPolicyCard_query.graphql.ts
@@ -0,0 +1,40 @@
+/**
+ * @generated SignedSource<<8c141a7c665e1ff47ebf5ed693c9774a>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type ProjectRetentionPolicyCard_query$data = {
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectTraceRetentionPolicySelectFragment">;
+ readonly " $fragmentType": "ProjectRetentionPolicyCard_query";
+};
+export type ProjectRetentionPolicyCard_query$key = {
+ readonly " $data"?: ProjectRetentionPolicyCard_query$data;
+ readonly " $fragmentSpreads": FragmentRefs<"ProjectRetentionPolicyCard_query">;
+};
+
+const node: ReaderFragment = {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "ProjectRetentionPolicyCard_query",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "ProjectTraceRetentionPolicySelectFragment"
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+};
+
+(node as any).hash = "ae8f95394a5ca83acf7326a90809b351";
+
+export default node;
diff --git a/app/src/pages/project/__generated__/SpansTableSpansQuery.graphql.ts b/app/src/pages/project/__generated__/SpansTableSpansQuery.graphql.ts
index 7ab3ec0107..25e536f343 100644
--- a/app/src/pages/project/__generated__/SpansTableSpansQuery.graphql.ts
+++ b/app/src/pages/project/__generated__/SpansTableSpansQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -168,7 +168,21 @@ v18 = [
"name": "truncatedValue",
"storageKey": null
}
-];
+],
+v19 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v20 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+};
return {
"fragment": {
"argumentDefinitions": [
@@ -408,31 +422,90 @@ return {
"name": "spanAnnotations",
"plural": true,
"selections": [
+ (v15/*: any*/),
(v16/*: any*/),
+ (v19/*: any*/),
+ (v20/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "label",
+ "name": "annotatorKind",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "score",
+ "name": "createdAt",
"storageKey": null
},
{
"alias": null,
"args": null,
- "kind": "ScalarField",
- "name": "annotatorKind",
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
"storageKey": null
}
],
"storageKey": null
},
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v19/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v16/*: any*/)
+ ],
+ "storageKey": null
+ },
{
"alias": null,
"args": null,
@@ -471,6 +544,103 @@ return {
}
],
"storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v15/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v14/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v15/*: any*/),
+ (v16/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v19/*: any*/),
+ (v20/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v15/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
}
],
"storageKey": null
@@ -550,16 +720,16 @@ return {
]
},
"params": {
- "cacheID": "724a34c18ec17312b01be5ccae4d7159",
+ "cacheID": "64f1147e17e486de40dd43a5b99d5c1b",
"id": null,
"metadata": {},
"name": "SpansTableSpansQuery",
"operationKind": "query",
- "text": "query SpansTableSpansQuery(\n $after: String = null\n $filterCondition: String = null\n $first: Int = 30\n $orphanSpanAsRootSpan: Boolean\n $rootSpansOnly: Boolean = true\n $sort: SpanSort = {col: startTime, dir: desc}\n $timeRange: TimeRange\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...SpansTable_spans_xYL0c\n __isNode: __typename\n id\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment SpansTable_spans_xYL0c on Project {\n name\n ...SpanColumnSelector_annotations\n spans(first: $first, after: $after, sort: $sort, rootSpansOnly: $rootSpansOnly, filterCondition: $filterCondition, orphanSpanAsRootSpan: $orphanSpanAsRootSpan, timeRange: $timeRange) {\n edges {\n span: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n spanId\n trace {\n id\n traceId\n }\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanAnnotations {\n name\n label\n score\n annotatorKind\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
+ "text": "query SpansTableSpansQuery(\n $after: String = null\n $filterCondition: String = null\n $first: Int = 30\n $orphanSpanAsRootSpan: Boolean\n $rootSpansOnly: Boolean = true\n $sort: SpanSort = {col: startTime, dir: desc}\n $timeRange: TimeRange\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...SpansTable_spans_xYL0c\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment SpansTable_spans_xYL0c on Project {\n name\n ...SpanColumnSelector_annotations\n spans(first: $first, after: $after, sort: $sort, rootSpansOnly: $rootSpansOnly, filterCondition: $filterCondition, orphanSpanAsRootSpan: $orphanSpanAsRootSpan, timeRange: $timeRange) {\n edges {\n span: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n spanId\n trace {\n id\n traceId\n }\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n ...AnnotationSummaryGroup\n }\n cursor\n node {\n 
__typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
}
};
})();
-(node as any).hash = "3abfb2f2fae91e346221687ee5213714";
+(node as any).hash = "1128552e9dfc766aa37c1acec4a9807f";
export default node;
diff --git a/app/src/pages/project/__generated__/SpansTable_spans.graphql.ts b/app/src/pages/project/__generated__/SpansTable_spans.graphql.ts
index 8a52c32dfb..a3c164c31e 100644
--- a/app/src/pages/project/__generated__/SpansTable_spans.graphql.ts
+++ b/app/src/pages/project/__generated__/SpansTable_spans.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<15d6b44453b1e7aa781b5edd07316779>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,7 +9,7 @@
// @ts-nocheck
import { ReaderFragment } from 'relay-runtime';
-export type AnnotatorKind = "HUMAN" | "LLM";
+export type AnnotatorKind = "CODE" | "HUMAN" | "LLM";
export type SpanKind = "agent" | "chain" | "embedding" | "evaluator" | "guardrail" | "llm" | "reranker" | "retriever" | "tool" | "unknown";
export type SpanStatusCode = "ERROR" | "OK" | "UNSET";
import { FragmentRefs } from "relay-runtime";
@@ -38,8 +38,18 @@ export type SpansTable_spans$data = {
readonly output: {
readonly value: string;
} | null;
+ readonly spanAnnotationSummaries: ReadonlyArray<{
+ readonly labelFractions: ReadonlyArray<{
+ readonly fraction: number;
+ readonly label: string;
+ }>;
+ readonly meanScore: number | null;
+ readonly name: string;
+ }>;
readonly spanAnnotations: ReadonlyArray<{
readonly annotatorKind: AnnotatorKind;
+ readonly createdAt: string;
+ readonly id: string;
readonly label: string | null;
readonly name: string;
readonly score: number | null;
@@ -55,6 +65,7 @@ export type SpansTable_spans$data = {
readonly id: string;
readonly traceId: string;
};
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationSummaryGroup">;
};
}>;
};
@@ -94,7 +105,14 @@ v3 = [
"name": "truncatedValue",
"storageKey": null
}
-];
+],
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+};
return {
"argumentDefinitions": [
{
@@ -354,31 +372,71 @@ return {
"name": "spanAnnotations",
"plural": true,
"selections": [
+ (v2/*: any*/),
(v1/*: any*/),
+ (v4/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "label",
+ "name": "score",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "score",
+ "name": "annotatorKind",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "annotatorKind",
+ "name": "createdAt",
"storageKey": null
}
],
"storageKey": null
},
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v4/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v1/*: any*/)
+ ],
+ "storageKey": null
+ },
{
"alias": null,
"args": null,
@@ -417,6 +475,11 @@ return {
}
],
"storageKey": null
+ },
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationSummaryGroup"
}
],
"storageKey": null
@@ -484,6 +547,6 @@ return {
};
})();
-(node as any).hash = "3abfb2f2fae91e346221687ee5213714";
+(node as any).hash = "1128552e9dfc766aa37c1acec4a9807f";
export default node;
diff --git a/app/src/pages/project/__generated__/TracesTableQuery.graphql.ts b/app/src/pages/project/__generated__/TracesTableQuery.graphql.ts
index 4f4632b391..949b63b727 100644
--- a/app/src/pages/project/__generated__/TracesTableQuery.graphql.ts
+++ b/app/src/pages/project/__generated__/TracesTableQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<20e968dcd3823cba425a02f01dea30e3>>
+ * @generated SignedSource<<718672afbba488af67d0c932107d640b>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -214,6 +214,20 @@ v23 = {
"storageKey": null
},
v24 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v25 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+},
+v26 = {
"alias": null,
"args": null,
"concreteType": "SpanAnnotation",
@@ -223,31 +237,186 @@ v24 = {
"selections": [
(v12/*: any*/),
(v13/*: any*/),
+ (v24/*: any*/),
+ (v25/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "label",
+ "name": "annotatorKind",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "score",
+ "name": "createdAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+},
+v27 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v24/*: any*/)
+ ],
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "annotatorKind",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v13/*: any*/)
+ ],
+ "storageKey": null
+},
+v28 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v12/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v11/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v12/*: any*/),
+ (v13/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v24/*: any*/),
+ (v25/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v12/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
"storageKey": null
}
],
"storageKey": null
},
-v25 = {
+v29 = {
"alias": null,
"args": null,
"concreteType": "DocumentRetrievalMetrics",
@@ -451,8 +620,10 @@ return {
],
"storageKey": null
},
- (v24/*: any*/),
- (v25/*: any*/),
+ (v26/*: any*/),
+ (v27/*: any*/),
+ (v28/*: any*/),
+ (v29/*: any*/),
{
"alias": null,
"args": [
@@ -533,8 +704,10 @@ return {
],
"storageKey": null
},
- (v24/*: any*/),
- (v25/*: any*/)
+ (v26/*: any*/),
+ (v28/*: any*/),
+ (v27/*: any*/),
+ (v29/*: any*/)
],
"storageKey": null
}
@@ -621,16 +794,16 @@ return {
]
},
"params": {
- "cacheID": "d0a1b8418330452126093b027f027f43",
+ "cacheID": "aaa5159f1a91c5146fb87560873ab5c0",
"id": null,
"metadata": {},
"name": "TracesTableQuery",
"operationKind": "query",
- "text": "query TracesTableQuery(\n $after: String = null\n $filterCondition: String = null\n $first: Int = 30\n $sort: SpanSort = {col: startTime, dir: desc}\n $timeRange: TimeRange\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...TracesTable_spans_1XEuU\n __isNode: __typename\n id\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment TracesTable_spans_1XEuU on Project {\n name\n ...SpanColumnSelector_annotations\n rootSpans: spans(first: $first, after: $after, sort: $sort, rootSpansOnly: true, filterCondition: $filterCondition, timeRange: $timeRange) {\n edges {\n rootSpan: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n parentId\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n numSpans\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n descendants(first: 50) {\n edges {\n node {\n id\n spanKind\n name\n statusCode: propagatedStatusCode\n startTime\n latencyMs\n parentId\n cumulativeTokenCountTotal: tokenCountTotal\n cumulativeTokenCountPrompt: tokenCountPrompt\n cumulativeTokenCountCompletion: tokenCountCompletion\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n }\n }\n }\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
+ "text": "query TracesTableQuery(\n $after: String = null\n $filterCondition: String = null\n $first: Int = 30\n $sort: SpanSort = {col: startTime, dir: desc}\n $timeRange: TimeRange\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...TracesTable_spans_1XEuU\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n\nfragment SpanColumnSelector_annotations on Project {\n spanAnnotationNames\n}\n\nfragment TraceHeaderRootSpanAnnotationsFragment on Span {\n ...AnnotationSummaryGroup\n}\n\nfragment TracesTable_spans_1XEuU on Project {\n name\n ...SpanColumnSelector_annotations\n rootSpans: spans(first: $first, after: $after, sort: $sort, rootSpansOnly: true, filterCondition: $filterCondition, timeRange: $timeRange) {\n edges {\n rootSpan: node {\n id\n spanKind\n name\n metadata\n statusCode\n startTime\n latencyMs\n cumulativeTokenCountTotal\n cumulativeTokenCountPrompt\n cumulativeTokenCountCompletion\n parentId\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n numSpans\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n ...AnnotationSummaryGroup\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n descendants(first: 50) {\n edges {\n node {\n id\n spanKind\n name\n statusCode: 
propagatedStatusCode\n startTime\n latencyMs\n parentId\n cumulativeTokenCountTotal: tokenCountTotal\n cumulativeTokenCountPrompt: tokenCountPrompt\n cumulativeTokenCountCompletion: tokenCountCompletion\n input {\n value: truncatedValue\n }\n output {\n value: truncatedValue\n }\n spanId\n trace {\n id\n traceId\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n }\n ...AnnotationSummaryGroup\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n ...TraceHeaderRootSpanAnnotationsFragment\n }\n }\n }\n }\n cursor\n node {\n __typename\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n}\n"
}
};
})();
-(node as any).hash = "946e6d9bfbf43ac9def8bfcc2ff30661";
+(node as any).hash = "506e7c2397918ad76b8f1a50a0a2c49b";
export default node;
diff --git a/app/src/pages/project/__generated__/TracesTable_spans.graphql.ts b/app/src/pages/project/__generated__/TracesTable_spans.graphql.ts
index aacbbabd5e..4a3a3bcf54 100644
--- a/app/src/pages/project/__generated__/TracesTable_spans.graphql.ts
+++ b/app/src/pages/project/__generated__/TracesTable_spans.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<680f2c3471984fd01a4b98afcb92bc9b>>
+ * @generated SignedSource<<4bfbdd258cc30acda2c9ccd3467c71c4>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,7 +9,7 @@
// @ts-nocheck
import { ReaderFragment } from 'relay-runtime';
-export type AnnotatorKind = "HUMAN" | "LLM";
+export type AnnotatorKind = "CODE" | "HUMAN" | "LLM";
export type SpanKind = "agent" | "chain" | "embedding" | "evaluator" | "guardrail" | "llm" | "reranker" | "retriever" | "tool" | "unknown";
export type SpanStatusCode = "ERROR" | "OK" | "UNSET";
import { FragmentRefs } from "relay-runtime";
@@ -46,6 +46,7 @@ export type TracesTable_spans$data = {
readonly parentId: string | null;
readonly spanAnnotations: ReadonlyArray<{
readonly annotatorKind: AnnotatorKind;
+ readonly createdAt: string;
readonly id: string;
readonly label: string | null;
readonly name: string;
@@ -59,6 +60,7 @@ export type TracesTable_spans$data = {
readonly id: string;
readonly traceId: string;
};
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationSummaryGroup" | "TraceHeaderRootSpanAnnotationsFragment">;
};
}>;
};
@@ -79,8 +81,17 @@ export type TracesTable_spans$data = {
readonly value: string;
} | null;
readonly parentId: string | null;
+ readonly spanAnnotationSummaries: ReadonlyArray<{
+ readonly labelFractions: ReadonlyArray<{
+ readonly fraction: number;
+ readonly label: string;
+ }>;
+ readonly meanScore: number | null;
+ readonly name: string;
+ }>;
readonly spanAnnotations: ReadonlyArray<{
readonly annotatorKind: AnnotatorKind;
+ readonly createdAt: string;
readonly id: string;
readonly label: string | null;
readonly name: string;
@@ -95,6 +106,7 @@ export type TracesTable_spans$data = {
readonly numSpans: number;
readonly traceId: string;
};
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationSummaryGroup">;
};
}>;
};
@@ -198,6 +210,13 @@ v11 = {
"storageKey": null
},
v12 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v13 = {
"alias": null,
"args": null,
"concreteType": "SpanAnnotation",
@@ -207,31 +226,37 @@ v12 = {
"selections": [
(v2/*: any*/),
(v1/*: any*/),
+ (v12/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "label",
+ "name": "score",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "score",
+ "name": "annotatorKind",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "annotatorKind",
+ "name": "createdAt",
"storageKey": null
}
],
"storageKey": null
},
-v13 = {
+v14 = {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationSummaryGroup"
+},
+v15 = {
"alias": null,
"args": null,
"concreteType": "DocumentRetrievalMetrics",
@@ -446,8 +471,47 @@ return {
],
"storageKey": null
},
- (v12/*: any*/),
(v13/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v12/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v1/*: any*/)
+ ],
+ "storageKey": null
+ },
+ (v14/*: any*/),
+ (v15/*: any*/),
{
"alias": null,
"args": [
@@ -528,8 +592,14 @@ return {
],
"storageKey": null
},
- (v12/*: any*/),
- (v13/*: any*/)
+ (v13/*: any*/),
+ (v14/*: any*/),
+ (v15/*: any*/),
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "TraceHeaderRootSpanAnnotationsFragment"
+ }
],
"storageKey": null
}
@@ -605,6 +675,6 @@ return {
};
})();
-(node as any).hash = "946e6d9bfbf43ac9def8bfcc2ff30661";
+(node as any).hash = "506e7c2397918ad76b8f1a50a0a2c49b";
export default node;
diff --git a/app/src/pages/project/tableUtils.ts b/app/src/pages/project/tableUtils.ts
index d8ad051327..e4f8c9c02e 100644
--- a/app/src/pages/project/tableUtils.ts
+++ b/app/src/pages/project/tableUtils.ts
@@ -6,7 +6,7 @@ import {
} from "./__generated__/TracesTableQuery.graphql";
export const ANNOTATIONS_COLUMN_PREFIX = "annotations";
-export const ANNOTATIONS_KEY_SEPARATOR = ":";
+export const ANNOTATIONS_KEY_SEPARATOR = "-";
export const DEFAULT_SORT: SpanSort = {
col: "startTime",
dir: "desc",
@@ -33,3 +33,11 @@ export function getGqlSort(
dir: sort.desc ? "desc" : "asc",
};
}
+
+export function makeAnnotationColumnId(name: string, type: string) {
+ return (
+ `${ANNOTATIONS_COLUMN_PREFIX}${ANNOTATIONS_KEY_SEPARATOR}${type}${ANNOTATIONS_KEY_SEPARATOR}${name}`
+ // replace anything that's not alphanumeric with a dash
+ .replace(/[^a-zA-Z0-9]/g, "-")
+ );
+}
diff --git a/app/src/pages/settings/AnnotationConfigDialog.tsx b/app/src/pages/settings/AnnotationConfigDialog.tsx
new file mode 100644
index 0000000000..c7b7b46030
--- /dev/null
+++ b/app/src/pages/settings/AnnotationConfigDialog.tsx
@@ -0,0 +1,457 @@
+import React from "react";
+import { Controller, useFieldArray, useForm } from "react-hook-form";
+import { css } from "@emotion/react";
+
+import { Card } from "@arizeai/components";
+
+import {
+ Button,
+ Dialog,
+ FieldError,
+ Flex,
+ Form,
+ Icon,
+ Icons,
+ Input,
+ Label,
+ NumberField,
+ Radio,
+ RadioGroup,
+ Text,
+ TextArea,
+ TextField,
+ View,
+} from "@phoenix/components";
+import { useNotifyError, useNotifySuccess } from "@phoenix/contexts";
+
+import {
+ AnnotationConfig,
+ AnnotationConfigCategorical,
+ AnnotationConfigContinuous,
+ AnnotationConfigFreeform,
+ AnnotationConfigOptimizationDirection,
+ AnnotationConfigType,
+} from "./types";
+
+const ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("");
+
+const optimizationDirections = [
+ "MAXIMIZE",
+ "MINIMIZE",
+ "NONE",
+] satisfies AnnotationConfigOptimizationDirection[];
+
+const types = [
+ "CATEGORICAL",
+ "CONTINUOUS",
+ "FREEFORM",
+] satisfies AnnotationConfigType[];
+
+export const AnnotationConfigDialog = ({
+ onAddAnnotationConfig,
+ initialAnnotationConfig,
+}: {
+ onAddAnnotationConfig: (
+ config: AnnotationConfig,
+ {
+ onCompleted,
+ onError,
+ }?: { onCompleted?: () => void; onError?: (error: string) => void }
+ ) => void;
+ initialAnnotationConfig?: Partial;
+}) => {
+ const notifyError = useNotifyError();
+ const notifySuccess = useNotifySuccess();
+ const mode: "new" | "edit" = initialAnnotationConfig ? "edit" : "new";
+ const { control, handleSubmit, watch } = useForm({
+ defaultValues: initialAnnotationConfig || {
+ annotationType: "CATEGORICAL",
+ values: [
+ { label: "", score: null },
+ { label: "", score: null },
+ ],
+ optimizationDirection: "MAXIMIZE",
+ },
+ });
+ const { fields, append, remove } = useFieldArray({
+ control,
+ name: "values",
+ });
+ const onSubmit = (data: AnnotationConfig, close: () => void) => {
+ const onCompleted = () => {
+ notifySuccess({
+ title:
+ mode === "new"
+ ? "Annotation config created!"
+ : "Annotation config updated!",
+ });
+ close();
+ };
+ const onError = (error: string) => {
+ notifyError({
+ title:
+ mode === "new"
+ ? "Failed to create annotation config"
+ : "Failed to update annotation config",
+ message: error,
+ });
+ };
+ switch (data.annotationType) {
+ case "CATEGORICAL": {
+ const config: AnnotationConfigCategorical = {
+ annotationType: "CATEGORICAL",
+ name: data.name,
+ values: data.values,
+ id: initialAnnotationConfig?.id || "",
+ optimizationDirection: data.optimizationDirection,
+ description: data.description,
+ };
+ onAddAnnotationConfig(config, { onCompleted, onError });
+ break;
+ }
+ case "CONTINUOUS": {
+ const config: AnnotationConfigContinuous = {
+ annotationType: "CONTINUOUS",
+ name: data.name,
+ lowerBound: data.lowerBound,
+ upperBound: data.upperBound,
+ id: initialAnnotationConfig?.id || "",
+ optimizationDirection: data.optimizationDirection,
+ description: data.description,
+ };
+ onAddAnnotationConfig(config, { onCompleted, onError });
+ break;
+ }
+ case "FREEFORM": {
+ const config: AnnotationConfigFreeform = {
+ annotationType: "FREEFORM",
+ name: data.name,
+ id: initialAnnotationConfig?.id || "",
+ description: data.description,
+ };
+ onAddAnnotationConfig(config, { onCompleted, onError });
+ break;
+ }
+ }
+ };
+ const annotationType = watch("annotationType");
+ return (
+
+ );
+};
diff --git a/app/src/pages/settings/AnnotationConfigSelectionToolbar.tsx b/app/src/pages/settings/AnnotationConfigSelectionToolbar.tsx
new file mode 100644
index 0000000000..8d0fd0accc
--- /dev/null
+++ b/app/src/pages/settings/AnnotationConfigSelectionToolbar.tsx
@@ -0,0 +1,162 @@
+import React, { useState } from "react";
+import { css } from "@emotion/react";
+
+import { Card } from "@arizeai/components";
+
+import {
+ Button,
+ Dialog,
+ DialogTrigger,
+ Flex,
+ Icon,
+ Icons,
+ Modal,
+ Text,
+ View,
+} from "@phoenix/components";
+import { AnnotationConfigDialog } from "@phoenix/pages/settings/AnnotationConfigDialog";
+import { AnnotationConfig } from "@phoenix/pages/settings/types";
+
+interface AnnotationConfigSelectionToolbarProps {
+ selectedConfig: AnnotationConfig;
+ onClearSelection: () => void;
+ onEditAnnotationConfig: (config: AnnotationConfig) => void;
+ onDeleteAnnotationConfig: (
+ configId: string,
+ args?: { onCompleted?: () => void; onError?: () => void }
+ ) => void;
+}
+
+export const AnnotationConfigSelectionToolbar = ({
+ selectedConfig,
+ onClearSelection,
+ onEditAnnotationConfig,
+ onDeleteAnnotationConfig,
+}: AnnotationConfigSelectionToolbarProps) => {
+ const [isEditing, setIsEditing] = useState(false);
+ if (!selectedConfig) {
+ return null;
+ }
+ const id = selectedConfig?.id;
+ return (
+
+
+
+ Config: "{selectedConfig?.name}"
+
+
+ {
+ setIsEditing(open);
+ if (!open) {
+ onClearSelection();
+ }
+ }}
+ >
+
+
+
+
+
+ {
+ setIsEditing(open);
+ if (!open) {
+ onClearSelection();
+ }
+ }}
+ >
+
+
+
+
+
+
+
+
+
+ );
+};
diff --git a/app/src/pages/settings/AnnotationConfigTable.tsx b/app/src/pages/settings/AnnotationConfigTable.tsx
new file mode 100644
index 0000000000..591ecf4255
--- /dev/null
+++ b/app/src/pages/settings/AnnotationConfigTable.tsx
@@ -0,0 +1,328 @@
+import React, { useMemo, useState } from "react";
+import { graphql, useFragment } from "react-relay";
+import {
+ CellContext,
+ ColumnDef,
+ flexRender,
+ getCoreRowModel,
+ getSortedRowModel,
+ RowSelectionState,
+ useReactTable,
+} from "@tanstack/react-table";
+import { css } from "@emotion/react";
+
+import { Tooltip, TooltipTrigger, TriggerWrap } from "@arizeai/components";
+
+import { Flex, Icon, Icons, Text, Token } from "@phoenix/components";
+import { AnnotationLabel } from "@phoenix/components/annotation";
+import { IndeterminateCheckboxCell } from "@phoenix/components/table/IndeterminateCheckboxCell";
+import { tableCSS } from "@phoenix/components/table/styles";
+import { TableEmpty } from "@phoenix/components/table/TableEmpty";
+import { Truncate } from "@phoenix/components/utility/Truncate";
+import { AnnotationConfigTableFragment$key } from "@phoenix/pages/settings/__generated__/AnnotationConfigTableFragment.graphql";
+import { AnnotationConfigSelectionToolbar } from "@phoenix/pages/settings/AnnotationConfigSelectionToolbar";
+import { AnnotationConfig } from "@phoenix/pages/settings/types";
+
+const columns = [
+ {
+ id: "select",
+ maxSize: 10,
+ header: () => null,
+ cell: ({ row }: CellContext) => (
+
+ ),
+ },
+ {
+ id: "name",
+ header: "Name",
+ accessorKey: "name",
+ cell: ({ row }: CellContext) => {
+ return (
+
+ );
+ },
+ },
+ {
+ id: "description",
+ header: "Description",
+ accessorKey: "description",
+ sortUndefined: "last",
+ accessorFn: (row: AnnotationConfig) => row.description ?? undefined,
+ maxSize: 150,
+ cell: ({ row }: CellContext) => {
+ return (
+
+ {row.original.description}
+
+ );
+ },
+ },
+ {
+ id: "annotationType",
+ header: "Type",
+ accessorKey: "annotationType",
+ minSize: 10,
+ cell: ({ row }: CellContext) => {
+ return (
+
+ {row.original.annotationType.charAt(0).toUpperCase() +
+ row.original.annotationType.slice(1).toLowerCase()}
+
+ );
+ },
+ },
+ {
+ id: "values",
+ header: "Values",
+ enableSorting: false,
+ accessorFn: (row: AnnotationConfig) => {
+ switch (row.annotationType) {
+ case "CATEGORICAL": {
+ if (!row.values) {
+ return "";
+ }
+ let tokens = row.values.map(
+ (value: { label: string }, index: number) => (
+
+ {value.label}
+
+ )
+ );
+ if (row.values.length > 5) {
+ tokens = [
+ ...tokens.slice(-4),
+
+
+
+ + {row.values.length - 5} more
+
+
+
+ {row.values
+ .map((value: { label: string }) => value.label)
+ .join(", ")}
+
+ ,
+ ];
+ }
+ return tokens;
+ }
+ case "CONTINUOUS":
+ return `from ${row.lowerBound} to ${row.upperBound}`;
+ case "FREEFORM":
+ return "";
+ default:
+ return "";
+ }
+ },
+ cell: ({ getValue }: CellContext) => {
+ const value = getValue() as React.ReactNode;
+ return {value};
+ },
+ },
+] satisfies ColumnDef[];
+
+export const AnnotationConfigTable = ({
+ annotationConfigs,
+ onDeleteAnnotationConfig,
+ onEditAnnotationConfig,
+}: {
+ annotationConfigs: AnnotationConfigTableFragment$key;
+ onDeleteAnnotationConfig: (
+ id: string,
+ {
+ onCompleted,
+ onError,
+ }?: { onCompleted?: () => void; onError?: () => void }
+ ) => void;
+ onEditAnnotationConfig: (
+ annotationConfig: AnnotationConfig,
+ {
+ onCompleted,
+ onError,
+ }?: { onCompleted?: () => void; onError?: () => void }
+ ) => void;
+}) => {
+ const [rowSelection, setRowSelection] = useState({});
+ const data = useFragment(
+ graphql`
+ fragment AnnotationConfigTableFragment on Query {
+ annotationConfigs {
+ edges {
+ annotationConfig: node {
+ ... on CategoricalAnnotationConfig {
+ id
+ name
+ description
+ annotationType
+ optimizationDirection
+ values {
+ label
+ score
+ }
+ }
+ ... on ContinuousAnnotationConfig {
+ id
+ name
+ description
+ annotationType
+ optimizationDirection
+ upperBound
+ lowerBound
+ }
+ ... on FreeformAnnotationConfig {
+ id
+ name
+ description
+ annotationType
+ }
+ }
+ }
+ }
+ }
+ `,
+ annotationConfigs
+ );
+ const configs = useMemo(
+ () => data.annotationConfigs.edges.map((edge) => edge.annotationConfig),
+ [data.annotationConfigs.edges]
+ ) as AnnotationConfig[]; // cast to AnnotationConfig[] because otherwise 'name' and 'annotationType' are optional
+ const table = useReactTable({
+ data: configs,
+ columns,
+ getCoreRowModel: getCoreRowModel(),
+ getSortedRowModel: getSortedRowModel(),
+ enableRowSelection: true,
+ onRowSelectionChange: setRowSelection,
+ enableMultiRowSelection: false,
+ state: {
+ rowSelection,
+ },
+ });
+ const isEmpty = table.getRowCount() === 0;
+ const rows = table.getRowModel().rows;
+
+ const selectedRows = table.getSelectedRowModel().rows;
+ const selectedConfigs = selectedRows.map((row) => row.original);
+
+ const clearSelection = () => {
+ setRowSelection({});
+ };
+
+ return (
+
+
+
+ {table.getHeaderGroups().map((headerGroup) => (
+
+ {headerGroup.headers.map((header) => (
+
+ {header.isPlaceholder ? null : (
+
+ {flexRender(
+ header.column.columnDef.header,
+ header.getContext()
+ )}
+ {header.column.getIsSorted() ? (
+
+ ) : (
+
+ )
+ }
+ />
+ ) : null}
+
+ )}
+ |
+ ))}
+
+ ))}
+
+ {isEmpty ? (
+
+ ) : (
+
+ {rows.map((row) => (
+ {
+ setRowSelection({
+ [row.id]: !rowSelection?.[row.id],
+ });
+ }}
+ >
+ {row.getVisibleCells().map((cell) => (
+
+ {flexRender(cell.column.columnDef.cell, cell.getContext())}
+ |
+ ))}
+
+ ))}
+
+ )}
+
+ {selectedRows.length > 0 && (
+
+ )}
+
+ );
+};
diff --git a/app/src/pages/settings/CreateRetentionPolicy.tsx b/app/src/pages/settings/CreateRetentionPolicy.tsx
new file mode 100644
index 0000000000..03c3f8399b
--- /dev/null
+++ b/app/src/pages/settings/CreateRetentionPolicy.tsx
@@ -0,0 +1,93 @@
+import React from "react";
+import { graphql, useMutation } from "react-relay";
+
+import {
+ useNotifyError,
+ useNotifySuccess,
+} from "@phoenix/contexts/NotificationContext";
+
+import {
+ CreateRetentionPolicyMutation,
+ ProjectTraceRetentionRuleInput,
+} from "./__generated__/CreateRetentionPolicyMutation.graphql";
+import {
+ RetentionPolicyForm,
+ RetentionPolicyFormParams,
+} from "./RetentionPolicyForm";
+
+/**
+ * A wrapper around the RetentionPolicyForm component used to create a new retention policy.
+ */
+export function CreateRetentionPolicy(props: { onCreate: () => void }) {
+ const notifySuccess = useNotifySuccess();
+ const notifyError = useNotifyError();
+ const [submit, isSubitting] = useMutation(
+ graphql`
+ mutation CreateRetentionPolicyMutation(
+ $input: CreateProjectTraceRetentionPolicyInput!
+ ) {
+ createProjectTraceRetentionPolicy(input: $input) {
+ query {
+ ...RetentionPoliciesTable_policies
+ }
+ }
+ }
+ `
+ );
+
+ const onSubmit = (params: RetentionPolicyFormParams) => {
+ let rule: ProjectTraceRetentionRuleInput;
+ if (params.numberOfDays && params.numberOfTraces) {
+ rule = {
+ maxDaysOrCount: {
+ maxDays: params.numberOfDays,
+ maxCount: params.numberOfTraces,
+ },
+ };
+ } else if (params.numberOfDays) {
+ rule = {
+ maxDays: {
+ maxDays: params.numberOfDays,
+ },
+ };
+ } else if (params.numberOfTraces) {
+ rule = {
+ maxCount: {
+ maxCount: params.numberOfTraces,
+ },
+ };
+ } else {
+ throw new Error("Invalid retention policy rule");
+ }
+ submit({
+ variables: {
+ input: {
+ cronExpression: params.schedule,
+ rule,
+ name: params.name,
+ },
+ },
+ onCompleted: () => {
+ notifySuccess({
+ title: "Retention policy created successfully",
+ message:
+ "The retention policy has been created. You can now add this policy to projects.",
+ });
+ props.onCreate();
+ },
+ onError: () => {
+ notifyError({
+ title: "Error creating retention policy",
+ message: "Please try again.",
+ });
+ },
+ });
+ };
+ return (
+
+ );
+}
diff --git a/app/src/pages/settings/EditRetentionPolicy.tsx b/app/src/pages/settings/EditRetentionPolicy.tsx
new file mode 100644
index 0000000000..6fff9492f3
--- /dev/null
+++ b/app/src/pages/settings/EditRetentionPolicy.tsx
@@ -0,0 +1,141 @@
+import React from "react";
+import { graphql, useLazyLoadQuery, useMutation } from "react-relay";
+
+import {
+ useNotifyError,
+ useNotifySuccess,
+} from "@phoenix/contexts/NotificationContext";
+
+import type { ProjectTraceRetentionRuleInput } from "./__generated__/CreateRetentionPolicyMutation.graphql";
+import type { EditRetentionPolicyMutation } from "./__generated__/EditRetentionPolicyMutation.graphql";
+import { EditRetentionPolicyQuery } from "./__generated__/EditRetentionPolicyQuery.graphql";
+import {
+ RetentionPolicyForm,
+ RetentionPolicyFormParams,
+} from "./RetentionPolicyForm";
+
+interface EditRetentionPolicyProps {
+ policyId: string;
+ onEditCompleted: () => void;
+ onCancel?: () => void;
+}
+
+/**
+ * A wrapper around the RetentionPolicyForm component used to edit an existing retention policy.
+ */
+export function EditRetentionPolicy(props: EditRetentionPolicyProps) {
+ const notifySuccess = useNotifySuccess();
+ const notifyError = useNotifyError();
+ const data = useLazyLoadQuery(
+ graphql`
+ query EditRetentionPolicyQuery($id: GlobalID!) {
+ retentionPolicy: node(id: $id) {
+ ... on ProjectTraceRetentionPolicy {
+ id
+ name
+ cronExpression
+ rule {
+ ... on TraceRetentionRuleMaxCount {
+ maxCount
+ }
+ ... on TraceRetentionRuleMaxDays {
+ maxDays
+ }
+ ... on TraceRetentionRuleMaxDaysOrCount {
+ maxDays
+ maxCount
+ }
+ }
+ }
+ }
+ }
+ `,
+ {
+ id: props.policyId,
+ }
+ );
+
+ if (!data?.retentionPolicy) {
+ throw new Error("Retention policy not found");
+ }
+
+ const [submit, isSubmitting] = useMutation(
+ graphql`
+ mutation EditRetentionPolicyMutation(
+ $input: PatchProjectTraceRetentionPolicyInput!
+ ) {
+ patchProjectTraceRetentionPolicy(input: $input) {
+ query {
+ ...RetentionPoliciesTable_policies
+ }
+ }
+ }
+ `
+ );
+
+ const onSubmit = (params: RetentionPolicyFormParams) => {
+ let rule: ProjectTraceRetentionRuleInput;
+ if (params.numberOfDays && params.numberOfTraces) {
+ rule = {
+ maxDaysOrCount: {
+ maxDays: params.numberOfDays,
+ maxCount: params.numberOfTraces,
+ },
+ };
+ } else if (params.numberOfDays) {
+ rule = {
+ maxDays: {
+ maxDays: params.numberOfDays,
+ },
+ };
+ } else if (params.numberOfTraces) {
+ rule = {
+ maxCount: {
+ maxCount: params.numberOfTraces,
+ },
+ };
+ } else {
+ throw new Error("Invalid retention policy rule");
+ }
+
+ submit({
+ variables: {
+ input: {
+ id: props.policyId,
+ cronExpression: params.schedule,
+ rule,
+ name: params.name,
+ },
+ },
+ onCompleted: () => {
+ notifySuccess({
+ title: "Retention policy updated successfully",
+ message: "The retention policy has been updated successfully.",
+ });
+ props.onEditCompleted();
+ },
+ onError: () => {
+ notifyError({
+ title: "Error updating retention policy",
+ message: "Please try again.",
+ });
+ },
+ });
+ };
+
+ return (
+
+ );
+}
diff --git a/app/src/pages/settings/GlobalRetentionPolicyCard.tsx b/app/src/pages/settings/GlobalRetentionPolicyCard.tsx
new file mode 100644
index 0000000000..4c7f7bc946
--- /dev/null
+++ b/app/src/pages/settings/GlobalRetentionPolicyCard.tsx
@@ -0,0 +1,216 @@
+import React, { useCallback } from "react";
+import { Controller, useForm } from "react-hook-form";
+import { graphql, useLazyLoadQuery, useMutation } from "react-relay";
+import { css } from "@emotion/react";
+
+import { Card } from "@arizeai/components";
+
+import {
+ Button,
+ FieldError,
+ Flex,
+ Form,
+ Input,
+ Label,
+ Link,
+ NumberField,
+ Text,
+ View,
+} from "@phoenix/components";
+import { useNotifyError, useNotifySuccess } from "@phoenix/contexts";
+import { GlobalRetentionPolicyCardMutation } from "@phoenix/pages/settings/__generated__/GlobalRetentionPolicyCardMutation.graphql";
+import { GlobalRetentionPolicyCardQuery } from "@phoenix/pages/settings/__generated__/GlobalRetentionPolicyCardQuery.graphql";
+
+export const GlobalRetentionPolicyCard = () => {
+ const notifySuccess = useNotifySuccess();
+ const notifyError = useNotifyError();
+ const data = useLazyLoadQuery(
+ graphql`
+ query GlobalRetentionPolicyCardQuery {
+ defaultProjectTraceRetentionPolicy {
+ cronExpression
+ id
+ name
+ rule {
+ __typename
+ ... on TraceRetentionRuleMaxDays {
+ maxDays
+ }
+ }
+ }
+ }
+ `,
+ {}
+ );
+ const [updateGlobalRetentionPolicy] =
+ useMutation(graphql`
+ mutation GlobalRetentionPolicyCardMutation(
+ $input: PatchProjectTraceRetentionPolicyInput!
+ ) {
+ patchProjectTraceRetentionPolicy(input: $input) {
+ node {
+ id
+ rule {
+ __typename
+ ... on TraceRetentionRuleMaxDays {
+ maxDays
+ }
+ }
+ }
+ }
+ }
+ `);
+
+ const {
+ control,
+ handleSubmit,
+ formState: { isDirty },
+ reset,
+ } = useForm({
+ defaultValues: {
+ maxDays:
+ data.defaultProjectTraceRetentionPolicy?.rule?.__typename ===
+ "TraceRetentionRuleMaxDays"
+ ? data.defaultProjectTraceRetentionPolicy.rule.maxDays
+ : 0,
+ },
+ });
+
+ const id = data.defaultProjectTraceRetentionPolicy?.id;
+ const onSubmit = useCallback(
+ (form: { maxDays: number }) => {
+ updateGlobalRetentionPolicy({
+ variables: {
+ input: {
+ id,
+ rule: {
+ maxDays: {
+ maxDays: form.maxDays,
+ },
+ },
+ },
+ },
+ onCompleted: (data) => {
+ const maxDays =
+ data.patchProjectTraceRetentionPolicy?.node?.rule?.__typename ===
+ "TraceRetentionRuleMaxDays"
+ ? data.patchProjectTraceRetentionPolicy.node.rule.maxDays
+ : null;
+ if (maxDays !== null) {
+ // reset the form to the new maxDays value
+ // this is a workaround to ensure the form is updated with the new value
+ // our relay query is not revalidating correctly because defaultProjectTraceRetentionPolicy
+ // is not a node with an id
+ reset({
+ maxDays,
+ });
+ }
+ if (maxDays === 0) {
+ notifySuccess({
+ title: "Default retention policy disabled",
+ expireMs: 5000,
+ });
+ } else {
+ notifySuccess({
+ title: `Default retention policy has been set to ${maxDays} ${
+ maxDays === 1 ? "day" : "days"
+ }`,
+ expireMs: 5000,
+ });
+ }
+ },
+ onError: () => {
+ notifyError({
+ title: "Failed to update default retention policy",
+ expireMs: 5000,
+ });
+ },
+ });
+ },
+ [id, notifyError, notifySuccess, updateGlobalRetentionPolicy, reset]
+ );
+
+ return (
+
+
+
+
+
+ The default retention policy for all projects that do not have
+ their own custom retention policy. Traces that are older than the
+ specified number of days will be deleted automatically in order to
+ free up storage space.
+
+
+
+
+
+
+
+
+
+ All Retention Policies
+
+
+
+ );
+};
diff --git a/app/src/pages/settings/RetentionPoliciesTable.tsx b/app/src/pages/settings/RetentionPoliciesTable.tsx
new file mode 100644
index 0000000000..ded431c6e6
--- /dev/null
+++ b/app/src/pages/settings/RetentionPoliciesTable.tsx
@@ -0,0 +1,252 @@
+import React, { startTransition, useEffect, useMemo } from "react";
+import { graphql, usePaginationFragment } from "react-relay";
+import {
+ type ColumnDef,
+ flexRender,
+ getCoreRowModel,
+ getSortedRowModel,
+ useReactTable,
+} from "@tanstack/react-table";
+import cronstrue from "cronstrue";
+import { css } from "@emotion/react";
+
+import { Link } from "@phoenix/components";
+import { tableCSS } from "@phoenix/components/table/styles";
+import {
+ useNotifySuccess,
+ useViewerCanManageRetentionPolicy,
+} from "@phoenix/contexts";
+import { assertUnreachable } from "@phoenix/typeUtils";
+
+import { RetentionPoliciesTable_policies$key } from "./__generated__/RetentionPoliciesTable_policies.graphql";
+import { RetentionPoliciesTablePoliciesQuery } from "./__generated__/RetentionPoliciesTablePoliciesQuery.graphql";
+import { RetentionPolicyActionMenu } from "./RetentionPolicyActionMenu";
+export const RetentionPoliciesTable = ({
+ query,
+ fetchKey,
+}: {
+ query: RetentionPoliciesTable_policies$key;
+ /**
+ * A temporary workaround to force a refetch of the table when a new policy is created.
+ * This is because the refetchable fragment doesn't refetch when the data is updated.
+ */
+ fetchKey: number;
+}) => {
+ const notifySuccess = useNotifySuccess();
+ const canManageRetentionPolicy = useViewerCanManageRetentionPolicy();
+ const { data, refetch } = usePaginationFragment<
+ RetentionPoliciesTablePoliciesQuery,
+ RetentionPoliciesTable_policies$key
+ >(
+ graphql`
+ fragment RetentionPoliciesTable_policies on Query
+ @refetchable(queryName: "RetentionPoliciesTablePoliciesQuery")
+ @argumentDefinitions(
+ after: { type: "String", defaultValue: null }
+ first: { type: "Int", defaultValue: 1000 }
+ ) {
+ projectTraceRetentionPolicies(first: $first, after: $after)
+ @connection(
+ key: "RetentionPoliciesTable_projectTraceRetentionPolicies"
+ ) {
+ edges {
+ node {
+ id
+ name
+ cronExpression
+ rule {
+ __typename
+ ... on TraceRetentionRuleMaxCount {
+ maxCount
+ }
+ ... on TraceRetentionRuleMaxDays {
+ maxDays
+ }
+ ... on TraceRetentionRuleMaxDaysOrCount {
+ maxDays
+ maxCount
+ }
+ }
+ projects {
+ edges {
+ node {
+ name
+ id
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ `,
+ query
+ );
+
+ /**
+ * This is a temporary workaround to force a refetch of the table when a new policy is created.
+ */
+ useEffect(() => {
+ if (fetchKey > 0) {
+ refetch(
+ {},
+ {
+ fetchPolicy: "network-only",
+ }
+ );
+ }
+ }, [fetchKey, refetch]);
+
+ const tableData = data.projectTraceRetentionPolicies.edges.map(
+ (edge) => edge.node
+ );
+
+ const columns: ColumnDef<(typeof tableData)[number]>[] = useMemo(() => {
+ const columns: ColumnDef<(typeof tableData)[number]>[] = [
+ {
+ header: "Name",
+ accessorKey: "name",
+ },
+ {
+ header: "Schedule",
+ accessorKey: "cronExpression",
+ cell: ({ row }) => {
+ return cronstrue.toString(row.original.cronExpression);
+ },
+ },
+ {
+ header: "Rule",
+ accessorKey: "rule",
+ cell: ({ row }) => {
+ const rule = row.original.rule;
+ if (rule.__typename === "TraceRetentionRuleMaxCount") {
+ return `${rule.maxCount} traces`;
+ }
+ if (rule.__typename === "TraceRetentionRuleMaxDays") {
+ if (rule.maxDays === 0) {
+ return "Infinite";
+ }
+ return `${rule.maxDays} days`;
+ }
+ if (rule.__typename === "TraceRetentionRuleMaxDaysOrCount") {
+ return `${rule.maxDays} days or ${rule.maxCount} traces`;
+ }
+ if (rule.__typename === "%other") {
+ return "Unknown";
+ }
+ assertUnreachable(rule);
+ },
+ },
+ {
+ header: "Projects",
+ accessorKey: "projects",
+ cell: ({ row }) => {
+ return (
+
+ {row.original.projects.edges.map((edge) => (
+ -
+
+ {edge.node.name}
+
+
+ ))}
+
+ );
+ },
+ },
+ ];
+ if (canManageRetentionPolicy) {
+ columns.push({
+ id: "actions",
+ cell: ({ row }) => {
+ return (
+ edge.node.name
+ )}
+ onPolicyEdit={() => {
+ notifySuccess({
+ title: "Policy Updated",
+ message: `Policy "${row.original.name}" was updated and will take effect shortly.`,
+ });
+ }}
+ onPolicyDelete={() => {
+ notifySuccess({
+ title: "Policy deleted",
+ message: `Policy "${row.original.name}" was deleted`,
+ });
+ startTransition(() => {
+ refetch(
+ {},
+ {
+ fetchPolicy: "network-only",
+ }
+ );
+ });
+ }}
+ />
+ );
+ },
+ });
+ }
+ return columns;
+ }, [canManageRetentionPolicy, notifySuccess, refetch]);
+
+ const table = useReactTable<(typeof tableData)[number]>({
+ columns,
+ data: tableData,
+ getCoreRowModel: getCoreRowModel(),
+ getSortedRowModel: getSortedRowModel(),
+ });
+
+ const rows = table.getRowModel().rows;
+
+ return (
+
+
+ {table.getHeaderGroups().map((headerGroup) => (
+
+ {headerGroup.headers.map((header) => (
+
+ {header.isPlaceholder ? null : (
+
+ {flexRender(
+ header.column.columnDef.header,
+ header.getContext()
+ )}
+
+ )}
+ |
+ ))}
+
+ ))}
+
+
+ {rows.map((row) => {
+ return (
+
+ {row.getVisibleCells().map((cell) => {
+ return (
+
+ {flexRender(cell.column.columnDef.cell, cell.getContext())}
+ |
+ );
+ })}
+
+ );
+ })}
+
+
+ );
+};
diff --git a/app/src/pages/settings/RetentionPolicyActionMenu.tsx b/app/src/pages/settings/RetentionPolicyActionMenu.tsx
new file mode 100644
index 0000000000..7126851f9e
--- /dev/null
+++ b/app/src/pages/settings/RetentionPolicyActionMenu.tsx
@@ -0,0 +1,185 @@
+import React, { ReactNode, Suspense, useCallback, useState } from "react";
+import { graphql, useMutation } from "react-relay";
+
+import { ActionMenu, Dialog, DialogContainer, Item } from "@arizeai/components";
+
+import {
+ Button,
+ Flex,
+ Icon,
+ Icons,
+ Loading,
+ Text,
+ View,
+} from "@phoenix/components";
+import { StopPropagation } from "@phoenix/components/StopPropagation";
+
+import { EditRetentionPolicy } from "./EditRetentionPolicy";
+
+const DEFAULT_POLICY_NAME = "Default";
+
+enum RetentionPolicyAction {
+ EDIT = "editPolicy",
+ DELETE = "deletePolicy",
+}
+
+export interface RetentionPolicyActionMenuProps {
+ policyId: string;
+ policyName: string;
+ /**
+ * The names of the projects associated with the policy.
+ */
+ projectNames: string[];
+ onPolicyEdit: () => void;
+ onPolicyDelete: () => void;
+}
+
+export const RetentionPolicyActionMenu = ({
+ policyId,
+ policyName,
+ projectNames,
+ onPolicyEdit,
+ onPolicyDelete,
+}: RetentionPolicyActionMenuProps) => {
+ const canDelete = policyName !== DEFAULT_POLICY_NAME;
+ const [dialog, setDialog] = useState(null);
+
+ const onEdit = useCallback(() => {
+ setDialog(
+
+ );
+ }, [onPolicyEdit, policyId]);
+
+ const [deletePolicy, isDeleting] = useMutation(graphql`
+ mutation RetentionPolicyActionMenuDeletePolicyMutation(
+ $input: DeleteProjectTraceRetentionPolicyInput!
+ ) {
+ deleteProjectTraceRetentionPolicy(input: $input) {
+ query {
+ ...RetentionPoliciesTable_policies
+ }
+ }
+ }
+ `);
+
+ const onDelete = useCallback(() => {
+ setDialog(
+
+ );
+ }, [
+ policyId,
+ policyName,
+ projectNames,
+ deletePolicy,
+ onPolicyDelete,
+ isDeleting,
+ ]);
+
+ return (
+
+ {
+ switch (action as RetentionPolicyAction) {
+ case RetentionPolicyAction.EDIT: {
+ return onEdit();
+ }
+ case RetentionPolicyAction.DELETE: {
+ return onDelete();
+ }
+ }
+ }}
+ >
+ -
+
+ } />
+ Edit
+
+
+ -
+
+ } />
+ Delete
+
+
+
+ setDialog(null)}
+ >
+ {dialog}
+
+
+ );
+};
diff --git a/app/src/pages/settings/RetentionPolicyForm.tsx b/app/src/pages/settings/RetentionPolicyForm.tsx
new file mode 100644
index 0000000000..9b78439357
--- /dev/null
+++ b/app/src/pages/settings/RetentionPolicyForm.tsx
@@ -0,0 +1,241 @@
+import React from "react";
+import { Controller, useForm } from "react-hook-form";
+import { CronExpressionParser } from "cron-parser";
+import cronstrue from "cronstrue";
+
+import {
+ Button,
+ Flex,
+ Form,
+ Heading,
+ Input,
+ Label,
+ NumberField,
+ Text,
+ TextField,
+ View,
+} from "@phoenix/components";
+import { createPolicyDeletionSummaryText } from "@phoenix/utils/retentionPolicyUtils";
+export type RetentionPolicyFormParams = {
+ name: string;
+ numberOfTraces?: number;
+ numberOfDays?: number;
+ schedule: string;
+};
+
+const createPolicyScheduleSummaryText = ({
+ schedule,
+}: Pick) => {
+ try {
+ CronExpressionParser.parse(schedule);
+ } catch (error) {
+ return "Invalid schedule";
+ }
+ let scheduleString = "Unknown";
+ try {
+ scheduleString = cronstrue.toString(schedule);
+ } catch (error) {
+ return "Invalid schedule";
+ }
+ return `Enforcement Schedule: ${scheduleString}`;
+};
+
+type RetentionPolicyFormProps = {
+ onSubmit: (params: RetentionPolicyFormParams) => void;
+ mode: "create" | "edit";
+ isSubmitting: boolean;
+ defaultValues?: RetentionPolicyFormParams;
+ onCancel?: () => void;
+};
+export function RetentionPolicyForm(props: RetentionPolicyFormProps) {
+ const { onSubmit, mode, isSubmitting, defaultValues } = props;
+ const { control, watch, handleSubmit } = useForm({
+ defaultValues: defaultValues ?? {
+ name: "New Policy",
+ numberOfTraces: undefined,
+ numberOfDays: 400,
+ schedule: "0 0 * * 0",
+ },
+ mode: "onChange",
+ });
+
+ const [numberOfDays, numberOfTraces, schedule] = watch([
+ "numberOfDays",
+ "numberOfTraces",
+ "schedule",
+ ]);
+
+ return (
+
+ );
+}
diff --git a/app/src/pages/settings/SettingsAnnotationsPage.tsx b/app/src/pages/settings/SettingsAnnotationsPage.tsx
new file mode 100644
index 0000000000..58bfb0b25d
--- /dev/null
+++ b/app/src/pages/settings/SettingsAnnotationsPage.tsx
@@ -0,0 +1,186 @@
+import React from "react";
+import { graphql, useFragment, useMutation } from "react-relay";
+import { useLoaderData, useRevalidator } from "react-router";
+
+import { Card } from "@arizeai/components";
+
+import { Button, DialogTrigger, Icon, Icons, Modal } from "@phoenix/components";
+import { AnnotationConfigDialog } from "@phoenix/pages/settings/AnnotationConfigDialog";
+import { AnnotationConfigTable } from "@phoenix/pages/settings/AnnotationConfigTable";
+import { SettingsAnnotationsPageLoaderData } from "@phoenix/pages/settings/settingsAnnotationsPageLoader";
+import { AnnotationConfig } from "@phoenix/pages/settings/types";
+import { getErrorMessagesFromRelayMutationError } from "@phoenix/utils/errorUtils";
+
+import { SettingsAnnotationsPageFragment$key } from "./__generated__/SettingsAnnotationsPageFragment.graphql";
+
+export const SettingsAnnotationsPage = () => {
+ const annotations = useLoaderData() as SettingsAnnotationsPageLoaderData;
+ return ;
+};
+
+const SettingsAnnotations = ({
+ annotations,
+}: {
+ annotations: SettingsAnnotationsPageFragment$key;
+}) => {
+ const { revalidate } = useRevalidator();
+ const data = useFragment(
+ graphql`
+ fragment SettingsAnnotationsPageFragment on Query {
+ ...AnnotationConfigTableFragment
+ }
+ `,
+ annotations
+ );
+
+ const [deleteAnnotationConfigs] = useMutation(graphql`
+ mutation SettingsAnnotationsPageDeleteAnnotationConfigsMutation(
+ $input: DeleteAnnotationConfigsInput!
+ ) {
+ deleteAnnotationConfigs(input: $input) {
+ query {
+ ...AnnotationConfigTableFragment
+ }
+ annotationConfigs {
+ __typename
+ }
+ }
+ }
+ `);
+
+ const [createAnnotationConfig] = useMutation(graphql`
+ mutation SettingsAnnotationsPageCreateAnnotationConfigMutation(
+ $input: CreateAnnotationConfigInput!
+ ) {
+ createAnnotationConfig(input: $input) {
+ query {
+ ...AnnotationConfigTableFragment
+ }
+ annotationConfig {
+ ... on ContinuousAnnotationConfig {
+ id
+ }
+ ... on CategoricalAnnotationConfig {
+ id
+ }
+ ... on FreeformAnnotationConfig {
+ id
+ }
+ }
+ }
+ }
+ `);
+
+ const parseError = (callback?: (error: string) => void) => (error: Error) => {
+ const formattedError = getErrorMessagesFromRelayMutationError(error);
+ callback?.(formattedError?.[0] ?? "Failed to create annotation config");
+ };
+
+ const handleAddAnnotationConfig = (
+ _config: AnnotationConfig,
+ {
+ onCompleted,
+ onError,
+ }: { onCompleted?: () => void; onError?: (error: string) => void } = {}
+ ) => {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ const { id: _, annotationType, ...config } = _config;
+ const key = annotationType.toLowerCase();
+ createAnnotationConfig({
+ variables: { input: { annotationConfig: { [key]: config } } },
+ onCompleted,
+ onError: parseError(onError),
+ });
+ revalidate();
+ };
+
+ const [updateAnnotationConfig] = useMutation(graphql`
+ mutation SettingsAnnotationsPageUpdateAnnotationConfigMutation(
+ $input: UpdateAnnotationConfigInput!
+ ) {
+ updateAnnotationConfig(input: $input) {
+ query {
+ ...AnnotationConfigTableFragment
+ }
+ annotationConfig {
+ ... on ContinuousAnnotationConfig {
+ id
+ }
+ ... on CategoricalAnnotationConfig {
+ id
+ }
+ ... on FreeformAnnotationConfig {
+ id
+ }
+ }
+ }
+ }
+ `);
+
+ const handleEditAnnotationConfig = (
+ _config: AnnotationConfig,
+ {
+ onCompleted,
+ onError,
+ }: { onCompleted?: () => void; onError?: (error: string) => void } = {}
+ ) => {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ const { id, annotationType, ...config } = _config;
+ const key = annotationType.toLowerCase();
+ updateAnnotationConfig({
+ variables: {
+ input: {
+ id,
+ annotationConfig: {
+ [key]: config,
+ },
+ },
+ },
+ onCompleted,
+ onError: parseError(onError),
+ });
+ revalidate();
+ };
+
+ const handleDeleteAnnotationConfig = (
+ id: string,
+ {
+ onCompleted,
+ onError,
+ }: { onCompleted?: () => void; onError?: (error: string) => void } = {}
+ ) => {
+ deleteAnnotationConfigs({
+ variables: { input: { ids: [id] } },
+ onCompleted,
+ onError: parseError(onError),
+ });
+ revalidate();
+ };
+
+ return (
+
+
+
+
+
+
+ }
+ >
+
+
+ );
+};
diff --git a/app/src/pages/settings/SettingsDataPage.tsx b/app/src/pages/settings/SettingsDataPage.tsx
new file mode 100644
index 0000000000..e5b6da3de9
--- /dev/null
+++ b/app/src/pages/settings/SettingsDataPage.tsx
@@ -0,0 +1,63 @@
+import React, { useState } from "react";
+import { useLoaderData } from "react-router";
+import invariant from "tiny-invariant";
+
+import { Card } from "@arizeai/components";
+
+import {
+ Button,
+ Dialog,
+ DialogTrigger,
+ Heading,
+ Icon,
+ Icons,
+ Modal,
+} from "@phoenix/components";
+import { CanManageRetentionPolicy } from "@phoenix/components/auth";
+
+import { CreateRetentionPolicy } from "./CreateRetentionPolicy";
+import { RetentionPoliciesTable } from "./RetentionPoliciesTable";
+import { settingsDataPageLoader } from "./settingsDataPageLoader";
+
+export function SettingsDataPage() {
+ const [fetchKey, setFetchKey] = useState(0);
+ const loaderData = useLoaderData();
+ invariant(loaderData, "loaderData is required");
+
+ return (
+
+
+ } />}
+ >
+ New Policy
+
+
+
+
+
+
+ }
+ >
+
+
+ );
+}
diff --git a/app/src/pages/settings/SettingsGeneralPage.tsx b/app/src/pages/settings/SettingsGeneralPage.tsx
index 07158b93f6..7544e29872 100644
--- a/app/src/pages/settings/SettingsGeneralPage.tsx
+++ b/app/src/pages/settings/SettingsGeneralPage.tsx
@@ -14,10 +14,11 @@ import {
TextField,
View,
} from "@phoenix/components";
-import { IsAdmin } from "@phoenix/components/auth";
+import { CanManageRetentionPolicy, IsAdmin } from "@phoenix/components/auth";
import { BASE_URL, VERSION } from "@phoenix/config";
import { APIKeysCard } from "@phoenix/pages/settings/APIKeysCard";
import { DBUsagePieChart } from "@phoenix/pages/settings/DBUsagePieChart";
+import { GlobalRetentionPolicyCard } from "@phoenix/pages/settings/GlobalRetentionPolicyCard";
import { settingsGeneralPageLoader } from "@phoenix/pages/settings/settingsGeneralPageLoader";
import { UsersCard } from "@phoenix/pages/settings/UsersCard";
@@ -81,11 +82,12 @@ export function SettingsGeneralPage() {
- <>
-
-
- >
+
+
+
+
+
);
}
diff --git a/app/src/pages/settings/SettingsPage.tsx b/app/src/pages/settings/SettingsPage.tsx
index 21aa6631f7..dc600c2f07 100644
--- a/app/src/pages/settings/SettingsPage.tsx
+++ b/app/src/pages/settings/SettingsPage.tsx
@@ -3,16 +3,7 @@ import { Key } from "react-aria-components";
import { Navigate, Outlet, useLocation, useNavigate } from "react-router";
import { css } from "@emotion/react";
-import { Card } from "@arizeai/components";
-
-import {
- Flex,
- LazyTabPanel,
- Tab,
- TabList,
- Tabs,
- Text,
-} from "@phoenix/components";
+import { LazyTabPanel, Tab, TabList, Tabs } from "@phoenix/components";
const settingsPageCSS = css`
overflow-y: auto;
@@ -51,7 +42,8 @@ export function SettingsPage() {
General
AI Providers
- {/* Annotations */}
+ Annotations
+ Data Retention
@@ -60,17 +52,10 @@ export function SettingsPage() {
-
-
-
- Annotation settings and configurations will be available here.
-
-
- Configure annotation types, labels, and workflows for your
- projects.
-
-
-
+
+
+
+
diff --git a/app/src/pages/settings/__generated__/AnnotationConfigTableFragment.graphql.ts b/app/src/pages/settings/__generated__/AnnotationConfigTableFragment.graphql.ts
new file mode 100644
index 0000000000..688ead3017
--- /dev/null
+++ b/app/src/pages/settings/__generated__/AnnotationConfigTableFragment.graphql.ts
@@ -0,0 +1,197 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+export type AnnotationType = "CATEGORICAL" | "CONTINUOUS" | "FREEFORM";
+export type OptimizationDirection = "MAXIMIZE" | "MINIMIZE" | "NONE";
+import { FragmentRefs } from "relay-runtime";
+export type AnnotationConfigTableFragment$data = {
+ readonly annotationConfigs: {
+ readonly edges: ReadonlyArray<{
+ readonly annotationConfig: {
+ readonly annotationType?: AnnotationType;
+ readonly description?: string | null;
+ readonly id?: string;
+ readonly lowerBound?: number | null;
+ readonly name?: string;
+ readonly optimizationDirection?: OptimizationDirection;
+ readonly upperBound?: number | null;
+ readonly values?: ReadonlyArray<{
+ readonly label: string;
+ readonly score: number | null;
+ }>;
+ };
+ }>;
+ };
+ readonly " $fragmentType": "AnnotationConfigTableFragment";
+};
+export type AnnotationConfigTableFragment$key = {
+ readonly " $data"?: AnnotationConfigTableFragment$data;
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationConfigTableFragment">;
+};
+
+const node: ReaderFragment = (function(){
+var v0 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+};
+return {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "AnnotationConfigTableFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "annotationConfig",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ }
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ (v2/*: any*/),
+ (v3/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+};
+})();
+
+(node as any).hash = "21da6fc76dc125423a2f4ecd8ef70866";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/CreateRetentionPolicyMutation.graphql.ts b/app/src/pages/settings/__generated__/CreateRetentionPolicyMutation.graphql.ts
new file mode 100644
index 0000000000..14ab2af380
--- /dev/null
+++ b/app/src/pages/settings/__generated__/CreateRetentionPolicyMutation.graphql.ts
@@ -0,0 +1,343 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type CreateProjectTraceRetentionPolicyInput = {
+ addProjects?: ReadonlyArray | null;
+ cronExpression: string;
+ name: string;
+ rule: ProjectTraceRetentionRuleInput;
+};
+export type ProjectTraceRetentionRuleInput = {
+ maxCount?: ProjectTraceRetentionRuleMaxCountInput | null;
+ maxDays?: ProjectTraceRetentionRuleMaxDaysInput | null;
+ maxDaysOrCount?: ProjectTraceRetentionRuleMaxDaysOrCountInput | null;
+};
+export type ProjectTraceRetentionRuleMaxDaysInput = {
+ maxDays: number;
+};
+export type ProjectTraceRetentionRuleMaxCountInput = {
+ maxCount: number;
+};
+export type ProjectTraceRetentionRuleMaxDaysOrCountInput = {
+ maxCount: number;
+ maxDays: number;
+};
+export type CreateRetentionPolicyMutation$variables = {
+ input: CreateProjectTraceRetentionPolicyInput;
+};
+export type CreateRetentionPolicyMutation$data = {
+ readonly createProjectTraceRetentionPolicy: {
+ readonly query: {
+ readonly " $fragmentSpreads": FragmentRefs<"RetentionPoliciesTable_policies">;
+ };
+ };
+};
+export type CreateRetentionPolicyMutation = {
+ response: CreateRetentionPolicyMutation$data;
+ variables: CreateRetentionPolicyMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+],
+v2 = [
+ {
+ "kind": "Literal",
+ "name": "first",
+ "value": 1000
+ }
+],
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "CreateRetentionPolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "createProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "RetentionPoliciesTable_policies"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "CreateRetentionPolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "createProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "projectTraceRetentionPolicies",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectConnection",
+ "kind": "LinkedField",
+ "name": "projects",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v5/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cursor",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "PageInfo",
+ "kind": "LinkedField",
+ "name": "pageInfo",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "endCursor",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "hasNextPage",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": "projectTraceRetentionPolicies(first:1000)"
+ },
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "filters": null,
+ "handle": "connection",
+ "key": "RetentionPoliciesTable_projectTraceRetentionPolicies",
+ "kind": "LinkedHandle",
+ "name": "projectTraceRetentionPolicies"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "12a0cf6ab2f80fe2096528b210217a40",
+ "id": null,
+ "metadata": {},
+ "name": "CreateRetentionPolicyMutation",
+ "operationKind": "mutation",
+ "text": "mutation CreateRetentionPolicyMutation(\n $input: CreateProjectTraceRetentionPolicyInput!\n) {\n createProjectTraceRetentionPolicy(input: $input) {\n query {\n ...RetentionPoliciesTable_policies\n }\n }\n}\n\nfragment RetentionPoliciesTable_policies on Query {\n projectTraceRetentionPolicies(first: 1000) {\n edges {\n node {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n projects {\n edges {\n node {\n name\n id\n }\n }\n }\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "b012653380fd1ba759a391f3eec9652b";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/EditRetentionPolicyMutation.graphql.ts b/app/src/pages/settings/__generated__/EditRetentionPolicyMutation.graphql.ts
new file mode 100644
index 0000000000..0134e3bdec
--- /dev/null
+++ b/app/src/pages/settings/__generated__/EditRetentionPolicyMutation.graphql.ts
@@ -0,0 +1,345 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type PatchProjectTraceRetentionPolicyInput = {
+ addProjects?: ReadonlyArray | null;
+ cronExpression?: string | null;
+ id: string;
+ name?: string | null;
+ removeProjects?: ReadonlyArray | null;
+ rule?: ProjectTraceRetentionRuleInput | null;
+};
+export type ProjectTraceRetentionRuleInput = {
+ maxCount?: ProjectTraceRetentionRuleMaxCountInput | null;
+ maxDays?: ProjectTraceRetentionRuleMaxDaysInput | null;
+ maxDaysOrCount?: ProjectTraceRetentionRuleMaxDaysOrCountInput | null;
+};
+export type ProjectTraceRetentionRuleMaxDaysInput = {
+ maxDays: number;
+};
+export type ProjectTraceRetentionRuleMaxCountInput = {
+ maxCount: number;
+};
+export type ProjectTraceRetentionRuleMaxDaysOrCountInput = {
+ maxCount: number;
+ maxDays: number;
+};
+export type EditRetentionPolicyMutation$variables = {
+ input: PatchProjectTraceRetentionPolicyInput;
+};
+export type EditRetentionPolicyMutation$data = {
+ readonly patchProjectTraceRetentionPolicy: {
+ readonly query: {
+ readonly " $fragmentSpreads": FragmentRefs<"RetentionPoliciesTable_policies">;
+ };
+ };
+};
+export type EditRetentionPolicyMutation = {
+ response: EditRetentionPolicyMutation$data;
+ variables: EditRetentionPolicyMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+],
+v2 = [
+ {
+ "kind": "Literal",
+ "name": "first",
+ "value": 1000
+ }
+],
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "EditRetentionPolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "patchProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "RetentionPoliciesTable_policies"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "EditRetentionPolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "patchProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "projectTraceRetentionPolicies",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectConnection",
+ "kind": "LinkedField",
+ "name": "projects",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v5/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cursor",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "PageInfo",
+ "kind": "LinkedField",
+ "name": "pageInfo",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "endCursor",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "hasNextPage",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": "projectTraceRetentionPolicies(first:1000)"
+ },
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "filters": null,
+ "handle": "connection",
+ "key": "RetentionPoliciesTable_projectTraceRetentionPolicies",
+ "kind": "LinkedHandle",
+ "name": "projectTraceRetentionPolicies"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "fc6be2930c80504b884b8bbfd7fdcfb9",
+ "id": null,
+ "metadata": {},
+ "name": "EditRetentionPolicyMutation",
+ "operationKind": "mutation",
+ "text": "mutation EditRetentionPolicyMutation(\n $input: PatchProjectTraceRetentionPolicyInput!\n) {\n patchProjectTraceRetentionPolicy(input: $input) {\n query {\n ...RetentionPoliciesTable_policies\n }\n }\n}\n\nfragment RetentionPoliciesTable_policies on Query {\n projectTraceRetentionPolicies(first: 1000) {\n edges {\n node {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n projects {\n edges {\n node {\n name\n id\n }\n }\n }\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "9abcdadabb062864df4609df1e0448fd";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/EditRetentionPolicyQuery.graphql.ts b/app/src/pages/settings/__generated__/EditRetentionPolicyQuery.graphql.ts
new file mode 100644
index 0000000000..a06f344042
--- /dev/null
+++ b/app/src/pages/settings/__generated__/EditRetentionPolicyQuery.graphql.ts
@@ -0,0 +1,221 @@
+/**
+ * @generated SignedSource<<93ee3e634f476390a8ec2ed7038b0e8e>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+export type EditRetentionPolicyQuery$variables = {
+ id: string;
+};
+export type EditRetentionPolicyQuery$data = {
+ readonly retentionPolicy: {
+ readonly cronExpression?: string;
+ readonly id?: string;
+ readonly name?: string;
+ readonly rule?: {
+ readonly maxCount?: number;
+ readonly maxDays?: number;
+ };
+ };
+};
+export type EditRetentionPolicyQuery = {
+ response: EditRetentionPolicyQuery$data;
+ variables: EditRetentionPolicyQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "id"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "id"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v6 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+},
+v8 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+},
+v9 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+},
+v10 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "EditRetentionPolicyQuery",
+ "selections": [
+ {
+ "alias": "retentionPolicy",
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "ProjectTraceRetentionPolicy",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "EditRetentionPolicyQuery",
+ "selections": [
+ {
+ "alias": "retentionPolicy",
+ "args": (v1/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v10/*: any*/),
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v10/*: any*/),
+ (v6/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "ProjectTraceRetentionPolicy",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "b22ef3b4215b44c32f941eb1483a6899",
+ "id": null,
+ "metadata": {},
+ "name": "EditRetentionPolicyQuery",
+ "operationKind": "query",
+ "text": "query EditRetentionPolicyQuery(\n $id: GlobalID!\n) {\n retentionPolicy: node(id: $id) {\n __typename\n ... on ProjectTraceRetentionPolicy {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n }\n __isNode: __typename\n id\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "fdfb6bfe85f8c83dc75da4e5aa1403f0";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/GlobalRetentionPolicyCardMutation.graphql.ts b/app/src/pages/settings/__generated__/GlobalRetentionPolicyCardMutation.graphql.ts
new file mode 100644
index 0000000000..984645aaca
--- /dev/null
+++ b/app/src/pages/settings/__generated__/GlobalRetentionPolicyCardMutation.graphql.ts
@@ -0,0 +1,165 @@
+/**
+ * @generated SignedSource<<0daa8fa2fabd3c12756220ece8632a77>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+export type PatchProjectTraceRetentionPolicyInput = {
+ addProjects?: ReadonlyArray | null;
+ cronExpression?: string | null;
+ id: string;
+ name?: string | null;
+ removeProjects?: ReadonlyArray | null;
+ rule?: ProjectTraceRetentionRuleInput | null;
+};
+export type ProjectTraceRetentionRuleInput = {
+ maxCount?: ProjectTraceRetentionRuleMaxCountInput | null;
+ maxDays?: ProjectTraceRetentionRuleMaxDaysInput | null;
+ maxDaysOrCount?: ProjectTraceRetentionRuleMaxDaysOrCountInput | null;
+};
+export type ProjectTraceRetentionRuleMaxDaysInput = {
+ maxDays: number;
+};
+export type ProjectTraceRetentionRuleMaxCountInput = {
+ maxCount: number;
+};
+export type ProjectTraceRetentionRuleMaxDaysOrCountInput = {
+ maxCount: number;
+ maxDays: number;
+};
+export type GlobalRetentionPolicyCardMutation$variables = {
+ input: PatchProjectTraceRetentionPolicyInput;
+};
+export type GlobalRetentionPolicyCardMutation$data = {
+ readonly patchProjectTraceRetentionPolicy: {
+ readonly node: {
+ readonly id: string;
+ readonly rule: {
+ readonly __typename: "TraceRetentionRuleMaxDays";
+ readonly maxDays: number;
+ } | {
+ // This will never be '%other', but we need some
+ // value in case none of the concrete values match.
+ readonly __typename: "%other";
+ };
+ };
+ };
+};
+export type GlobalRetentionPolicyCardMutation = {
+ response: GlobalRetentionPolicyCardMutation$data;
+ variables: GlobalRetentionPolicyCardMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "alias": null,
+ "args": [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+ ],
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "patchProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+ }
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+];
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "GlobalRetentionPolicyCardMutation",
+ "selections": (v1/*: any*/),
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "GlobalRetentionPolicyCardMutation",
+ "selections": (v1/*: any*/)
+ },
+ "params": {
+ "cacheID": "096e3d4c546229fc771e14be03b168a3",
+ "id": null,
+ "metadata": {},
+ "name": "GlobalRetentionPolicyCardMutation",
+ "operationKind": "mutation",
+ "text": "mutation GlobalRetentionPolicyCardMutation(\n $input: PatchProjectTraceRetentionPolicyInput!\n) {\n patchProjectTraceRetentionPolicy(input: $input) {\n node {\n id\n rule {\n __typename\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "d1757c8159d448039635f1fe035803c4";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/GlobalRetentionPolicyCardQuery.graphql.ts b/app/src/pages/settings/__generated__/GlobalRetentionPolicyCardQuery.graphql.ts
new file mode 100644
index 0000000000..b2417829d0
--- /dev/null
+++ b/app/src/pages/settings/__generated__/GlobalRetentionPolicyCardQuery.graphql.ts
@@ -0,0 +1,130 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+export type GlobalRetentionPolicyCardQuery$variables = Record;
+export type GlobalRetentionPolicyCardQuery$data = {
+ readonly defaultProjectTraceRetentionPolicy: {
+ readonly cronExpression: string;
+ readonly id: string;
+ readonly name: string;
+ readonly rule: {
+ readonly __typename: "TraceRetentionRuleMaxDays";
+ readonly maxDays: number;
+ } | {
+ // This will never be '%other', but we need some
+ // value in case none of the concrete values match.
+ readonly __typename: "%other";
+ };
+ };
+};
+export type GlobalRetentionPolicyCardQuery = {
+ response: GlobalRetentionPolicyCardQuery$data;
+ variables: GlobalRetentionPolicyCardQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "defaultProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+ }
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+];
+return {
+ "fragment": {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "GlobalRetentionPolicyCardQuery",
+ "selections": (v0/*: any*/),
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": [],
+ "kind": "Operation",
+ "name": "GlobalRetentionPolicyCardQuery",
+ "selections": (v0/*: any*/)
+ },
+ "params": {
+ "cacheID": "cc26cab6fcf5acfdf11abdb30f7f937f",
+ "id": null,
+ "metadata": {},
+ "name": "GlobalRetentionPolicyCardQuery",
+ "operationKind": "query",
+ "text": "query GlobalRetentionPolicyCardQuery {\n defaultProjectTraceRetentionPolicy {\n cronExpression\n id\n name\n rule {\n __typename\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "15a513ac6f9c30b739810e2e18d6f0ab";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/RetentionPoliciesTablePoliciesQuery.graphql.ts b/app/src/pages/settings/__generated__/RetentionPoliciesTablePoliciesQuery.graphql.ts
new file mode 100644
index 0000000000..8a53cde4f6
--- /dev/null
+++ b/app/src/pages/settings/__generated__/RetentionPoliciesTablePoliciesQuery.graphql.ts
@@ -0,0 +1,278 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type RetentionPoliciesTablePoliciesQuery$variables = {
+ after?: string | null;
+ first?: number | null;
+};
+export type RetentionPoliciesTablePoliciesQuery$data = {
+ readonly " $fragmentSpreads": FragmentRefs<"RetentionPoliciesTable_policies">;
+};
+export type RetentionPoliciesTablePoliciesQuery = {
+ response: RetentionPoliciesTablePoliciesQuery$data;
+ variables: RetentionPoliciesTablePoliciesQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "after"
+ },
+ {
+ "defaultValue": 1000,
+ "kind": "LocalArgument",
+ "name": "first"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "after",
+ "variableName": "after"
+ },
+ {
+ "kind": "Variable",
+ "name": "first",
+ "variableName": "first"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "RetentionPoliciesTablePoliciesQuery",
+ "selections": [
+ {
+ "args": (v1/*: any*/),
+ "kind": "FragmentSpread",
+ "name": "RetentionPoliciesTable_policies"
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "RetentionPoliciesTablePoliciesQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "projectTraceRetentionPolicies",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ (v3/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/),
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectConnection",
+ "kind": "LinkedField",
+ "name": "projects",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v2/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v4/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cursor",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "PageInfo",
+ "kind": "LinkedField",
+ "name": "pageInfo",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "endCursor",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "hasNextPage",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "filters": null,
+ "handle": "connection",
+ "key": "RetentionPoliciesTable_projectTraceRetentionPolicies",
+ "kind": "LinkedHandle",
+ "name": "projectTraceRetentionPolicies"
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "62c02ffd47a43c93aab0b0033afb3695",
+ "id": null,
+ "metadata": {},
+ "name": "RetentionPoliciesTablePoliciesQuery",
+ "operationKind": "query",
+ "text": "query RetentionPoliciesTablePoliciesQuery(\n $after: String = null\n $first: Int = 1000\n) {\n ...RetentionPoliciesTable_policies_2HEEH6\n}\n\nfragment RetentionPoliciesTable_policies_2HEEH6 on Query {\n projectTraceRetentionPolicies(first: $first, after: $after) {\n edges {\n node {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n projects {\n edges {\n node {\n name\n id\n }\n }\n }\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "05f4dfa540c2b394a25b5a004349c49f";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/RetentionPoliciesTable_policies.graphql.ts b/app/src/pages/settings/__generated__/RetentionPoliciesTable_policies.graphql.ts
new file mode 100644
index 0000000000..6449453cf6
--- /dev/null
+++ b/app/src/pages/settings/__generated__/RetentionPoliciesTable_policies.graphql.ts
@@ -0,0 +1,287 @@
+/**
+ * @generated SignedSource<<28d8f8e109ecc4d30c216058bab9986c>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type RetentionPoliciesTable_policies$data = {
+ readonly projectTraceRetentionPolicies: {
+ readonly edges: ReadonlyArray<{
+ readonly node: {
+ readonly cronExpression: string;
+ readonly id: string;
+ readonly name: string;
+ readonly projects: {
+ readonly edges: ReadonlyArray<{
+ readonly node: {
+ readonly id: string;
+ readonly name: string;
+ };
+ }>;
+ };
+ readonly rule: {
+ readonly __typename: "TraceRetentionRuleMaxCount";
+ readonly maxCount: number;
+ } | {
+ readonly __typename: "TraceRetentionRuleMaxDays";
+ readonly maxDays: number;
+ } | {
+ readonly __typename: "TraceRetentionRuleMaxDaysOrCount";
+ readonly maxCount: number;
+ readonly maxDays: number;
+ } | {
+ // This will never be '%other', but we need some
+ // value in case none of the concrete values match.
+ readonly __typename: "%other";
+ };
+ };
+ }>;
+ };
+ readonly " $fragmentType": "RetentionPoliciesTable_policies";
+};
+export type RetentionPoliciesTable_policies$key = {
+ readonly " $data"?: RetentionPoliciesTable_policies$data;
+ readonly " $fragmentSpreads": FragmentRefs<"RetentionPoliciesTable_policies">;
+};
+
+import RetentionPoliciesTablePoliciesQuery_graphql from './RetentionPoliciesTablePoliciesQuery.graphql';
+
+const node: ReaderFragment = (function(){
+var v0 = [
+ "projectTraceRetentionPolicies"
+],
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+};
+return {
+ "argumentDefinitions": [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "after"
+ },
+ {
+ "defaultValue": 1000,
+ "kind": "LocalArgument",
+ "name": "first"
+ }
+ ],
+ "kind": "Fragment",
+ "metadata": {
+ "connection": [
+ {
+ "count": "first",
+ "cursor": "after",
+ "direction": "forward",
+ "path": (v0/*: any*/)
+ }
+ ],
+ "refetch": {
+ "connection": {
+ "forward": {
+ "count": "first",
+ "cursor": "after"
+ },
+ "backward": null,
+ "path": (v0/*: any*/)
+ },
+ "fragmentPathInResult": [],
+ "operation": RetentionPoliciesTablePoliciesQuery_graphql
+ }
+ },
+ "name": "RetentionPoliciesTable_policies",
+ "selections": [
+ {
+ "alias": "projectTraceRetentionPolicies",
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "__RetentionPoliciesTable_projectTraceRetentionPolicies_connection",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v1/*: any*/),
+ (v2/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/),
+ (v4/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectConnection",
+ "kind": "LinkedField",
+ "name": "projects",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ (v1/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cursor",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "PageInfo",
+ "kind": "LinkedField",
+ "name": "pageInfo",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "endCursor",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "hasNextPage",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+};
+})();
+
+(node as any).hash = "05f4dfa540c2b394a25b5a004349c49f";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/RetentionPolicyActionMenuDeletePolicyMutation.graphql.ts b/app/src/pages/settings/__generated__/RetentionPolicyActionMenuDeletePolicyMutation.graphql.ts
new file mode 100644
index 0000000000..78041bee21
--- /dev/null
+++ b/app/src/pages/settings/__generated__/RetentionPolicyActionMenuDeletePolicyMutation.graphql.ts
@@ -0,0 +1,325 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type DeleteProjectTraceRetentionPolicyInput = {
+ id: string;
+};
+export type RetentionPolicyActionMenuDeletePolicyMutation$variables = {
+ input: DeleteProjectTraceRetentionPolicyInput;
+};
+export type RetentionPolicyActionMenuDeletePolicyMutation$data = {
+ readonly deleteProjectTraceRetentionPolicy: {
+ readonly query: {
+ readonly " $fragmentSpreads": FragmentRefs<"RetentionPoliciesTable_policies">;
+ };
+ };
+};
+export type RetentionPolicyActionMenuDeletePolicyMutation = {
+ response: RetentionPolicyActionMenuDeletePolicyMutation$data;
+ variables: RetentionPolicyActionMenuDeletePolicyMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+],
+v2 = [
+ {
+ "kind": "Literal",
+ "name": "first",
+ "value": 1000
+ }
+],
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "RetentionPolicyActionMenuDeletePolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "deleteProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "RetentionPoliciesTable_policies"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "RetentionPolicyActionMenuDeletePolicyMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyMutationPayload",
+ "kind": "LinkedField",
+ "name": "deleteProjectTraceRetentionPolicy",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "projectTraceRetentionPolicies",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v5/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v6/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectConnection",
+ "kind": "LinkedField",
+ "name": "projects",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v5/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cursor",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "PageInfo",
+ "kind": "LinkedField",
+ "name": "pageInfo",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "endCursor",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "hasNextPage",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": "projectTraceRetentionPolicies(first:1000)"
+ },
+ {
+ "alias": null,
+ "args": (v2/*: any*/),
+ "filters": null,
+ "handle": "connection",
+ "key": "RetentionPoliciesTable_projectTraceRetentionPolicies",
+ "kind": "LinkedHandle",
+ "name": "projectTraceRetentionPolicies"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "afc021156633e97d64f94bfdf6a1829d",
+ "id": null,
+ "metadata": {},
+ "name": "RetentionPolicyActionMenuDeletePolicyMutation",
+ "operationKind": "mutation",
+ "text": "mutation RetentionPolicyActionMenuDeletePolicyMutation(\n $input: DeleteProjectTraceRetentionPolicyInput!\n) {\n deleteProjectTraceRetentionPolicy(input: $input) {\n query {\n ...RetentionPoliciesTable_policies\n }\n }\n}\n\nfragment RetentionPoliciesTable_policies on Query {\n projectTraceRetentionPolicies(first: 1000) {\n edges {\n node {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n projects {\n edges {\n node {\n name\n id\n }\n }\n }\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "788c62dc4dbfec7dfce278655dc9cc63";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/SettingsAnnotationsPageCreateAnnotationConfigMutation.graphql.ts b/app/src/pages/settings/__generated__/SettingsAnnotationsPageCreateAnnotationConfigMutation.graphql.ts
new file mode 100644
index 0000000000..2ee43ac0ec
--- /dev/null
+++ b/app/src/pages/settings/__generated__/SettingsAnnotationsPageCreateAnnotationConfigMutation.graphql.ts
@@ -0,0 +1,365 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type OptimizationDirection = "MAXIMIZE" | "MINIMIZE" | "NONE";
+export type CreateAnnotationConfigInput = {
+ annotationConfig: AnnotationConfigInput;
+};
+export type AnnotationConfigInput = {
+ categorical?: CategoricalAnnotationConfigInput | null;
+ continuous?: ContinuousAnnotationConfigInput | null;
+ freeform?: FreeformAnnotationConfigInput | null;
+};
+export type CategoricalAnnotationConfigInput = {
+ description?: string | null;
+ name: string;
+ optimizationDirection: OptimizationDirection;
+ values: ReadonlyArray;
+};
+export type CategoricalAnnotationConfigValueInput = {
+ label: string;
+ score?: number | null;
+};
+export type ContinuousAnnotationConfigInput = {
+ description?: string | null;
+ lowerBound?: number | null;
+ name: string;
+ optimizationDirection: OptimizationDirection;
+ upperBound?: number | null;
+};
+export type FreeformAnnotationConfigInput = {
+ description?: string | null;
+ name: string;
+};
+export type SettingsAnnotationsPageCreateAnnotationConfigMutation$variables = {
+ input: CreateAnnotationConfigInput;
+};
+export type SettingsAnnotationsPageCreateAnnotationConfigMutation$data = {
+ readonly createAnnotationConfig: {
+ readonly annotationConfig: {
+ readonly id?: string;
+ };
+ readonly query: {
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationConfigTableFragment">;
+ };
+ };
+};
+export type SettingsAnnotationsPageCreateAnnotationConfigMutation = {
+ response: SettingsAnnotationsPageCreateAnnotationConfigMutation$data;
+ variables: SettingsAnnotationsPageCreateAnnotationConfigMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v3 = [
+ (v2/*: any*/)
+],
+v4 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+},
+v5 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+},
+v6 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v9 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+},
+v10 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v11 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+},
+v12 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "Node",
+ "abstractKey": "__isNode"
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SettingsAnnotationsPageCreateAnnotationConfigMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "CreateAnnotationConfigPayload",
+ "kind": "LinkedField",
+ "name": "createAnnotationConfig",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationConfigTableFragment"
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "annotationConfig",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "SettingsAnnotationsPageCreateAnnotationConfigMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "CreateAnnotationConfigPayload",
+ "kind": "LinkedField",
+ "name": "createAnnotationConfig",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "annotationConfig",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v7/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/),
+ (v11/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/),
+ (v11/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ }
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+ },
+ (v12/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "annotationConfig",
+ "plural": false,
+ "selections": [
+ (v7/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ (v12/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "4435e31203bf8e2b8e4f332973a4cd4e",
+ "id": null,
+ "metadata": {},
+ "name": "SettingsAnnotationsPageCreateAnnotationConfigMutation",
+ "operationKind": "mutation",
+ "text": "mutation SettingsAnnotationsPageCreateAnnotationConfigMutation(\n $input: CreateAnnotationConfigInput!\n) {\n createAnnotationConfig(input: $input) {\n query {\n ...AnnotationConfigTableFragment\n }\n annotationConfig {\n __typename\n ... on ContinuousAnnotationConfig {\n id\n }\n ... on CategoricalAnnotationConfig {\n id\n }\n ... on FreeformAnnotationConfig {\n id\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n}\n\nfragment AnnotationConfigTableFragment on Query {\n annotationConfigs {\n edges {\n annotationConfig: node {\n __typename\n ... on CategoricalAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on ContinuousAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n upperBound\n lowerBound\n }\n ... on FreeformAnnotationConfig {\n id\n name\n description\n annotationType\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "74c458f1e12e1bea2f151135710bbc9b";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/SettingsAnnotationsPageDeleteAnnotationConfigsMutation.graphql.ts b/app/src/pages/settings/__generated__/SettingsAnnotationsPageDeleteAnnotationConfigsMutation.graphql.ts
new file mode 100644
index 0000000000..0cc109287a
--- /dev/null
+++ b/app/src/pages/settings/__generated__/SettingsAnnotationsPageDeleteAnnotationConfigsMutation.graphql.ts
@@ -0,0 +1,314 @@
+/**
+ * @generated SignedSource<<7eb8694ecae2dc63cf907067ab06e3ed>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type DeleteAnnotationConfigsInput = {
+ ids: ReadonlyArray;
+};
+export type SettingsAnnotationsPageDeleteAnnotationConfigsMutation$variables = {
+ input: DeleteAnnotationConfigsInput;
+};
+export type SettingsAnnotationsPageDeleteAnnotationConfigsMutation$data = {
+ readonly deleteAnnotationConfigs: {
+ readonly annotationConfigs: ReadonlyArray<{
+ readonly __typename: string;
+ }>;
+ readonly query: {
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationConfigTableFragment">;
+ };
+ };
+};
+export type SettingsAnnotationsPageDeleteAnnotationConfigsMutation = {
+ response: SettingsAnnotationsPageDeleteAnnotationConfigsMutation$data;
+ variables: SettingsAnnotationsPageDeleteAnnotationConfigsMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+},
+v8 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SettingsAnnotationsPageDeleteAnnotationConfigsMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "DeleteAnnotationConfigsPayload",
+ "kind": "LinkedField",
+ "name": "deleteAnnotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationConfigTableFragment"
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": true,
+ "selections": [
+ (v2/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "SettingsAnnotationsPageDeleteAnnotationConfigsMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "DeleteAnnotationConfigsPayload",
+ "kind": "LinkedField",
+ "name": "deleteAnnotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "annotationConfig",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ (v7/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ (v7/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ }
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+ },
+ (v8/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": true,
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "28849f96b36f84c371f844e7dd29a556",
+ "id": null,
+ "metadata": {},
+ "name": "SettingsAnnotationsPageDeleteAnnotationConfigsMutation",
+ "operationKind": "mutation",
+ "text": "mutation SettingsAnnotationsPageDeleteAnnotationConfigsMutation(\n $input: DeleteAnnotationConfigsInput!\n) {\n deleteAnnotationConfigs(input: $input) {\n query {\n ...AnnotationConfigTableFragment\n }\n annotationConfigs {\n __typename\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n}\n\nfragment AnnotationConfigTableFragment on Query {\n annotationConfigs {\n edges {\n annotationConfig: node {\n __typename\n ... on CategoricalAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on ContinuousAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n upperBound\n lowerBound\n }\n ... on FreeformAnnotationConfig {\n id\n name\n description\n annotationType\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "eb70268f6479962e1e34d53ffe89c333";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/SettingsAnnotationsPageFragment.graphql.ts b/app/src/pages/settings/__generated__/SettingsAnnotationsPageFragment.graphql.ts
new file mode 100644
index 0000000000..6e32a44776
--- /dev/null
+++ b/app/src/pages/settings/__generated__/SettingsAnnotationsPageFragment.graphql.ts
@@ -0,0 +1,40 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type SettingsAnnotationsPageFragment$data = {
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationConfigTableFragment">;
+ readonly " $fragmentType": "SettingsAnnotationsPageFragment";
+};
+export type SettingsAnnotationsPageFragment$key = {
+ readonly " $data"?: SettingsAnnotationsPageFragment$data;
+ readonly " $fragmentSpreads": FragmentRefs<"SettingsAnnotationsPageFragment">;
+};
+
+const node: ReaderFragment = {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SettingsAnnotationsPageFragment",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationConfigTableFragment"
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+};
+
+(node as any).hash = "899b97b5d6aabeea206cd9b4ffb1a773";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/SettingsAnnotationsPageUpdateAnnotationConfigMutation.graphql.ts b/app/src/pages/settings/__generated__/SettingsAnnotationsPageUpdateAnnotationConfigMutation.graphql.ts
new file mode 100644
index 0000000000..ea98fdbb94
--- /dev/null
+++ b/app/src/pages/settings/__generated__/SettingsAnnotationsPageUpdateAnnotationConfigMutation.graphql.ts
@@ -0,0 +1,366 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type OptimizationDirection = "MAXIMIZE" | "MINIMIZE" | "NONE";
+export type UpdateAnnotationConfigInput = {
+ annotationConfig: AnnotationConfigInput;
+ id: string;
+};
+export type AnnotationConfigInput = {
+ categorical?: CategoricalAnnotationConfigInput | null;
+ continuous?: ContinuousAnnotationConfigInput | null;
+ freeform?: FreeformAnnotationConfigInput | null;
+};
+export type CategoricalAnnotationConfigInput = {
+ description?: string | null;
+ name: string;
+ optimizationDirection: OptimizationDirection;
+ values: ReadonlyArray;
+};
+export type CategoricalAnnotationConfigValueInput = {
+ label: string;
+ score?: number | null;
+};
+export type ContinuousAnnotationConfigInput = {
+ description?: string | null;
+ lowerBound?: number | null;
+ name: string;
+ optimizationDirection: OptimizationDirection;
+ upperBound?: number | null;
+};
+export type FreeformAnnotationConfigInput = {
+ description?: string | null;
+ name: string;
+};
+export type SettingsAnnotationsPageUpdateAnnotationConfigMutation$variables = {
+ input: UpdateAnnotationConfigInput;
+};
+export type SettingsAnnotationsPageUpdateAnnotationConfigMutation$data = {
+ readonly updateAnnotationConfig: {
+ readonly annotationConfig: {
+ readonly id?: string;
+ };
+ readonly query: {
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationConfigTableFragment">;
+ };
+ };
+};
+export type SettingsAnnotationsPageUpdateAnnotationConfigMutation = {
+ response: SettingsAnnotationsPageUpdateAnnotationConfigMutation$data;
+ variables: SettingsAnnotationsPageUpdateAnnotationConfigMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "kind": "Variable",
+ "name": "input",
+ "variableName": "input"
+ }
+],
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v3 = [
+ (v2/*: any*/)
+],
+v4 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+},
+v5 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+},
+v6 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v9 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+},
+v10 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v11 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+},
+v12 = {
+ "kind": "InlineFragment",
+ "selections": (v3/*: any*/),
+ "type": "Node",
+ "abstractKey": "__isNode"
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SettingsAnnotationsPageUpdateAnnotationConfigMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "UpdateAnnotationConfigPayload",
+ "kind": "LinkedField",
+ "name": "updateAnnotationConfig",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationConfigTableFragment"
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "annotationConfig",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "SettingsAnnotationsPageUpdateAnnotationConfigMutation",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v1/*: any*/),
+ "concreteType": "UpdateAnnotationConfigPayload",
+ "kind": "LinkedField",
+ "name": "updateAnnotationConfig",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Query",
+ "kind": "LinkedField",
+ "name": "query",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "annotationConfig",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v7/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/),
+ (v11/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/),
+ (v11/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ }
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v8/*: any*/),
+ (v9/*: any*/),
+ (v10/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+ },
+ (v12/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "annotationConfig",
+ "plural": false,
+ "selections": [
+ (v7/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ (v12/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "257070989d0363192027975755cb6b4a",
+ "id": null,
+ "metadata": {},
+ "name": "SettingsAnnotationsPageUpdateAnnotationConfigMutation",
+ "operationKind": "mutation",
+ "text": "mutation SettingsAnnotationsPageUpdateAnnotationConfigMutation(\n $input: UpdateAnnotationConfigInput!\n) {\n updateAnnotationConfig(input: $input) {\n query {\n ...AnnotationConfigTableFragment\n }\n annotationConfig {\n __typename\n ... on ContinuousAnnotationConfig {\n id\n }\n ... on CategoricalAnnotationConfig {\n id\n }\n ... on FreeformAnnotationConfig {\n id\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n}\n\nfragment AnnotationConfigTableFragment on Query {\n annotationConfigs {\n edges {\n annotationConfig: node {\n __typename\n ... on CategoricalAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on ContinuousAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n upperBound\n lowerBound\n }\n ... on FreeformAnnotationConfig {\n id\n name\n description\n annotationType\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "c40b34d87894e9ff40a6b2d9dbfd035b";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/settingsAnnotationsPageLoaderQuery.graphql.ts b/app/src/pages/settings/__generated__/settingsAnnotationsPageLoaderQuery.graphql.ts
new file mode 100644
index 0000000000..413bfb935a
--- /dev/null
+++ b/app/src/pages/settings/__generated__/settingsAnnotationsPageLoaderQuery.graphql.ts
@@ -0,0 +1,217 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type settingsAnnotationsPageLoaderQuery$variables = Record;
+export type settingsAnnotationsPageLoaderQuery$data = {
+ readonly " $fragmentSpreads": FragmentRefs<"SettingsAnnotationsPageFragment">;
+};
+export type settingsAnnotationsPageLoaderQuery = {
+ response: settingsAnnotationsPageLoaderQuery$data;
+ variables: settingsAnnotationsPageLoaderQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "settingsAnnotationsPageLoaderQuery",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "SettingsAnnotationsPageFragment"
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": [],
+ "kind": "Operation",
+ "name": "settingsAnnotationsPageLoaderQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "annotationConfig",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ }
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ (v2/*: any*/),
+ (v3/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "38239699bb1676189730a7c5bfaf3e16",
+ "id": null,
+ "metadata": {},
+ "name": "settingsAnnotationsPageLoaderQuery",
+ "operationKind": "query",
+ "text": "query settingsAnnotationsPageLoaderQuery {\n ...SettingsAnnotationsPageFragment\n}\n\nfragment AnnotationConfigTableFragment on Query {\n annotationConfigs {\n edges {\n annotationConfig: node {\n __typename\n ... on CategoricalAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on ContinuousAnnotationConfig {\n id\n name\n description\n annotationType\n optimizationDirection\n upperBound\n lowerBound\n }\n ... on FreeformAnnotationConfig {\n id\n name\n description\n annotationType\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n}\n\nfragment SettingsAnnotationsPageFragment on Query {\n ...AnnotationConfigTableFragment\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "b14c2976ebc78d4709a4e54aa2a20132";
+
+export default node;
diff --git a/app/src/pages/settings/__generated__/settingsDataPageLoaderQuery.graphql.ts b/app/src/pages/settings/__generated__/settingsDataPageLoaderQuery.graphql.ts
new file mode 100644
index 0000000000..59e98f6cc1
--- /dev/null
+++ b/app/src/pages/settings/__generated__/settingsDataPageLoaderQuery.graphql.ts
@@ -0,0 +1,258 @@
+/**
+ * @generated SignedSource<<728b6b0b94b33778ba61e176718c54b0>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type settingsDataPageLoaderQuery$variables = Record;
+export type settingsDataPageLoaderQuery$data = {
+ readonly " $fragmentSpreads": FragmentRefs<"RetentionPoliciesTable_policies">;
+};
+export type settingsDataPageLoaderQuery = {
+ response: settingsDataPageLoaderQuery$data;
+ variables: settingsDataPageLoaderQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "kind": "Literal",
+ "name": "first",
+ "value": 1000
+ }
+],
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v2 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxCount",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "maxDays",
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "settingsDataPageLoaderQuery",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "RetentionPoliciesTable_policies"
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": [],
+ "kind": "Operation",
+ "name": "settingsDataPageLoaderQuery",
+ "selections": [
+ {
+ "alias": null,
+ "args": (v0/*: any*/),
+ "concreteType": "ProjectTraceRetentionPolicyConnection",
+ "kind": "LinkedField",
+ "name": "projectTraceRetentionPolicies",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicyEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectTraceRetentionPolicy",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v1/*: any*/),
+ (v2/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cronExpression",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "rule",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxCount",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDays",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v5/*: any*/),
+ (v4/*: any*/)
+ ],
+ "type": "TraceRetentionRuleMaxDaysOrCount",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectConnection",
+ "kind": "LinkedField",
+ "name": "projects",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "ProjectEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ (v1/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v3/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cursor",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "PageInfo",
+ "kind": "LinkedField",
+ "name": "pageInfo",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "endCursor",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "hasNextPage",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": "projectTraceRetentionPolicies(first:1000)"
+ },
+ {
+ "alias": null,
+ "args": (v0/*: any*/),
+ "filters": null,
+ "handle": "connection",
+ "key": "RetentionPoliciesTable_projectTraceRetentionPolicies",
+ "kind": "LinkedHandle",
+ "name": "projectTraceRetentionPolicies"
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "b36af62a21f20f8cd9b25811d3426697",
+ "id": null,
+ "metadata": {},
+ "name": "settingsDataPageLoaderQuery",
+ "operationKind": "query",
+ "text": "query settingsDataPageLoaderQuery {\n ...RetentionPoliciesTable_policies\n}\n\nfragment RetentionPoliciesTable_policies on Query {\n projectTraceRetentionPolicies(first: 1000) {\n edges {\n node {\n id\n name\n cronExpression\n rule {\n __typename\n ... on TraceRetentionRuleMaxCount {\n maxCount\n }\n ... on TraceRetentionRuleMaxDays {\n maxDays\n }\n ... on TraceRetentionRuleMaxDaysOrCount {\n maxDays\n maxCount\n }\n }\n projects {\n edges {\n node {\n name\n id\n }\n }\n }\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "5d39aac927106159abef842419b123ed";
+
+export default node;
diff --git a/app/src/pages/settings/settingsAnnotationsPageLoader.ts b/app/src/pages/settings/settingsAnnotationsPageLoader.ts
new file mode 100644
index 0000000000..4b0436b298
--- /dev/null
+++ b/app/src/pages/settings/settingsAnnotationsPageLoader.ts
@@ -0,0 +1,21 @@
+import { fetchQuery, graphql } from "react-relay";
+
+import RelayEnvironment from "@phoenix/RelayEnvironment";
+
+import { settingsAnnotationsPageLoaderQuery } from "./__generated__/settingsAnnotationsPageLoaderQuery.graphql";
+
+export async function settingsAnnotationsPageLoader() {
+ return await fetchQuery(
+ RelayEnvironment,
+ graphql`
+ query settingsAnnotationsPageLoaderQuery {
+ ...SettingsAnnotationsPageFragment
+ }
+ `,
+ {}
+ ).toPromise();
+}
+
+export type SettingsAnnotationsPageLoaderData = NonNullable<
+ Awaited>
+>;
diff --git a/app/src/pages/settings/settingsDataPageLoader.tsx b/app/src/pages/settings/settingsDataPageLoader.tsx
new file mode 100644
index 0000000000..a889b3ecc3
--- /dev/null
+++ b/app/src/pages/settings/settingsDataPageLoader.tsx
@@ -0,0 +1,17 @@
+import { fetchQuery, graphql } from "react-relay";
+
+import RelayEnvironment from "@phoenix/RelayEnvironment";
+
+import { settingsDataPageLoaderQuery } from "./__generated__/settingsDataPageLoaderQuery.graphql";
+
+export async function settingsDataPageLoader() {
+ return await fetchQuery(
+ RelayEnvironment,
+ graphql`
+ query settingsDataPageLoaderQuery {
+ ...RetentionPoliciesTable_policies
+ }
+ `,
+ {}
+ ).toPromise();
+}
diff --git a/app/src/pages/settings/types.ts b/app/src/pages/settings/types.ts
new file mode 100644
index 0000000000..547e2a2d5d
--- /dev/null
+++ b/app/src/pages/settings/types.ts
@@ -0,0 +1,42 @@
+import {
+ AnnotationConfigTableFragment$data,
+ AnnotationType,
+} from "@phoenix/pages/settings/__generated__/AnnotationConfigTableFragment.graphql";
+import { Mutable } from "@phoenix/typeUtils";
+
+export type AnnotationConfigBase = Mutable<
+ AnnotationConfigTableFragment$data["annotationConfigs"]["edges"][number]["annotationConfig"]
+> & { name: string; annotationType: AnnotationType };
+
+export type AnnotationConfigContinuous = Pick<
+ AnnotationConfigBase,
+ | "id"
+ | "name"
+ | "description"
+ | "optimizationDirection"
+ | "upperBound"
+ | "lowerBound"
+> & { annotationType: "CONTINUOUS" };
+
+export type AnnotationConfigCategorical = Pick<
+ AnnotationConfigBase,
+ "id" | "name" | "description" | "optimizationDirection" | "values"
+> & { annotationType: "CATEGORICAL" };
+
+export type AnnotationConfigFreeform = Pick<
+ AnnotationConfigBase,
+ "id" | "name" | "description"
+> & { annotationType: "FREEFORM" };
+
+export type AnnotationConfigType = NonNullable<
+ AnnotationConfigBase["annotationType"]
+>;
+
+export type AnnotationConfigOptimizationDirection = NonNullable<
+ AnnotationConfigBase["optimizationDirection"]
+>;
+
+export type AnnotationConfig =
+ | AnnotationConfigContinuous
+ | AnnotationConfigCategorical
+ | AnnotationConfigFreeform;
diff --git a/app/src/pages/trace/SessionDetails.tsx b/app/src/pages/trace/SessionDetails.tsx
index 6758fc94f6..c23632ad30 100644
--- a/app/src/pages/trace/SessionDetails.tsx
+++ b/app/src/pages/trace/SessionDetails.tsx
@@ -106,14 +106,8 @@ export function SessionDetails(props: SessionDetailsProps) {
cumulativeTokenCountPrompt
latencyMs
startTime
- spanAnnotations {
- name
- label
- score
- explanation
- annotatorKind
- }
spanId
+ ...AnnotationSummaryGroup
}
}
}
diff --git a/app/src/pages/trace/SessionDetailsTraceList.tsx b/app/src/pages/trace/SessionDetailsTraceList.tsx
index 472ea575e3..e9f9e74947 100644
--- a/app/src/pages/trace/SessionDetailsTraceList.tsx
+++ b/app/src/pages/trace/SessionDetailsTraceList.tsx
@@ -7,13 +7,11 @@ import {
} from "@arizeai/openinference-semantic-conventions";
import { Flex, Icon, Icons, Link, Text, View } from "@phoenix/components";
-import {
- AnnotationLabel,
- AnnotationTooltip,
-} from "@phoenix/components/annotation";
+import { AnnotationSummaryGroupTokens } from "@phoenix/components/annotation/AnnotationSummaryGroup";
import { JSONBlock } from "@phoenix/components/code";
import { LatencyText } from "@phoenix/components/trace/LatencyText";
import { TokenCount } from "@phoenix/components/trace/TokenCount";
+import { SELECTED_SPAN_NODE_ID_PARAM } from "@phoenix/constants/searchParams";
import { useChatMessageStyles } from "@phoenix/hooks/useChatMessageStyles";
import { isStringKeyedObject } from "@phoenix/typeUtils";
import { safelyParseJSON } from "@phoenix/utils/jsonUtils";
@@ -114,7 +112,7 @@ function RootSpanDetails({
Trace #{index + 1}
View Trace
@@ -151,19 +149,10 @@ function RootSpanDetails({
Feedback
- {rootSpan.spanAnnotations.length > 0
- ? rootSpan.spanAnnotations.map((annotation) => (
-
-
-
- ))
- : "--"}
+ "--"}
+ />
diff --git a/app/src/pages/trace/SpanAside.tsx b/app/src/pages/trace/SpanAside.tsx
index d371517a47..a4a0668dad 100644
--- a/app/src/pages/trace/SpanAside.tsx
+++ b/app/src/pages/trace/SpanAside.tsx
@@ -1,15 +1,24 @@
-import React from "react";
-import { graphql, useRefetchableFragment } from "react-relay";
+import React, { Suspense, useMemo } from "react";
+import { graphql, useFragment } from "react-relay";
import { PanelGroup } from "react-resizable-panels";
import { css } from "@emotion/react";
-import { View } from "@phoenix/components";
+import { Flex, KeyboardToken, View } from "@phoenix/components";
import { AnnotationLabel } from "@phoenix/components/annotation";
import { TitledPanel } from "@phoenix/components/react-resizable-panels";
-import { SpanAnnotationsEditor } from "@phoenix/components/trace/SpanAnnotationsEditor";
+import {
+ EDIT_ANNOTATION_HOTKEY,
+ SpanAnnotationsEditor,
+} from "@phoenix/components/trace/SpanAnnotationsEditor";
+import { SpanAsideAnnotationList_span$key } from "@phoenix/pages/trace/__generated__/SpanAsideAnnotationList_span.graphql";
+import { deduplicateAnnotationsByName } from "@phoenix/pages/trace/utils";
import { SpanAside_span$key } from "./__generated__/SpanAside_span.graphql";
-import { SpanAsideSpanQuery } from "./__generated__/SpanAsideSpanQuery.graphql";
+import {
+ NOTE_HOTKEY,
+ SpanNotesEditor,
+ SpanNotesEditorSkeleton,
+} from "./SpanNotesEditor";
const annotationListCSS = css`
display: flex;
@@ -20,17 +29,50 @@ const annotationListCSS = css`
align-items: flex-start;
`;
+type SpanAsideProps = {
+ span: SpanAside_span$key;
+};
+
/**
* A component that shows the details of a span that is supplementary to the main span details
*/
-export function SpanAside(props: { span: SpanAside_span$key }) {
- const [data] = useRefetchableFragment(
+export function SpanAside(props: SpanAsideProps) {
+ const data = useFragment(
graphql`
fragment SpanAside_span on Span
- @refetchable(queryName: "SpanAsideSpanQuery") {
+ @argumentDefinitions(filterUserIds: { type: "[GlobalID]" }) {
id
project {
id
+ ...AnnotationConfigListProjectAnnotationConfigFragment
+ annotationConfigs {
+ configs: edges {
+ config: node {
+ ... on Node {
+ id
+ }
+ ... on AnnotationConfigBase {
+ name
+ description
+ annotationType
+ }
+ ... on CategoricalAnnotationConfig {
+ values {
+ label
+ score
+ }
+ }
+ ... on ContinuousAnnotationConfig {
+ lowerBound
+ upperBound
+ optimizationDirection
+ }
+ ... on FreeformAnnotationConfig {
+ name
+ }
+ }
+ }
+ }
}
code: statusCode
startTime
@@ -38,42 +80,29 @@ export function SpanAside(props: { span: SpanAside_span$key }) {
tokenCountTotal
tokenCountPrompt
tokenCountCompletion
- spanAnnotations {
- id
- name
- label
- annotatorKind
- score
- }
+ ...TraceHeaderRootSpanAnnotationsFragment
+ ...SpanAsideAnnotationList_span
+ @arguments(filterUserIds: $filterUserIds)
}
`,
props.span
);
- const annotations = data.spanAnnotations;
- const hasAnnotations = annotations.length > 0;
return (
- {hasAnnotations && (
-
-
-
- {annotations.map((annotation) => (
- -
-
-
- ))}
-
-
-
- )}
-
+
+
+
+
+ Edit annotations
+ {EDIT_ANNOTATION_HOTKEY}
+
+ }
+ panelProps={{ order: 2, minSize: 10 }}
+ >
+
+ Notes
+ {NOTE_HOTKEY}
+
+ }
+ panelProps={{ order: 3, minSize: 10 }}
+ >
+
+ }>
+
+
+
+
);
}
+
+function SpanAsideAnnotationList(props: {
+ span: SpanAsideAnnotationList_span$key;
+}) {
+ const data = useFragment(
+ graphql`
+ fragment SpanAsideAnnotationList_span on Span
+ @argumentDefinitions(filterUserIds: { type: "[GlobalID]" }) {
+ project {
+ id
+ annotationConfigs {
+ configs: edges {
+ config: node {
+ ... on Node {
+ id
+ }
+ ... on AnnotationConfigBase {
+ name
+ }
+ }
+ }
+ }
+ }
+ filteredSpanAnnotations: spanAnnotations(
+ filter: {
+ exclude: { names: ["note"] }
+ include: { userIds: $filterUserIds }
+ }
+ ) {
+ id
+ name
+ annotatorKind
+ score
+ label
+ explanation
+ createdAt
+ }
+ }
+ `,
+ props.span
+ );
+ const hasAnnotationConfigByName =
+ data.project.annotationConfigs.configs.reduce(
+ (acc, config) => {
+ acc[config.config.name!] = true;
+ return acc;
+ },
+ {} as Record
+ );
+ const filteredSpanAnnotations = data.filteredSpanAnnotations;
+ const annotations = useMemo(
+ () =>
+ deduplicateAnnotationsByName(
+ filteredSpanAnnotations.filter(
+ (annotation) => hasAnnotationConfigByName[annotation.name]
+ )
+ ),
+ [filteredSpanAnnotations, hasAnnotationConfigByName]
+ );
+ const hasAnnotations = annotations.length > 0;
+ return (
+
+
+
+ {annotations.map((annotation) => (
+ -
+
+
+ ))}
+
+
+
+ );
+}
diff --git a/app/src/pages/trace/SpanDetails.tsx b/app/src/pages/trace/SpanDetails.tsx
index 31df93648e..62dc19b5cc 100644
--- a/app/src/pages/trace/SpanDetails.tsx
+++ b/app/src/pages/trace/SpanDetails.tsx
@@ -7,6 +7,7 @@ import React, {
useRef,
useState,
} from "react";
+import { useHotkeys } from "react-hotkeys-hook";
import { graphql, useLazyLoadQuery } from "react-relay";
import {
type ImperativePanelHandle,
@@ -60,6 +61,7 @@ import {
Heading,
Icon,
Icons,
+ KeyboardToken,
LazyTabPanel,
LinkButton,
Tab,
@@ -79,12 +81,14 @@ import {
MarkdownDisplayProvider,
} from "@phoenix/components/markdown";
import { compactResizeHandleCSS } from "@phoenix/components/resize";
+import { ShareLinkButton } from "@phoenix/components/ShareLinkButton";
import { SpanKindIcon } from "@phoenix/components/trace";
import {
useNotifySuccess,
usePreferencesContext,
useTheme,
} from "@phoenix/contexts";
+import { useViewer } from "@phoenix/contexts/ViewerContext";
import { useDimensions } from "@phoenix/hooks";
import { useChatMessageStyles } from "@phoenix/hooks/useChatMessageStyles";
import {
@@ -162,6 +166,7 @@ const defaultCardProps: Partial = {
const CONDENSED_VIEW_CONTAINER_WIDTH_THRESHOLD = 900;
const ASIDE_PANEL_DEFAULT_SIZE = 33;
+const EDIT_ANNOTATION_HOTKEY = "e";
export function SpanDetails({
spanNodeId,
@@ -181,13 +186,14 @@ export function SpanDetails({
const asidePanelRef = useRef(null);
const spanDetailsContainerRef = useRef(null);
const spanDetailsContainerDimensions = useDimensions(spanDetailsContainerRef);
- const isCondensedView =
- spanDetailsContainerDimensions?.width &&
- spanDetailsContainerDimensions.width <
- CONDENSED_VIEW_CONTAINER_WIDTH_THRESHOLD;
+ const isCondensedView = spanDetailsContainerDimensions?.width
+ ? spanDetailsContainerDimensions.width <
+ CONDENSED_VIEW_CONTAINER_WIDTH_THRESHOLD
+ : true;
+ const { viewer } = useViewer();
const { span } = useLazyLoadQuery(
graphql`
- query SpanDetailsQuery($id: GlobalID!) {
+ query SpanDetailsQuery($id: GlobalID!, $filterUserIds: [GlobalID]) {
span: node(id: $id) {
__typename
... on Span {
@@ -243,13 +249,14 @@ export function SpanDetails({
}
...SpanHeader_span
...SpanFeedback_annotations
- ...SpanAside_span
+ ...SpanAside_span @arguments(filterUserIds: $filterUserIds)
}
}
}
`,
{
id: spanNodeId,
+ filterUserIds: viewer ? [viewer.id] : [null],
}
);
@@ -259,6 +266,16 @@ export function SpanDetails({
);
}
+ useHotkeys(
+ EDIT_ANNOTATION_HOTKEY,
+ () => {
+ if (!isAnnotatingSpans) {
+ setIsAnnotatingSpans(true);
+ }
+ },
+ { preventDefault: true }
+ );
+
const hasExceptions = useMemo(() => {
return spanHasException(span);
}, [span]);
@@ -320,9 +337,20 @@ export function SpanDetails({
}
}}
leadingVisual={} />}
+ trailingVisual={
+ !isCondensedView &&
+ !isAnnotatingSpans && (
+ {EDIT_ANNOTATION_HOTKEY}
+ )
+ }
>
{isCondensedView ? null : "Annotate"}
+
Info
- Feedback {span.spanAnnotations.length}
+ Annotations {span.spanAnnotations.length}
Attributes
@@ -386,6 +414,8 @@ export function SpanDetails({
order={2}
ref={asidePanelRef}
defaultSize={ASIDE_PANEL_DEFAULT_SIZE}
+ minSize={10}
+ collapsible
onCollapse={() => {
setIsAnnotatingSpans(false);
}}
diff --git a/app/src/pages/trace/SpanFeedback.tsx b/app/src/pages/trace/SpanFeedback.tsx
index 5afbe5d285..26d9fc4969 100644
--- a/app/src/pages/trace/SpanFeedback.tsx
+++ b/app/src/pages/trace/SpanFeedback.tsx
@@ -1,19 +1,27 @@
-import React, { useMemo } from "react";
+import React, { useMemo, useState } from "react";
import { graphql, useFragment } from "react-relay";
import {
ColumnDef,
flexRender,
getCoreRowModel,
+ getSortedRowModel,
+ SortingState,
useReactTable,
} from "@tanstack/react-table";
import { css } from "@emotion/react";
import { JSONText } from "@phoenix/components/code/JSONText";
+import { Icons } from "@phoenix/components/icon";
+import { Icon } from "@phoenix/components/icon/Icon";
+import { Flex } from "@phoenix/components/layout/Flex";
import { PreformattedTextCell } from "@phoenix/components/table";
import { tableCSS } from "@phoenix/components/table/styles";
import { TableEmpty } from "@phoenix/components/table/TableEmpty";
+import { TimestampCell } from "@phoenix/components/table/TimestampCell";
import { AnnotatorKindToken } from "@phoenix/components/trace/AnnotatorKindToken";
import { SpanAnnotationActionMenu } from "@phoenix/components/trace/SpanAnnotationActionMenu";
+import { UserPicture } from "@phoenix/components/user/UserPicture";
+import { Truncate } from "@phoenix/components/utility/Truncate";
import { useNotifyError, useNotifySuccess } from "@phoenix/contexts";
import {
@@ -62,6 +70,25 @@ function SpanAnnotationsTable({
return ;
},
},
+ {
+ header: "user",
+ accessorKey: "user",
+ size: 100,
+ cell: ({ row }) => {
+ const user = row.original.user;
+ const userName = user?.username || "system";
+ return (
+
+
+ {userName}
+
+ );
+ },
+ },
{
header: "label",
accessorKey: "label",
@@ -78,6 +105,16 @@ function SpanAnnotationsTable({
cell: PreformattedTextCell,
size: 400,
},
+ {
+ header: "source",
+ accessorKey: "source",
+ size: 100,
+ },
+ {
+ header: "identifier",
+ accessorKey: "identifier",
+ size: 100,
+ },
{
header: "metadata",
accessorKey: "metadata",
@@ -91,6 +128,18 @@ function SpanAnnotationsTable({
);
},
},
+ {
+ header: "created at",
+ accessorKey: "createdAt",
+ size: 100,
+ cell: TimestampCell,
+ },
+ {
+ header: "updated at",
+ accessorKey: "updatedAt",
+ size: 100,
+ cell: TimestampCell,
+ },
{
header: "",
accessorKey: "actions",
@@ -118,10 +167,18 @@ function SpanAnnotationsTable({
[notifyError, notifySuccess]
);
+ const [sorting, setSorting] = useState([
+ { id: "createdAt", desc: true },
+ ]);
const table = useReactTable({
columns,
data: tableData,
getCoreRowModel: getCoreRowModel(),
+ getSortedRowModel: getSortedRowModel(),
+ onSortingChange: setSorting,
+ state: {
+ sorting,
+ },
});
const rows = table.getRowModel().rows;
const isEmpty = rows.length === 0;
@@ -136,10 +193,39 @@ function SpanAnnotationsTable({
{header.isPlaceholder ? null : (
<>
- {flexRender(
- header.column.columnDef.header,
- header.getContext()
- )}
+
+
+ {flexRender(
+ header.column.columnDef.header,
+ header.getContext()
+ )}
+
+ {header.column.getIsSorted() ? (
+
+ ) : (
+
+ )
+ }
+ />
+ ) : null}
+
>
)}
|
@@ -185,6 +271,15 @@ export function SpanFeedback({ span }: { span: SpanFeedback_annotations$key }) {
explanation
metadata
annotatorKind
+ identifier
+ source
+ createdAt
+ updatedAt
+ user {
+ id
+ username
+ profilePictureUrl
+ }
}
}
`,
diff --git a/app/src/pages/trace/SpanNotesEditor.tsx b/app/src/pages/trace/SpanNotesEditor.tsx
new file mode 100644
index 0000000000..ff4a3d0c3e
--- /dev/null
+++ b/app/src/pages/trace/SpanNotesEditor.tsx
@@ -0,0 +1,154 @@
+import React, { startTransition, useEffect, useRef, useState } from "react";
+import { FocusScope } from "react-aria";
+import { graphql, useLazyLoadQuery, useMutation } from "react-relay";
+import { css } from "@emotion/react";
+
+import { Flex, View } from "@phoenix/components";
+import {
+ MessageBar,
+ MessageBubble,
+ MessageBubbleSkeleton,
+} from "@phoenix/components/chat";
+import { FocusHotkey } from "@phoenix/components/FocusHotkey";
+
+import { SpanNotesEditorAddNoteMutation } from "./__generated__/SpanNotesEditorAddNoteMutation.graphql";
+import { SpanNotesEditorQuery } from "./__generated__/SpanNotesEditorQuery.graphql";
+
+type SpanNotesEditorProps = {
+ spanNodeId: string;
+};
+
+export const NOTE_HOTKEY = "n";
+
+const notesListCSS = css`
+ width: 100%;
+ height: 100%;
+ max-height: 100%;
+ overflow: auto;
+ display: flex;
+ flex-direction: column;
+ gap: var(--ac-global-dimension-size-100);
+ padding: var(--ac-global-dimension-size-100);
+ box-sizing: border-box;
+ li {
+ width: 100%;
+ }
+`;
+
+export function SpanNotesEditor(props: SpanNotesEditorProps) {
+ const [fetchKey, setFetchKey] = useState(0);
+ const notesEndRef = useRef(null);
+ const data = useLazyLoadQuery(
+ graphql`
+ query SpanNotesEditorQuery($spanNodeId: GlobalID!) {
+ viewer {
+ id
+ username
+ profilePictureUrl
+ }
+ span: node(id: $spanNodeId) {
+ ... on Span {
+ spanAnnotations {
+ id
+ name
+ explanation
+ createdAt
+ user {
+ id
+ username
+ profilePictureUrl
+ }
+ }
+ ...SpanFeedback_annotations
+ }
+ }
+ }
+ `,
+ {
+ spanNodeId: props.spanNodeId,
+ },
+ {
+ fetchKey: fetchKey,
+ fetchPolicy: "store-and-network",
+ }
+ );
+
+ const [addNote, isAddingNote] = useMutation(
+ graphql`
+ mutation SpanNotesEditorAddNoteMutation($input: CreateSpanNoteInput!) {
+ createSpanNote(annotationInput: $input) {
+ __typename
+ }
+ }
+ `
+ );
+
+ const onAddNote = (note: string) => {
+ startTransition(() => {
+ addNote({
+ variables: {
+ input: {
+ note,
+ spanId: props.spanNodeId,
+ },
+ },
+ });
+ setFetchKey(fetchKey + 1);
+ });
+ };
+
+ const annotations = data.span?.spanAnnotations || [];
+
+ const notes = annotations.filter(
+ // we do this on the client side because one of our query fragments requires all annotations
+ // if we filtered here, we would not refresh the spanfeedback query when a note is added
+ (annotation) => annotation.name === "note"
+ );
+
+ useEffect(() => {
+ if (notesEndRef.current) {
+ notesEndRef.current.scrollIntoView({ behavior: "smooth" });
+ }
+ }, [notes]);
+
+ return (
+
+
+ {notes.map((note) => (
+ -
+
+
+ ))}
+
+
+
+
+
+
+
+ );
+}
+
+export function SpanNotesEditorSkeleton() {
+ return (
+
+
+
+
+
+
+
+ {}} placeholder="Add a note" />
+
+ );
+}
diff --git a/app/src/pages/trace/TraceDetails.tsx b/app/src/pages/trace/TraceDetails.tsx
index b17a9da7e2..c5c0189fb0 100644
--- a/app/src/pages/trace/TraceDetails.tsx
+++ b/app/src/pages/trace/TraceDetails.tsx
@@ -1,4 +1,4 @@
-import React, { PropsWithChildren, Suspense, useEffect, useMemo } from "react";
+import React, { PropsWithChildren, Suspense, useMemo } from "react";
import { graphql, useLazyLoadQuery } from "react-relay";
import { Panel, PanelGroup, PanelResizeHandle } from "react-resizable-panels";
import { useParams, useSearchParams } from "react-router";
@@ -10,6 +10,7 @@ import { LatencyText } from "@phoenix/components/trace/LatencyText";
import { SpanStatusCodeIcon } from "@phoenix/components/trace/SpanStatusCodeIcon";
import { TraceTree } from "@phoenix/components/trace/TraceTree";
import { useSpanStatusCodeColor } from "@phoenix/components/trace/useSpanStatusCodeColor";
+import { SELECTED_SPAN_NODE_ID_PARAM } from "@phoenix/constants/searchParams";
import {
TraceDetailsQuery,
@@ -18,8 +19,6 @@ import {
import { SpanDetails } from "./SpanDetails";
import { TraceHeaderRootSpanAnnotations } from "./TraceHeaderRootSpanAnnotations";
-export const SELECTED_SPAN_NODE_ID_URL_PARAM = "selectedSpanNodeId";
-
type Span = NonNullable<
TraceDetailsQuery$data["project"]["trace"]
>["spans"]["edges"][number]["span"];
@@ -74,6 +73,18 @@ export function TraceDetails(props: TraceDetailsProps) {
tokenCountTotal
tokenCountPrompt
tokenCountCompletion
+ spanAnnotationSummaries {
+ labels
+ count
+ labelCount
+ labelFractions {
+ fraction
+ label
+ }
+ name
+ scoreCount
+ meanScore
+ }
}
}
}
@@ -94,24 +105,10 @@ export function TraceDetails(props: TraceDetailsProps) {
const gqlSpans = data.project.trace?.spans.edges || [];
return gqlSpans.map((node) => node.span);
}, [data]);
- const urlSpanNodeId = searchParams.get(SELECTED_SPAN_NODE_ID_URL_PARAM);
+ const urlSpanNodeId = searchParams.get(SELECTED_SPAN_NODE_ID_PARAM);
const selectedSpanNodeId = urlSpanNodeId ?? spansList[0].id;
const rootSpan = useMemo(() => findRootSpan(spansList), [spansList]);
- // Clear the selected span param when the component unmounts
- useEffect(() => {
- return () => {
- setSearchParams(
- (searchParams) => {
- searchParams.delete(SELECTED_SPAN_NODE_ID_URL_PARAM);
- return searchParams;
- },
- { replace: true }
- );
- };
- // eslint-disable-next-line react-compiler/react-compiler
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, []);
return (
{
setSearchParams(
(searchParams) => {
- searchParams.set(SELECTED_SPAN_NODE_ID_URL_PARAM, span.id);
+ searchParams.set(SELECTED_SPAN_NODE_ID_PARAM, span.id);
return searchParams;
},
{ replace: true }
diff --git a/app/src/pages/trace/TraceDetailsPaginator.tsx b/app/src/pages/trace/TraceDetailsPaginator.tsx
new file mode 100644
index 0000000000..1bce83c27b
--- /dev/null
+++ b/app/src/pages/trace/TraceDetailsPaginator.tsx
@@ -0,0 +1,115 @@
+import React from "react";
+import { Tooltip, TooltipTrigger } from "react-aria-components";
+import { useHotkeys } from "react-hotkeys-hook";
+import { css } from "@emotion/react";
+
+import {
+ Button,
+ Flex,
+ Group,
+ Icon,
+ Icons,
+ KeyboardToken,
+ View,
+} from "@phoenix/components";
+import {
+ getNeighbors,
+ useTracePagination,
+} from "@phoenix/pages/trace/TracePaginationContext";
+
+export const NEXT_TRACE_HOTKEY = "j";
+export const PREVIOUS_TRACE_HOTKEY = "k";
+
+export const TraceDetailsPaginator = ({
+ currentId,
+}: {
+ currentId?: string;
+}) => {
+ const pagination = useTracePagination();
+
+ useHotkeys(NEXT_TRACE_HOTKEY, () => {
+ if (pagination) {
+ pagination.next(currentId);
+ }
+ });
+
+ useHotkeys(PREVIOUS_TRACE_HOTKEY, () => {
+ if (pagination) {
+ pagination.previous(currentId);
+ }
+ });
+
+ if (!pagination || !pagination.traceSequence.length) {
+ return null;
+ }
+
+ const { previous, next, traceSequence } = pagination;
+ const { nextTraceId, previousTraceId } = getNeighbors(
+ traceSequence,
+ currentId
+ );
+ const hasPrevious = !!previousTraceId;
+ const hasNext = !!nextTraceId;
+
+ return (
+
+
+
+ } />}
+ aria-label="Next trace"
+ isDisabled={!hasNext}
+ onPress={() => next(currentId)}
+ />
+
+
+
+ Next trace
+ {NEXT_TRACE_HOTKEY}
+
+
+
+
+
+ } />}
+ aria-label="Previous trace"
+ isDisabled={!hasPrevious}
+ onPress={() => previous(currentId)}
+ />
+
+
+
+ Previous trace
+ {PREVIOUS_TRACE_HOTKEY}
+
+
+
+
+
+
+ );
+};
diff --git a/app/src/pages/trace/TraceHeaderRootSpanAnnotations.tsx b/app/src/pages/trace/TraceHeaderRootSpanAnnotations.tsx
index 4c9d206e75..df44ae3fe7 100644
--- a/app/src/pages/trace/TraceHeaderRootSpanAnnotations.tsx
+++ b/app/src/pages/trace/TraceHeaderRootSpanAnnotations.tsx
@@ -1,26 +1,18 @@
import React from "react";
-import { graphql, useLazyLoadQuery } from "react-relay";
+import { graphql, useFragment, useLazyLoadQuery } from "react-relay";
-import { Flex, Text } from "../../components";
-import {
- AnnotationLabel,
- AnnotationTooltip,
-} from "../../components/annotation";
+import { AnnotationSummaryGroupStacks } from "@phoenix/components/annotation/AnnotationSummaryGroup";
+import { TraceHeaderRootSpanAnnotationsFragment$key } from "@phoenix/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsFragment.graphql";
import { TraceHeaderRootSpanAnnotationsQuery } from "./__generated__/TraceHeaderRootSpanAnnotationsQuery.graphql";
export function TraceHeaderRootSpanAnnotations({ spanId }: { spanId: string }) {
- const data = useLazyLoadQuery(
+ const query = useLazyLoadQuery(
graphql`
query TraceHeaderRootSpanAnnotationsQuery($spanId: GlobalID!) {
span: node(id: $spanId) {
... on Span {
- spanAnnotations {
- name
- label
- score
- annotatorKind
- }
+ ...TraceHeaderRootSpanAnnotationsFragment
}
}
}
@@ -30,25 +22,15 @@ export function TraceHeaderRootSpanAnnotations({ spanId }: { spanId: string }) {
fetchPolicy: "store-and-network",
}
);
- const spanAnnotations = data.span.spanAnnotations ?? [];
- const hasAnnotations = spanAnnotations.length > 0;
- return hasAnnotations ? (
-
-
- Feedback
-
-
- {spanAnnotations.map((annotation) => {
- return (
-
-
-
- );
- })}
-
-
- ) : null;
+ const span = useFragment(
+ graphql`
+ fragment TraceHeaderRootSpanAnnotationsFragment on Span {
+ ...AnnotationSummaryGroup
+ }
+ `,
+ query.span
+ );
+ return (
+ null} />
+ );
}
diff --git a/app/src/pages/trace/TracePage.tsx b/app/src/pages/trace/TracePage.tsx
index bdca369221..2c0c5a5221 100644
--- a/app/src/pages/trace/TracePage.tsx
+++ b/app/src/pages/trace/TracePage.tsx
@@ -1,9 +1,12 @@
-import React from "react";
-import { useNavigate, useParams } from "react-router";
+import React, { Suspense } from "react";
+import { useNavigate, useParams, useSearchParams } from "react-router";
import { Dialog, DialogContainer } from "@arizeai/components";
+import { Loading } from "@phoenix/components";
+import { SELECTED_SPAN_NODE_ID_PARAM } from "@phoenix/constants/searchParams";
import { useProjectRootPath } from "@phoenix/hooks/useProjectRootPath";
+import { TraceDetailsPaginator } from "@phoenix/pages/trace/TraceDetailsPaginator";
import { TraceDetails } from "./TraceDetails";
@@ -12,20 +15,37 @@ import { TraceDetails } from "./TraceDetails";
*/
export function TracePage() {
const { traceId, projectId } = useParams();
+ const [searchParams] = useSearchParams();
const navigate = useNavigate();
const { rootPath, tab } = useProjectRootPath();
+ const selectedSpanNodeId = searchParams.get(SELECTED_SPAN_NODE_ID_PARAM);
+
+ // if we are focused on a particular span, use that as the subjectId
+ // otherwise, use the traceId
+ const paginationSubjectId = selectedSpanNodeId || traceId;
return (
navigate(`${rootPath}/${tab}`)}
+ onDismiss={() => {
+ navigate(`${rootPath}/${tab}`);
+ }}
>
-
);
diff --git a/app/src/pages/trace/TracePaginationContext.tsx b/app/src/pages/trace/TracePaginationContext.tsx
new file mode 100644
index 0000000000..bd6a8d4bc6
--- /dev/null
+++ b/app/src/pages/trace/TracePaginationContext.tsx
@@ -0,0 +1,149 @@
+import React, {
+ createContext,
+ PropsWithChildren,
+ useCallback,
+ useContext,
+ useState,
+} from "react";
+import { useLocation, useNavigate } from "react-router";
+
+import { SELECTED_SPAN_NODE_ID_PARAM } from "@phoenix/constants/searchParams";
+
+/**
+ * A sequence of traceId/spanId pairs that represent the trace sequence.
+ * The sequence is used to navigate between traces, or spans within a trace.
+ */
+type TraceSequence = { traceId: string; spanId: string }[];
+
+type TracePaginationContextType = {
+ traceSequence: TraceSequence;
+ next: (currentId?: string) => void;
+ previous: (currentId?: string) => void;
+ setTraceSequence: (traceSequence: TraceSequence) => void;
+};
+
+export const TracePaginationContext =
+ createContext(null);
+
+export const useTracePagination = () => {
+ const context = useContext(TracePaginationContext);
+
+ return context;
+};
+
+/**
+ * Get the next and previous traceId/spanId pairs based on the current traceId/spanId
+ * @param traceSequence - The sequence of traceId/spanId pairs to paginate against, this could be from the spans table for example
+ * @param currentId - The current traceId or spanId, the first sequence with a matching traceId or spanId will be matched against
+ * @returns The next and previous traceId and spanId, if they exist in the sequence
+ */
+export const getNeighbors = (
+ traceSequence: { traceId: string; spanId: string }[],
+ /** May be a traceId or a spanId */
+ currentId?: string
+) => {
+ const currentIndex = traceSequence.findIndex(
+ ({ traceId, spanId }) =>
+ currentId && (traceId === currentId || spanId === currentId)
+ );
+ const previousIndex = currentIndex - 1;
+ const nextIndex = currentIndex + 1;
+ const previousSequenceMember = traceSequence[previousIndex];
+ const nextSequenceMember = traceSequence[nextIndex];
+ return {
+ nextTraceId: nextSequenceMember?.traceId,
+ nextSpanId: nextSequenceMember?.spanId,
+ previousTraceId: previousSequenceMember?.traceId,
+ previousSpanId: previousSequenceMember?.spanId,
+ };
+};
+
+/**
+ * Make the next and previous trace urls based on the current traceId, spanId, and url pathname
+ * @param location - The location object from useLocation
+ * @param traceSequence - The sequence of traceId/spanId pairs to paginate against, this could be from the spans table for example
+ * @param currentId - The current traceId or spanId, the first sequence with a matching traceId or spanId will be matched against
+ * @returns The next and previous trace urls, if they exist in the sequence
+ */
+export const makeTraceUrls = (
+ location: ReturnType,
+ traceSequence: { traceId: string; spanId: string }[],
+ currentId?: string
+) => {
+ const { nextTraceId, previousTraceId, nextSpanId, previousSpanId } =
+ getNeighbors(traceSequence, currentId);
+ // split up the url pathname into its components
+ // e.g. /projects/my-project/traces/123/spans/456 -> ["projects", "my-project", "traces", "123", "spans", "456"]
+ // we only really care about the last two components, which are the projectId and the resource
+ // resource is either "traces" or "spans", which we need to keep track of so we can build the correct url
+ const [projects, projectId, resource] = location.pathname
+ .split("/")
+ .filter((part) => part !== "");
+ const makeUrl = (traceId: string, currentSpanId?: string) => {
+ // we always navigate directly to a traceId
+ let path = `/${projects}/${projectId}/${resource}/${traceId}`;
+ // we add a selected span node id if provided to makeUrl
+ if (currentSpanId) {
+ path += `?${SELECTED_SPAN_NODE_ID_PARAM}=${currentSpanId}`;
+ }
+ return path;
+ };
+ const hasNext = !!nextTraceId;
+ const hasPrevious = !!previousTraceId;
+ // we build the next and previous trace urls if the traceId is present for those directions
+ return {
+ nextTracePath: hasNext ? makeUrl(nextTraceId, nextSpanId) : null,
+ previousTracePath: hasPrevious
+ ? makeUrl(previousTraceId, previousSpanId)
+ : null,
+ };
+};
+
+export const TracePaginationProvider = ({ children }: PropsWithChildren) => {
+ const navigate = useNavigate();
+ const location = useLocation();
+ const [traceSequence, setTraceSequence] = useState<
+ { traceId: string; spanId: string }[]
+ >([]);
+
+ const next = useCallback(
+ (currentId?: string) => {
+ const { nextTracePath } = makeTraceUrls(
+ location,
+ traceSequence,
+ currentId
+ );
+ if (nextTracePath) {
+ navigate(nextTracePath);
+ }
+ },
+ [navigate, location, traceSequence]
+ );
+
+ const previous = useCallback(
+ (currentId?: string) => {
+ const { previousTracePath } = makeTraceUrls(
+ location,
+ traceSequence,
+ currentId
+ );
+ if (previousTracePath) {
+ navigate(previousTracePath);
+ }
+ },
+ [navigate, location, traceSequence]
+ );
+
+ return (
+
+ {children}
+
+ );
+};
diff --git a/app/src/pages/trace/__generated__/SessionDetailsQuery.graphql.ts b/app/src/pages/trace/__generated__/SessionDetailsQuery.graphql.ts
index 340d4da75d..b8f0d5997a 100644
--- a/app/src/pages/trace/__generated__/SessionDetailsQuery.graphql.ts
+++ b/app/src/pages/trace/__generated__/SessionDetailsQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<0201f3f1d37d413eed0774de949bfbfb>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,7 +9,7 @@
// @ts-nocheck
import { ConcreteRequest } from 'relay-runtime';
-export type AnnotatorKind = "HUMAN" | "LLM";
+import { FragmentRefs } from "relay-runtime";
export type MimeType = "json" | "text";
export type SessionDetailsQuery$variables = {
id: string;
@@ -46,15 +46,9 @@ export type SessionDetailsQuery$data = {
readonly project: {
readonly id: string;
};
- readonly spanAnnotations: ReadonlyArray<{
- readonly annotatorKind: AnnotatorKind;
- readonly explanation: string | null;
- readonly label: string | null;
- readonly name: string;
- readonly score: number | null;
- }>;
readonly spanId: string;
readonly startTime: string;
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationSummaryGroup">;
} | null;
readonly traceId: string;
};
@@ -86,270 +80,190 @@ v2 = {
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "id",
+ "name": "numTraces",
"storageKey": null
},
-v3 = [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "value",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "mimeType",
- "storageKey": null
- }
-],
-v4 = {
- "kind": "InlineFragment",
+v3 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "TokenUsage",
+ "kind": "LinkedField",
+ "name": "tokenUsage",
+ "plural": false,
"selections": [
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "numTraces",
+ "name": "total",
"storageKey": null
},
{
"alias": null,
"args": null,
- "concreteType": "TokenUsage",
- "kind": "LinkedField",
- "name": "tokenUsage",
- "plural": false,
- "selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "total",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "completion",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "prompt",
- "storageKey": null
- }
- ],
+ "kind": "ScalarField",
+ "name": "completion",
"storageKey": null
},
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "sessionId",
+ "name": "prompt",
"storageKey": null
- },
- {
- "alias": "latencyP50",
- "args": [
- {
- "kind": "Literal",
- "name": "probability",
- "value": 0.5
- }
- ],
- "kind": "ScalarField",
- "name": "traceLatencyMsQuantile",
- "storageKey": "traceLatencyMsQuantile(probability:0.5)"
- },
+ }
+ ],
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "sessionId",
+ "storageKey": null
+},
+v5 = {
+ "alias": "latencyP50",
+ "args": [
{
- "alias": null,
- "args": null,
- "concreteType": "TraceConnection",
- "kind": "LinkedField",
- "name": "traces",
- "plural": false,
- "selections": [
- {
- "alias": null,
- "args": null,
- "concreteType": "TraceEdge",
- "kind": "LinkedField",
- "name": "edges",
- "plural": true,
- "selections": [
- {
- "alias": "trace",
- "args": null,
- "concreteType": "Trace",
- "kind": "LinkedField",
- "name": "node",
- "plural": false,
- "selections": [
- (v2/*: any*/),
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "traceId",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "concreteType": "Span",
- "kind": "LinkedField",
- "name": "rootSpan",
- "plural": false,
- "selections": [
- (v2/*: any*/),
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "attributes",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "concreteType": "Project",
- "kind": "LinkedField",
- "name": "project",
- "plural": false,
- "selections": [
- (v2/*: any*/)
- ],
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "concreteType": "SpanIOValue",
- "kind": "LinkedField",
- "name": "input",
- "plural": false,
- "selections": (v3/*: any*/),
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "concreteType": "SpanIOValue",
- "kind": "LinkedField",
- "name": "output",
- "plural": false,
- "selections": (v3/*: any*/),
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "cumulativeTokenCountTotal",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "cumulativeTokenCountCompletion",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "cumulativeTokenCountPrompt",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "latencyMs",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "startTime",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "concreteType": "SpanAnnotation",
- "kind": "LinkedField",
- "name": "spanAnnotations",
- "plural": true,
- "selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "name",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "score",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "explanation",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "annotatorKind",
- "storageKey": null
- }
- ],
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "spanId",
- "storageKey": null
- }
- ],
- "storageKey": null
- }
- ],
- "storageKey": null
- }
- ],
- "storageKey": null
- }
- ],
- "storageKey": null
+ "kind": "Literal",
+ "name": "probability",
+ "value": 0.5
}
],
- "type": "ProjectSession",
- "abstractKey": null
+ "kind": "ScalarField",
+ "name": "traceLatencyMsQuantile",
+ "storageKey": "traceLatencyMsQuantile(probability:0.5)"
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "traceId",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "attributes",
+ "storageKey": null
+},
+v9 = [
+ (v6/*: any*/)
+],
+v10 = [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "value",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "mimeType",
+ "storageKey": null
+ }
+],
+v11 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "SpanIOValue",
+ "kind": "LinkedField",
+ "name": "input",
+ "plural": false,
+ "selections": (v10/*: any*/),
+ "storageKey": null
+},
+v12 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "SpanIOValue",
+ "kind": "LinkedField",
+ "name": "output",
+ "plural": false,
+ "selections": (v10/*: any*/),
+ "storageKey": null
+},
+v13 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cumulativeTokenCountTotal",
+ "storageKey": null
+},
+v14 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cumulativeTokenCountCompletion",
+ "storageKey": null
+},
+v15 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "cumulativeTokenCountPrompt",
+ "storageKey": null
+},
+v16 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "latencyMs",
+ "storageKey": null
+},
+v17 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "startTime",
+ "storageKey": null
+},
+v18 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "spanId",
+ "storageKey": null
+},
+v19 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v20 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v21 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v22 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
};
return {
"fragment": {
@@ -366,7 +280,88 @@ return {
"name": "node",
"plural": false,
"selections": [
- (v4/*: any*/)
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "TraceConnection",
+ "kind": "LinkedField",
+ "name": "traces",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "TraceEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "trace",
+ "args": null,
+ "concreteType": "Trace",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ (v7/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Span",
+ "kind": "LinkedField",
+ "name": "rootSpan",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ (v8/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": (v9/*: any*/),
+ "storageKey": null
+ },
+ (v11/*: any*/),
+ (v12/*: any*/),
+ (v13/*: any*/),
+ (v14/*: any*/),
+ (v15/*: any*/),
+ (v16/*: any*/),
+ (v17/*: any*/),
+ (v18/*: any*/),
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationSummaryGroup"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "ProjectSession",
+ "abstractKey": null
+ }
],
"storageKey": null
}
@@ -388,35 +383,282 @@ return {
"name": "node",
"plural": false,
"selections": [
+ (v19/*: any*/),
{
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "__typename",
- "storageKey": null
+ "kind": "InlineFragment",
+ "selections": [
+ (v2/*: any*/),
+ (v3/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "TraceConnection",
+ "kind": "LinkedField",
+ "name": "traces",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "TraceEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "trace",
+ "args": null,
+ "concreteType": "Trace",
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ (v7/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Span",
+ "kind": "LinkedField",
+ "name": "rootSpan",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ (v8/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v19/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v6/*: any*/),
+ (v20/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v21/*: any*/),
+ (v22/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": (v9/*: any*/),
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v11/*: any*/),
+ (v12/*: any*/),
+ (v13/*: any*/),
+ (v14/*: any*/),
+ (v15/*: any*/),
+ (v16/*: any*/),
+ (v17/*: any*/),
+ (v18/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "SpanAnnotation",
+ "kind": "LinkedField",
+ "name": "spanAnnotations",
+ "plural": true,
+ "selections": [
+ (v6/*: any*/),
+ (v20/*: any*/),
+ (v21/*: any*/),
+ (v22/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotatorKind",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "createdAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v21/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v20/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "ProjectSession",
+ "abstractKey": null
},
- (v4/*: any*/),
{
"kind": "TypeDiscriminator",
"abstractKey": "__isNode"
},
- (v2/*: any*/)
+ (v6/*: any*/)
],
"storageKey": null
}
]
},
"params": {
- "cacheID": "537f0044aba3bca68c76ef30c43a810c",
+ "cacheID": "2a154f223a55d4092a7477e2db8a8ec7",
"id": null,
"metadata": {},
"name": "SessionDetailsQuery",
"operationKind": "query",
- "text": "query SessionDetailsQuery(\n $id: GlobalID!\n) {\n session: node(id: $id) {\n __typename\n ... on ProjectSession {\n numTraces\n tokenUsage {\n total\n completion\n prompt\n }\n sessionId\n latencyP50: traceLatencyMsQuantile(probability: 0.5)\n traces {\n edges {\n trace: node {\n id\n traceId\n rootSpan {\n id\n attributes\n project {\n id\n }\n input {\n value\n mimeType\n }\n output {\n value\n mimeType\n }\n cumulativeTokenCountTotal\n cumulativeTokenCountCompletion\n cumulativeTokenCountPrompt\n latencyMs\n startTime\n spanAnnotations {\n name\n label\n score\n explanation\n annotatorKind\n }\n spanId\n }\n }\n }\n }\n }\n __isNode: __typename\n id\n }\n}\n"
+ "text": "query SessionDetailsQuery(\n $id: GlobalID!\n) {\n session: node(id: $id) {\n __typename\n ... on ProjectSession {\n numTraces\n tokenUsage {\n total\n completion\n prompt\n }\n sessionId\n latencyP50: traceLatencyMsQuantile(probability: 0.5)\n traces {\n edges {\n trace: node {\n id\n traceId\n rootSpan {\n id\n attributes\n project {\n id\n }\n input {\n value\n mimeType\n }\n output {\n value\n mimeType\n }\n cumulativeTokenCountTotal\n cumulativeTokenCountCompletion\n cumulativeTokenCountPrompt\n latencyMs\n startTime\n spanId\n ...AnnotationSummaryGroup\n }\n }\n }\n }\n }\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n"
}
};
})();
-(node as any).hash = "48d10974b52b961361ff750b74e21d8e";
+(node as any).hash = "176ae04ef3520c758c1710c5900b1c0c";
export default node;
diff --git a/app/src/pages/trace/__generated__/SpanAsideAnnotationList_span.graphql.ts b/app/src/pages/trace/__generated__/SpanAsideAnnotationList_span.graphql.ts
new file mode 100644
index 0000000000..8095bab074
--- /dev/null
+++ b/app/src/pages/trace/__generated__/SpanAsideAnnotationList_span.graphql.ts
@@ -0,0 +1,213 @@
+/**
+ * @generated SignedSource<>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+export type AnnotatorKind = "CODE" | "HUMAN" | "LLM";
+import { FragmentRefs } from "relay-runtime";
+export type SpanAsideAnnotationList_span$data = {
+ readonly filteredSpanAnnotations: ReadonlyArray<{
+ readonly annotatorKind: AnnotatorKind;
+ readonly createdAt: string;
+ readonly explanation: string | null;
+ readonly id: string;
+ readonly label: string | null;
+ readonly name: string;
+ readonly score: number | null;
+ }>;
+ readonly project: {
+ readonly annotationConfigs: {
+ readonly configs: ReadonlyArray<{
+ readonly config: {
+ readonly id?: string;
+ readonly name?: string;
+ };
+ }>;
+ };
+ readonly id: string;
+ };
+ readonly " $fragmentType": "SpanAsideAnnotationList_span";
+};
+export type SpanAsideAnnotationList_span$key = {
+ readonly " $data"?: SpanAsideAnnotationList_span$data;
+ readonly " $fragmentSpreads": FragmentRefs<"SpanAsideAnnotationList_span">;
+};
+
+const node: ReaderFragment = (function(){
+var v0 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+};
+return {
+ "argumentDefinitions": [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "filterUserIds"
+ }
+ ],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SpanAsideAnnotationList_span",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v0/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": "configs",
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "config",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v1/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": "filteredSpanAnnotations",
+ "args": [
+ {
+ "fields": [
+ {
+ "kind": "Literal",
+ "name": "exclude",
+ "value": {
+ "names": [
+ "note"
+ ]
+ }
+ },
+ {
+ "fields": [
+ {
+ "kind": "Variable",
+ "name": "userIds",
+ "variableName": "filterUserIds"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "include"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "filter"
+ }
+ ],
+ "concreteType": "SpanAnnotation",
+ "kind": "LinkedField",
+ "name": "spanAnnotations",
+ "plural": true,
+ "selections": [
+ (v0/*: any*/),
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotatorKind",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "explanation",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "createdAt",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Span",
+ "abstractKey": null
+};
+})();
+
+(node as any).hash = "c50e31b549b3d291e5dfba3b23e550bb";
+
+export default node;
diff --git a/app/src/pages/trace/__generated__/SpanAsideSpanQuery.graphql.ts b/app/src/pages/trace/__generated__/SpanAsideSpanQuery.graphql.ts
deleted file mode 100644
index 2dd81ee68a..0000000000
--- a/app/src/pages/trace/__generated__/SpanAsideSpanQuery.graphql.ts
+++ /dev/null
@@ -1,220 +0,0 @@
-/**
- * @generated SignedSource<>
- * @lightSyntaxTransform
- * @nogrep
- */
-
-/* tslint:disable */
-/* eslint-disable */
-// @ts-nocheck
-
-import { ConcreteRequest } from 'relay-runtime';
-import { FragmentRefs } from "relay-runtime";
-export type SpanAsideSpanQuery$variables = {
- id: string;
-};
-export type SpanAsideSpanQuery$data = {
- readonly node: {
- readonly " $fragmentSpreads": FragmentRefs<"SpanAside_span">;
- };
-};
-export type SpanAsideSpanQuery = {
- response: SpanAsideSpanQuery$data;
- variables: SpanAsideSpanQuery$variables;
-};
-
-const node: ConcreteRequest = (function(){
-var v0 = [
- {
- "defaultValue": null,
- "kind": "LocalArgument",
- "name": "id"
- }
-],
-v1 = [
- {
- "kind": "Variable",
- "name": "id",
- "variableName": "id"
- }
-],
-v2 = {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "id",
- "storageKey": null
-};
-return {
- "fragment": {
- "argumentDefinitions": (v0/*: any*/),
- "kind": "Fragment",
- "metadata": null,
- "name": "SpanAsideSpanQuery",
- "selections": [
- {
- "alias": null,
- "args": (v1/*: any*/),
- "concreteType": null,
- "kind": "LinkedField",
- "name": "node",
- "plural": false,
- "selections": [
- {
- "args": null,
- "kind": "FragmentSpread",
- "name": "SpanAside_span"
- }
- ],
- "storageKey": null
- }
- ],
- "type": "Query",
- "abstractKey": null
- },
- "kind": "Request",
- "operation": {
- "argumentDefinitions": (v0/*: any*/),
- "kind": "Operation",
- "name": "SpanAsideSpanQuery",
- "selections": [
- {
- "alias": null,
- "args": (v1/*: any*/),
- "concreteType": null,
- "kind": "LinkedField",
- "name": "node",
- "plural": false,
- "selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "__typename",
- "storageKey": null
- },
- {
- "kind": "TypeDiscriminator",
- "abstractKey": "__isNode"
- },
- (v2/*: any*/),
- {
- "kind": "InlineFragment",
- "selections": [
- {
- "alias": null,
- "args": null,
- "concreteType": "Project",
- "kind": "LinkedField",
- "name": "project",
- "plural": false,
- "selections": [
- (v2/*: any*/)
- ],
- "storageKey": null
- },
- {
- "alias": "code",
- "args": null,
- "kind": "ScalarField",
- "name": "statusCode",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "startTime",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "endTime",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "tokenCountTotal",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "tokenCountPrompt",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "tokenCountCompletion",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "concreteType": "SpanAnnotation",
- "kind": "LinkedField",
- "name": "spanAnnotations",
- "plural": true,
- "selections": [
- (v2/*: any*/),
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "name",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "annotatorKind",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "score",
- "storageKey": null
- }
- ],
- "storageKey": null
- }
- ],
- "type": "Span",
- "abstractKey": null
- }
- ],
- "storageKey": null
- }
- ]
- },
- "params": {
- "cacheID": "1a86b74a3488b1d784333546762802fa",
- "id": null,
- "metadata": {},
- "name": "SpanAsideSpanQuery",
- "operationKind": "query",
- "text": "query SpanAsideSpanQuery(\n $id: GlobalID!\n) {\n node(id: $id) {\n __typename\n ...SpanAside_span\n __isNode: __typename\n id\n }\n}\n\nfragment SpanAside_span on Span {\n id\n project {\n id\n }\n code: statusCode\n startTime\n endTime\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n spanAnnotations {\n id\n name\n label\n annotatorKind\n score\n }\n}\n"
- }
-};
-})();
-
-(node as any).hash = "40874c824cdcd1a4790f1960b840457c";
-
-export default node;
diff --git a/app/src/pages/trace/__generated__/SpanAside_span.graphql.ts b/app/src/pages/trace/__generated__/SpanAside_span.graphql.ts
index 76cb0dd186..7ae6ca14eb 100644
--- a/app/src/pages/trace/__generated__/SpanAside_span.graphql.ts
+++ b/app/src/pages/trace/__generated__/SpanAside_span.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<258b2ed39a2e2a1ee47bad48c1e1eb48>>
+ * @generated SignedSource<<86c0503933a78f382711f26c70401692>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,7 +9,8 @@
// @ts-nocheck
import { ReaderFragment } from 'relay-runtime';
-export type AnnotatorKind = "HUMAN" | "LLM";
+export type AnnotationType = "CATEGORICAL" | "CONTINUOUS" | "FREEFORM";
+export type OptimizationDirection = "MAXIMIZE" | "MINIMIZE" | "NONE";
export type SpanStatusCode = "ERROR" | "OK" | "UNSET";
import { FragmentRefs } from "relay-runtime";
export type SpanAside_span$data = {
@@ -17,19 +18,31 @@ export type SpanAside_span$data = {
readonly endTime: string | null;
readonly id: string;
readonly project: {
+ readonly annotationConfigs: {
+ readonly configs: ReadonlyArray<{
+ readonly config: {
+ readonly annotationType?: AnnotationType;
+ readonly description?: string | null;
+ readonly id?: string;
+ readonly lowerBound?: number | null;
+ readonly name?: string;
+ readonly optimizationDirection?: OptimizationDirection;
+ readonly upperBound?: number | null;
+ readonly values?: ReadonlyArray<{
+ readonly label: string;
+ readonly score: number | null;
+ }>;
+ };
+ }>;
+ };
readonly id: string;
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationConfigListProjectAnnotationConfigFragment">;
};
- readonly spanAnnotations: ReadonlyArray<{
- readonly annotatorKind: AnnotatorKind;
- readonly id: string;
- readonly label: string | null;
- readonly name: string;
- readonly score: number | null;
- }>;
readonly startTime: string;
readonly tokenCountCompletion: number | null;
readonly tokenCountPrompt: number | null;
readonly tokenCountTotal: number | null;
+ readonly " $fragmentSpreads": FragmentRefs<"SpanAsideAnnotationList_span" | "TraceHeaderRootSpanAnnotationsFragment">;
readonly " $fragmentType": "SpanAside_span";
};
export type SpanAside_span$key = {
@@ -37,8 +50,6 @@ export type SpanAside_span$key = {
readonly " $fragmentSpreads": FragmentRefs<"SpanAside_span">;
};
-import SpanAsideSpanQuery_graphql from './SpanAsideSpanQuery.graphql';
-
const node: ReaderFragment = (function(){
var v0 = {
"alias": null,
@@ -46,23 +57,24 @@ var v0 = {
"kind": "ScalarField",
"name": "id",
"storageKey": null
+},
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
};
return {
- "argumentDefinitions": [],
- "kind": "Fragment",
- "metadata": {
- "refetch": {
- "connection": null,
- "fragmentPathInResult": [
- "node"
- ],
- "operation": SpanAsideSpanQuery_graphql,
- "identifierInfo": {
- "identifierField": "id",
- "identifierQueryVariableName": "id"
- }
+ "argumentDefinitions": [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "filterUserIds"
}
- },
+ ],
+ "kind": "Fragment",
+ "metadata": null,
"name": "SpanAside_span",
"selections": [
(v0/*: any*/),
@@ -74,7 +86,143 @@ return {
"name": "project",
"plural": false,
"selections": [
- (v0/*: any*/)
+ (v0/*: any*/),
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationConfigListProjectAnnotationConfigFragment"
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": "configs",
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "config",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v0/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ }
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v1/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
],
"storageKey": null
},
@@ -121,44 +269,20 @@ return {
"storageKey": null
},
{
- "alias": null,
"args": null,
- "concreteType": "SpanAnnotation",
- "kind": "LinkedField",
- "name": "spanAnnotations",
- "plural": true,
- "selections": [
- (v0/*: any*/),
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "name",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "annotatorKind",
- "storageKey": null
- },
+ "kind": "FragmentSpread",
+ "name": "TraceHeaderRootSpanAnnotationsFragment"
+ },
+ {
+ "args": [
{
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "score",
- "storageKey": null
+ "kind": "Variable",
+ "name": "filterUserIds",
+ "variableName": "filterUserIds"
}
],
- "storageKey": null
+ "kind": "FragmentSpread",
+ "name": "SpanAsideAnnotationList_span"
}
],
"type": "Span",
@@ -166,6 +290,6 @@ return {
};
})();
-(node as any).hash = "40874c824cdcd1a4790f1960b840457c";
+(node as any).hash = "f0ee814959700becf7d7d066e15b4a7e";
export default node;
diff --git a/app/src/pages/trace/__generated__/SpanDetailsQuery.graphql.ts b/app/src/pages/trace/__generated__/SpanDetailsQuery.graphql.ts
index eb9fbb9a9f..3b6dfc511b 100644
--- a/app/src/pages/trace/__generated__/SpanDetailsQuery.graphql.ts
+++ b/app/src/pages/trace/__generated__/SpanDetailsQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<71eb3aef63505f2b3ff9fed9fa0014ee>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -14,6 +14,7 @@ export type MimeType = "json" | "text";
export type SpanKind = "agent" | "chain" | "embedding" | "evaluator" | "guardrail" | "llm" | "reranker" | "retriever" | "tool" | "unknown";
export type SpanStatusCode = "ERROR" | "OK" | "UNSET";
export type SpanDetailsQuery$variables = {
+ filterUserIds?: ReadonlyArray | null;
id: string;
};
export type SpanDetailsQuery$data = {
@@ -80,42 +81,45 @@ export type SpanDetailsQuery = {
};
const node: ConcreteRequest = (function(){
-var v0 = [
- {
- "defaultValue": null,
- "kind": "LocalArgument",
- "name": "id"
- }
-],
-v1 = [
+var v0 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "filterUserIds"
+},
+v1 = {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "id"
+},
+v2 = [
{
"kind": "Variable",
"name": "id",
"variableName": "id"
}
],
-v2 = {
+v3 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "__typename",
"storageKey": null
},
-v3 = {
+v4 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "id",
"storageKey": null
},
-v4 = {
+v5 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "spanId",
"storageKey": null
},
-v5 = {
+v6 = {
"alias": null,
"args": null,
"concreteType": "Trace",
@@ -123,7 +127,7 @@ v5 = {
"name": "trace",
"plural": false,
"selections": [
- (v3/*: any*/),
+ (v4/*: any*/),
{
"alias": null,
"args": null,
@@ -134,84 +138,84 @@ v5 = {
],
"storageKey": null
},
-v6 = {
+v7 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "name",
"storageKey": null
},
-v7 = {
+v8 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "spanKind",
"storageKey": null
},
-v8 = {
+v9 = {
"alias": "statusCode",
"args": null,
"kind": "ScalarField",
"name": "propagatedStatusCode",
"storageKey": null
},
-v9 = {
+v10 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "statusMessage",
"storageKey": null
},
-v10 = {
+v11 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "startTime",
"storageKey": null
},
-v11 = {
+v12 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "parentId",
"storageKey": null
},
-v12 = {
+v13 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "latencyMs",
"storageKey": null
},
-v13 = {
+v14 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "tokenCountTotal",
"storageKey": null
},
-v14 = {
+v15 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "tokenCountPrompt",
"storageKey": null
},
-v15 = {
+v16 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "tokenCountCompletion",
"storageKey": null
},
-v16 = {
+v17 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "endTime",
"storageKey": null
},
-v17 = [
+v18 = [
{
"alias": null,
"args": null,
@@ -227,34 +231,34 @@ v17 = [
"storageKey": null
}
],
-v18 = {
+v19 = {
"alias": null,
"args": null,
"concreteType": "SpanIOValue",
"kind": "LinkedField",
"name": "input",
"plural": false,
- "selections": (v17/*: any*/),
+ "selections": (v18/*: any*/),
"storageKey": null
},
-v19 = {
+v20 = {
"alias": null,
"args": null,
"concreteType": "SpanIOValue",
"kind": "LinkedField",
"name": "output",
"plural": false,
- "selections": (v17/*: any*/),
+ "selections": (v18/*: any*/),
"storageKey": null
},
-v20 = {
+v21 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "attributes",
"storageKey": null
},
-v21 = {
+v22 = {
"alias": null,
"args": null,
"concreteType": "SpanEvent",
@@ -262,7 +266,7 @@ v21 = {
"name": "events",
"plural": true,
"selections": [
- (v6/*: any*/),
+ (v7/*: any*/),
{
"alias": null,
"args": null,
@@ -280,7 +284,7 @@ v21 = {
],
"storageKey": null
},
-v22 = {
+v23 = {
"alias": null,
"args": null,
"concreteType": "DocumentRetrievalMetrics",
@@ -319,28 +323,28 @@ v22 = {
],
"storageKey": null
},
-v23 = {
+v24 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "label",
"storageKey": null
},
-v24 = {
+v25 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "score",
"storageKey": null
},
-v25 = {
+v26 = {
"alias": null,
"args": null,
"kind": "ScalarField",
"name": "explanation",
"storageKey": null
},
-v26 = {
+v27 = {
"alias": null,
"args": null,
"concreteType": "DocumentEvaluation",
@@ -355,33 +359,121 @@ v26 = {
"name": "documentPosition",
"storageKey": null
},
- (v6/*: any*/),
- (v23/*: any*/),
+ (v7/*: any*/),
+ (v24/*: any*/),
+ (v25/*: any*/),
+ (v26/*: any*/)
+ ],
+ "storageKey": null
+},
+v28 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotatorKind",
+ "storageKey": null
+},
+v29 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "createdAt",
+ "storageKey": null
+},
+v30 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v4/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+},
+v31 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+},
+v32 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "description",
+ "storageKey": null
+},
+v33 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
(v24/*: any*/),
(v25/*: any*/)
],
"storageKey": null
+},
+v34 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+},
+v35 = {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "lowerBound",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "upperBound",
+ "storageKey": null
+ },
+ (v34/*: any*/)
+ ],
+ "type": "ContinuousAnnotationConfig",
+ "abstractKey": null
+},
+v36 = {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/)
+ ],
+ "type": "FreeformAnnotationConfig",
+ "abstractKey": null
};
return {
"fragment": {
- "argumentDefinitions": (v0/*: any*/),
+ "argumentDefinitions": [
+ (v0/*: any*/),
+ (v1/*: any*/)
+ ],
"kind": "Fragment",
"metadata": null,
"name": "SpanDetailsQuery",
"selections": [
{
"alias": "span",
- "args": (v1/*: any*/),
+ "args": (v2/*: any*/),
"concreteType": null,
"kind": "LinkedField",
"name": "node",
"plural": false,
"selections": [
- (v2/*: any*/),
+ (v3/*: any*/),
{
"kind": "InlineFragment",
"selections": [
- (v3/*: any*/),
(v4/*: any*/),
(v5/*: any*/),
(v6/*: any*/),
@@ -395,16 +487,17 @@ return {
(v14/*: any*/),
(v15/*: any*/),
(v16/*: any*/),
- (v18/*: any*/),
+ (v17/*: any*/),
(v19/*: any*/),
(v20/*: any*/),
+ (v21/*: any*/),
{
"kind": "RequiredField",
- "field": (v21/*: any*/),
+ "field": (v22/*: any*/),
"action": "THROW"
},
- (v22/*: any*/),
- (v26/*: any*/),
+ (v23/*: any*/),
+ (v27/*: any*/),
{
"alias": null,
"args": null,
@@ -413,8 +506,8 @@ return {
"name": "spanAnnotations",
"plural": true,
"selections": [
- (v3/*: any*/),
- (v6/*: any*/)
+ (v4/*: any*/),
+ (v7/*: any*/)
],
"storageKey": null
},
@@ -429,7 +522,13 @@ return {
"name": "SpanFeedback_annotations"
},
{
- "args": null,
+ "args": [
+ {
+ "kind": "Variable",
+ "name": "filterUserIds",
+ "variableName": "filterUserIds"
+ }
+ ],
"kind": "FragmentSpread",
"name": "SpanAside_span"
}
@@ -446,28 +545,30 @@ return {
},
"kind": "Request",
"operation": {
- "argumentDefinitions": (v0/*: any*/),
+ "argumentDefinitions": [
+ (v1/*: any*/),
+ (v0/*: any*/)
+ ],
"kind": "Operation",
"name": "SpanDetailsQuery",
"selections": [
{
"alias": "span",
- "args": (v1/*: any*/),
+ "args": (v2/*: any*/),
"concreteType": null,
"kind": "LinkedField",
"name": "node",
"plural": false,
"selections": [
- (v2/*: any*/),
+ (v3/*: any*/),
{
"kind": "TypeDiscriminator",
"abstractKey": "__isNode"
},
- (v3/*: any*/),
+ (v4/*: any*/),
{
"kind": "InlineFragment",
"selections": [
- (v4/*: any*/),
(v5/*: any*/),
(v6/*: any*/),
(v7/*: any*/),
@@ -480,12 +581,13 @@ return {
(v14/*: any*/),
(v15/*: any*/),
(v16/*: any*/),
- (v18/*: any*/),
+ (v17/*: any*/),
(v19/*: any*/),
(v20/*: any*/),
(v21/*: any*/),
(v22/*: any*/),
- (v26/*: any*/),
+ (v23/*: any*/),
+ (v27/*: any*/),
{
"alias": null,
"args": null,
@@ -494,11 +596,11 @@ return {
"name": "spanAnnotations",
"plural": true,
"selections": [
- (v3/*: any*/),
- (v6/*: any*/),
- (v23/*: any*/),
+ (v4/*: any*/),
+ (v7/*: any*/),
(v24/*: any*/),
(v25/*: any*/),
+ (v26/*: any*/),
{
"alias": null,
"args": null,
@@ -506,11 +608,53 @@ return {
"name": "metadata",
"storageKey": null
},
+ (v28/*: any*/),
{
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "annotatorKind",
+ "name": "identifier",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "source",
+ "storageKey": null
+ },
+ (v29/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "updatedAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
"storageKey": null
}
],
@@ -531,7 +675,192 @@ return {
"name": "project",
"plural": false,
"selections": [
- (v3/*: any*/)
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v30/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v31/*: any*/),
+ (v32/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v33/*: any*/),
+ (v4/*: any*/),
+ (v7/*: any*/),
+ (v34/*: any*/)
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ (v35/*: any*/),
+ (v36/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": "configs",
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": "config",
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ (v30/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v7/*: any*/),
+ (v32/*: any*/),
+ (v31/*: any*/)
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v33/*: any*/)
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ (v35/*: any*/),
+ (v36/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v24/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v7/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": "filteredSpanAnnotations",
+ "args": [
+ {
+ "fields": [
+ {
+ "kind": "Literal",
+ "name": "exclude",
+ "value": {
+ "names": [
+ "note"
+ ]
+ }
+ },
+ {
+ "fields": [
+ {
+ "kind": "Variable",
+ "name": "userIds",
+ "variableName": "filterUserIds"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "include"
+ }
+ ],
+ "kind": "ObjectValue",
+ "name": "filter"
+ }
+ ],
+ "concreteType": "SpanAnnotation",
+ "kind": "LinkedField",
+ "name": "spanAnnotations",
+ "plural": true,
+ "selections": [
+ (v4/*: any*/),
+ (v7/*: any*/),
+ (v28/*: any*/),
+ (v25/*: any*/),
+ (v24/*: any*/),
+ (v26/*: any*/),
+ (v29/*: any*/)
],
"storageKey": null
}
@@ -545,16 +874,16 @@ return {
]
},
"params": {
- "cacheID": "5c3846067ae228d13a34f9ee3e533b43",
+ "cacheID": "f6d0a095f22eb689f6ad811bd9e3d231",
"id": null,
"metadata": {},
"name": "SpanDetailsQuery",
"operationKind": "query",
- "text": "query SpanDetailsQuery(\n $id: GlobalID!\n) {\n span: node(id: $id) {\n __typename\n ... on Span {\n id\n spanId\n trace {\n id\n traceId\n }\n name\n spanKind\n statusCode: propagatedStatusCode\n statusMessage\n startTime\n parentId\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n endTime\n input {\n value\n mimeType\n }\n output {\n value\n mimeType\n }\n attributes\n events {\n name\n message\n timestamp\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n documentEvaluations {\n documentPosition\n name\n label\n score\n explanation\n }\n spanAnnotations {\n id\n name\n }\n ...SpanHeader_span\n ...SpanFeedback_annotations\n ...SpanAside_span\n }\n __isNode: __typename\n id\n }\n}\n\nfragment SpanAside_span on Span {\n id\n project {\n id\n }\n code: statusCode\n startTime\n endTime\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n spanAnnotations {\n id\n name\n label\n annotatorKind\n score\n }\n}\n\nfragment SpanFeedback_annotations on Span {\n id\n spanAnnotations {\n id\n name\n label\n score\n explanation\n metadata\n annotatorKind\n }\n}\n\nfragment SpanHeader_span on Span {\n name\n spanKind\n code: statusCode\n latencyMs\n startTime\n tokenCountPrompt\n tokenCountCompletion\n tokenCountTotal\n}\n"
+ "text": "query SpanDetailsQuery(\n $id: GlobalID!\n $filterUserIds: [GlobalID]\n) {\n span: node(id: $id) {\n __typename\n ... on Span {\n id\n spanId\n trace {\n id\n traceId\n }\n name\n spanKind\n statusCode: propagatedStatusCode\n statusMessage\n startTime\n parentId\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n endTime\n input {\n value\n mimeType\n }\n output {\n value\n mimeType\n }\n attributes\n events {\n name\n message\n timestamp\n }\n documentRetrievalMetrics {\n evaluationName\n ndcg\n precision\n hit\n }\n documentEvaluations {\n documentPosition\n name\n label\n score\n explanation\n }\n spanAnnotations {\n id\n name\n }\n ...SpanHeader_span\n ...SpanFeedback_annotations\n ...SpanAside_span_3lpqY\n }\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationConfigListProjectAnnotationConfigFragment on Project {\n annotationConfigs {\n edges {\n node {\n __typename\n ... on Node {\n __isNode: __typename\n id\n }\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n annotationType\n description\n }\n ... on CategoricalAnnotationConfig {\n values {\n label\n score\n }\n }\n ... on ContinuousAnnotationConfig {\n lowerBound\n upperBound\n optimizationDirection\n }\n ... on FreeformAnnotationConfig {\n name\n }\n }\n }\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... 
on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n\nfragment SpanAsideAnnotationList_span_3lpqY on Span {\n project {\n id\n annotationConfigs {\n configs: edges {\n config: node {\n __typename\n ... on Node {\n __isNode: __typename\n id\n }\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n }\n }\n }\n }\n }\n filteredSpanAnnotations: spanAnnotations(filter: {exclude: {names: [\"note\"]}, include: {userIds: $filterUserIds}}) {\n id\n name\n annotatorKind\n score\n label\n explanation\n createdAt\n }\n}\n\nfragment SpanAside_span_3lpqY on Span {\n id\n project {\n id\n ...AnnotationConfigListProjectAnnotationConfigFragment\n annotationConfigs {\n configs: edges {\n config: node {\n __typename\n ... on Node {\n __isNode: __typename\n id\n }\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n name\n description\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n values {\n label\n score\n }\n }\n ... on ContinuousAnnotationConfig {\n lowerBound\n upperBound\n optimizationDirection\n }\n ... 
on FreeformAnnotationConfig {\n name\n }\n }\n }\n }\n }\n code: statusCode\n startTime\n endTime\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n ...TraceHeaderRootSpanAnnotationsFragment\n ...SpanAsideAnnotationList_span_3lpqY\n}\n\nfragment SpanFeedback_annotations on Span {\n id\n spanAnnotations {\n id\n name\n label\n score\n explanation\n metadata\n annotatorKind\n identifier\n source\n createdAt\n updatedAt\n user {\n id\n username\n profilePictureUrl\n }\n }\n}\n\nfragment SpanHeader_span on Span {\n name\n spanKind\n code: statusCode\n latencyMs\n startTime\n tokenCountPrompt\n tokenCountCompletion\n tokenCountTotal\n}\n\nfragment TraceHeaderRootSpanAnnotationsFragment on Span {\n ...AnnotationSummaryGroup\n}\n"
}
};
})();
-(node as any).hash = "7cbd32bca298fe80e84fff1dfb6444e8";
+(node as any).hash = "3fbc6bd8e6439dc1a3084403445dac3d";
export default node;
diff --git a/app/src/pages/trace/__generated__/SpanFeedback_annotations.graphql.ts b/app/src/pages/trace/__generated__/SpanFeedback_annotations.graphql.ts
index 1d2c6149c4..d2a0be1509 100644
--- a/app/src/pages/trace/__generated__/SpanFeedback_annotations.graphql.ts
+++ b/app/src/pages/trace/__generated__/SpanFeedback_annotations.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<3ebe78017fc09f298e6f20a6e1a90035>>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,18 +9,28 @@
// @ts-nocheck
import { ReaderFragment } from 'relay-runtime';
-export type AnnotatorKind = "HUMAN" | "LLM";
+export type AnnotationSource = "API" | "APP";
+export type AnnotatorKind = "CODE" | "HUMAN" | "LLM";
import { FragmentRefs } from "relay-runtime";
export type SpanFeedback_annotations$data = {
readonly id: string;
readonly spanAnnotations: ReadonlyArray<{
readonly annotatorKind: AnnotatorKind;
+ readonly createdAt: string;
readonly explanation: string | null;
readonly id: string;
+ readonly identifier: string | null;
readonly label: string | null;
readonly metadata: any;
readonly name: string;
readonly score: number | null;
+ readonly source: AnnotationSource;
+ readonly updatedAt: string;
+ readonly user: {
+ readonly id: string;
+ readonly profilePictureUrl: string | null;
+ readonly username: string;
+ } | null;
}>;
readonly " $fragmentType": "SpanFeedback_annotations";
};
@@ -94,6 +104,60 @@ return {
"kind": "ScalarField",
"name": "annotatorKind",
"storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "identifier",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "source",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "createdAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "updatedAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ (v0/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
}
],
"storageKey": null
@@ -104,6 +168,6 @@ return {
};
})();
-(node as any).hash = "8ed7e3ac3c8cc4b5934f6f269116d159";
+(node as any).hash = "9a73b57370acadba7a0398c1439e9893";
export default node;
diff --git a/app/src/pages/trace/__generated__/SpanNotesEditorAddNoteMutation.graphql.ts b/app/src/pages/trace/__generated__/SpanNotesEditorAddNoteMutation.graphql.ts
new file mode 100644
index 0000000000..3aacec7aae
--- /dev/null
+++ b/app/src/pages/trace/__generated__/SpanNotesEditorAddNoteMutation.graphql.ts
@@ -0,0 +1,93 @@
+/**
+ * @generated SignedSource<<0ca568b7668d28faa5d4b14509858522>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+export type CreateSpanNoteInput = {
+ note: string;
+ spanId: string;
+};
+export type SpanNotesEditorAddNoteMutation$variables = {
+ input: CreateSpanNoteInput;
+};
+export type SpanNotesEditorAddNoteMutation$data = {
+ readonly createSpanNote: {
+ readonly __typename: "SpanAnnotationMutationPayload";
+ };
+};
+export type SpanNotesEditorAddNoteMutation = {
+ response: SpanNotesEditorAddNoteMutation$data;
+ variables: SpanNotesEditorAddNoteMutation$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "input"
+ }
+],
+v1 = [
+ {
+ "alias": null,
+ "args": [
+ {
+ "kind": "Variable",
+ "name": "annotationInput",
+ "variableName": "input"
+ }
+ ],
+ "concreteType": "SpanAnnotationMutationPayload",
+ "kind": "LinkedField",
+ "name": "createSpanNote",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+];
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SpanNotesEditorAddNoteMutation",
+ "selections": (v1/*: any*/),
+ "type": "Mutation",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "SpanNotesEditorAddNoteMutation",
+ "selections": (v1/*: any*/)
+ },
+ "params": {
+ "cacheID": "855aa00f68b36e86f375e7765413ac81",
+ "id": null,
+ "metadata": {},
+ "name": "SpanNotesEditorAddNoteMutation",
+ "operationKind": "mutation",
+ "text": "mutation SpanNotesEditorAddNoteMutation(\n $input: CreateSpanNoteInput!\n) {\n createSpanNote(annotationInput: $input) {\n __typename\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "fc7452182db44527c3f68ba81dd53e58";
+
+export default node;
diff --git a/app/src/pages/trace/__generated__/SpanNotesEditorQuery.graphql.ts b/app/src/pages/trace/__generated__/SpanNotesEditorQuery.graphql.ts
new file mode 100644
index 0000000000..6e5e7030f5
--- /dev/null
+++ b/app/src/pages/trace/__generated__/SpanNotesEditorQuery.graphql.ts
@@ -0,0 +1,290 @@
+/**
+ * @generated SignedSource<<4c3856663c8358197fb6fe6bf74d583c>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ConcreteRequest } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type SpanNotesEditorQuery$variables = {
+ spanNodeId: string;
+};
+export type SpanNotesEditorQuery$data = {
+ readonly span: {
+ readonly spanAnnotations?: ReadonlyArray<{
+ readonly createdAt: string;
+ readonly explanation: string | null;
+ readonly id: string;
+ readonly name: string;
+ readonly user: {
+ readonly id: string;
+ readonly profilePictureUrl: string | null;
+ readonly username: string;
+ } | null;
+ }>;
+ readonly " $fragmentSpreads": FragmentRefs<"SpanFeedback_annotations">;
+ };
+ readonly viewer: {
+ readonly id: string;
+ readonly profilePictureUrl: string | null;
+ readonly username: string;
+ } | null;
+};
+export type SpanNotesEditorQuery = {
+ response: SpanNotesEditorQuery$data;
+ variables: SpanNotesEditorQuery$variables;
+};
+
+const node: ConcreteRequest = (function(){
+var v0 = [
+ {
+ "defaultValue": null,
+ "kind": "LocalArgument",
+ "name": "spanNodeId"
+ }
+],
+v1 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v2 = [
+ (v1/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+],
+v3 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "viewer",
+ "plural": false,
+ "selections": (v2/*: any*/),
+ "storageKey": null
+},
+v4 = [
+ {
+ "kind": "Variable",
+ "name": "id",
+ "variableName": "spanNodeId"
+ }
+],
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "explanation",
+ "storageKey": null
+},
+v7 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "createdAt",
+ "storageKey": null
+},
+v8 = {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": (v2/*: any*/),
+ "storageKey": null
+};
+return {
+ "fragment": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "SpanNotesEditorQuery",
+ "selections": [
+ (v3/*: any*/),
+ {
+ "alias": "span",
+ "args": (v4/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "SpanAnnotation",
+ "kind": "LinkedField",
+ "name": "spanAnnotations",
+ "plural": true,
+ "selections": [
+ (v1/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ (v7/*: any*/),
+ (v8/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "SpanFeedback_annotations"
+ }
+ ],
+ "type": "Span",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Query",
+ "abstractKey": null
+ },
+ "kind": "Request",
+ "operation": {
+ "argumentDefinitions": (v0/*: any*/),
+ "kind": "Operation",
+ "name": "SpanNotesEditorQuery",
+ "selections": [
+ (v3/*: any*/),
+ {
+ "alias": "span",
+ "args": (v4/*: any*/),
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+ },
+ {
+ "kind": "TypeDiscriminator",
+ "abstractKey": "__isNode"
+ },
+ (v1/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "SpanAnnotation",
+ "kind": "LinkedField",
+ "name": "spanAnnotations",
+ "plural": true,
+ "selections": [
+ (v1/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ (v7/*: any*/),
+ (v8/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "metadata",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotatorKind",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "identifier",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "source",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "updatedAt",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Span",
+ "abstractKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ]
+ },
+ "params": {
+ "cacheID": "f199554faad2a910a81eceeffdecafd2",
+ "id": null,
+ "metadata": {},
+ "name": "SpanNotesEditorQuery",
+ "operationKind": "query",
+ "text": "query SpanNotesEditorQuery(\n $spanNodeId: GlobalID!\n) {\n viewer {\n id\n username\n profilePictureUrl\n }\n span: node(id: $spanNodeId) {\n __typename\n ... on Span {\n spanAnnotations {\n id\n name\n explanation\n createdAt\n user {\n id\n username\n profilePictureUrl\n }\n }\n ...SpanFeedback_annotations\n }\n __isNode: __typename\n id\n }\n}\n\nfragment SpanFeedback_annotations on Span {\n id\n spanAnnotations {\n id\n name\n label\n score\n explanation\n metadata\n annotatorKind\n identifier\n source\n createdAt\n updatedAt\n user {\n id\n username\n profilePictureUrl\n }\n }\n}\n"
+ }
+};
+})();
+
+(node as any).hash = "52b812698dcd109bea51c9cac2040c8e";
+
+export default node;
diff --git a/app/src/pages/trace/__generated__/TraceDetailsQuery.graphql.ts b/app/src/pages/trace/__generated__/TraceDetailsQuery.graphql.ts
index ea71e9e0b1..c5159b4de2 100644
--- a/app/src/pages/trace/__generated__/TraceDetailsQuery.graphql.ts
+++ b/app/src/pages/trace/__generated__/TraceDetailsQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -27,6 +27,18 @@ export type TraceDetailsQuery$data = {
readonly latencyMs: number | null;
readonly name: string;
readonly parentId: string | null;
+ readonly spanAnnotationSummaries: ReadonlyArray<{
+ readonly count: number;
+ readonly labelCount: number;
+ readonly labelFractions: ReadonlyArray<{
+ readonly fraction: number;
+ readonly label: string;
+ }>;
+ readonly labels: ReadonlyArray;
+ readonly meanScore: number | null;
+ readonly name: string;
+ readonly scoreCount: number;
+ }>;
readonly spanId: string;
readonly spanKind: SpanKind;
readonly startTime: string;
@@ -74,10 +86,17 @@ v4 = {
"alias": null,
"args": null,
"kind": "ScalarField",
- "name": "latencyMs",
+ "name": "name",
"storageKey": null
},
v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "latencyMs",
+ "storageKey": null
+},
+v6 = {
"kind": "InlineFragment",
"selections": [
{
@@ -139,13 +158,7 @@ v5 = {
"name": "spanId",
"storageKey": null
},
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "name",
- "storageKey": null
- },
+ (v4/*: any*/),
{
"alias": null,
"args": null,
@@ -174,7 +187,7 @@ v5 = {
"name": "parentId",
"storageKey": null
},
- (v4/*: any*/),
+ (v5/*: any*/),
{
"alias": null,
"args": null,
@@ -195,6 +208,78 @@ v5 = {
"kind": "ScalarField",
"name": "tokenCountCompletion",
"storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "labels",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "count",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "labelCount",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "scoreCount",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
}
],
"storageKey": null
@@ -205,7 +290,7 @@ v5 = {
],
"storageKey": "spans(first:1000)"
},
- (v4/*: any*/)
+ (v5/*: any*/)
],
"storageKey": null
}
@@ -231,7 +316,7 @@ return {
"name": "node",
"plural": false,
"selections": [
- (v5/*: any*/)
+ (v6/*: any*/)
],
"storageKey": null
}
@@ -263,7 +348,7 @@ return {
"name": "__typename",
"storageKey": null
},
- (v5/*: any*/),
+ (v6/*: any*/),
{
"kind": "TypeDiscriminator",
"abstractKey": "__isNode"
@@ -275,16 +360,16 @@ return {
]
},
"params": {
- "cacheID": "9434c8c6e07d5ca4dd3a19286644f369",
+ "cacheID": "5ffc9800226eec2e4590fe4983227ca7",
"id": null,
"metadata": {},
"name": "TraceDetailsQuery",
"operationKind": "query",
- "text": "query TraceDetailsQuery(\n $traceId: ID!\n $id: GlobalID!\n) {\n project: node(id: $id) {\n __typename\n ... on Project {\n trace(traceId: $traceId) {\n projectSessionId\n spans(first: 1000) {\n edges {\n span: node {\n id\n spanId\n name\n spanKind\n statusCode\n startTime\n parentId\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n }\n }\n }\n latencyMs\n }\n }\n __isNode: __typename\n id\n }\n}\n"
+ "text": "query TraceDetailsQuery(\n $traceId: ID!\n $id: GlobalID!\n) {\n project: node(id: $id) {\n __typename\n ... on Project {\n trace(traceId: $traceId) {\n projectSessionId\n spans(first: 1000) {\n edges {\n span: node {\n id\n spanId\n name\n spanKind\n statusCode\n startTime\n parentId\n latencyMs\n tokenCountTotal\n tokenCountPrompt\n tokenCountCompletion\n spanAnnotationSummaries {\n labels\n count\n labelCount\n labelFractions {\n fraction\n label\n }\n name\n scoreCount\n meanScore\n }\n }\n }\n }\n latencyMs\n }\n }\n __isNode: __typename\n id\n }\n}\n"
}
};
})();
-(node as any).hash = "e4165ac98e972c7c7491e1573a1aec1a";
+(node as any).hash = "b44e85fd7e07bd41226d4e87d8ba2b80";
export default node;
diff --git a/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsFragment.graphql.ts b/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsFragment.graphql.ts
new file mode 100644
index 0000000000..1843951a40
--- /dev/null
+++ b/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsFragment.graphql.ts
@@ -0,0 +1,40 @@
+/**
+ * @generated SignedSource<<08fd4dbe127fe1a836c5d08148b20acd>>
+ * @lightSyntaxTransform
+ * @nogrep
+ */
+
+/* tslint:disable */
+/* eslint-disable */
+// @ts-nocheck
+
+import { ReaderFragment } from 'relay-runtime';
+import { FragmentRefs } from "relay-runtime";
+export type TraceHeaderRootSpanAnnotationsFragment$data = {
+ readonly " $fragmentSpreads": FragmentRefs<"AnnotationSummaryGroup">;
+ readonly " $fragmentType": "TraceHeaderRootSpanAnnotationsFragment";
+};
+export type TraceHeaderRootSpanAnnotationsFragment$key = {
+ readonly " $data"?: TraceHeaderRootSpanAnnotationsFragment$data;
+ readonly " $fragmentSpreads": FragmentRefs<"TraceHeaderRootSpanAnnotationsFragment">;
+};
+
+const node: ReaderFragment = {
+ "argumentDefinitions": [],
+ "kind": "Fragment",
+ "metadata": null,
+ "name": "TraceHeaderRootSpanAnnotationsFragment",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "AnnotationSummaryGroup"
+ }
+ ],
+ "type": "Span",
+ "abstractKey": null
+};
+
+(node as any).hash = "1926def8a20ce2131c00ccd01efe9fb6";
+
+export default node;
diff --git a/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsQuery.graphql.ts b/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsQuery.graphql.ts
index f042ffae3d..e4ec9bf43a 100644
--- a/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsQuery.graphql.ts
+++ b/app/src/pages/trace/__generated__/TraceHeaderRootSpanAnnotationsQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<<28d7b1e76d4b4eb02f45faf80ed76984>>
+ * @generated SignedSource<<3c48e16909051051d96f8bf283e6b51e>>
* @lightSyntaxTransform
* @nogrep
*/
@@ -9,18 +9,13 @@
// @ts-nocheck
import { ConcreteRequest } from 'relay-runtime';
-export type AnnotatorKind = "HUMAN" | "LLM";
+import { FragmentRefs } from "relay-runtime";
export type TraceHeaderRootSpanAnnotationsQuery$variables = {
spanId: string;
};
export type TraceHeaderRootSpanAnnotationsQuery$data = {
readonly span: {
- readonly spanAnnotations?: ReadonlyArray<{
- readonly annotatorKind: AnnotatorKind;
- readonly label: string | null;
- readonly name: string;
- readonly score: number | null;
- }>;
+ readonly " $fragmentSpreads": FragmentRefs<"TraceHeaderRootSpanAnnotationsFragment">;
};
};
export type TraceHeaderRootSpanAnnotationsQuery = {
@@ -44,50 +39,39 @@ v1 = [
}
],
v2 = {
- "kind": "InlineFragment",
- "selections": [
- {
- "alias": null,
- "args": null,
- "concreteType": "SpanAnnotation",
- "kind": "LinkedField",
- "name": "spanAnnotations",
- "plural": true,
- "selections": [
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "name",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "label",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "score",
- "storageKey": null
- },
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "annotatorKind",
- "storageKey": null
- }
- ],
- "storageKey": null
- }
- ],
- "type": "Span",
- "abstractKey": null
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "__typename",
+ "storageKey": null
+},
+v3 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "id",
+ "storageKey": null
+},
+v4 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "name",
+ "storageKey": null
+},
+v5 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "label",
+ "storageKey": null
+},
+v6 = {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "score",
+ "storageKey": null
};
return {
"fragment": {
@@ -104,7 +88,18 @@ return {
"name": "node",
"plural": false,
"selections": [
- (v2/*: any*/)
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "args": null,
+ "kind": "FragmentSpread",
+ "name": "TraceHeaderRootSpanAnnotationsFragment"
+ }
+ ],
+ "type": "Span",
+ "abstractKey": null
+ }
],
"storageKey": null
}
@@ -126,41 +121,224 @@ return {
"name": "node",
"plural": false,
"selections": [
+ (v2/*: any*/),
{
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "__typename",
- "storageKey": null
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "Project",
+ "kind": "LinkedField",
+ "name": "project",
+ "plural": false,
+ "selections": [
+ (v3/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigConnection",
+ "kind": "LinkedField",
+ "name": "annotationConfigs",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationConfigEdge",
+ "kind": "LinkedField",
+ "name": "edges",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": null,
+ "kind": "LinkedField",
+ "name": "node",
+ "plural": false,
+ "selections": [
+ (v2/*: any*/),
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotationType",
+ "storageKey": null
+ }
+ ],
+ "type": "AnnotationConfigBase",
+ "abstractKey": "__isAnnotationConfigBase"
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "optimizationDirection",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "CategoricalAnnotationValue",
+ "kind": "LinkedField",
+ "name": "values",
+ "plural": true,
+ "selections": [
+ (v5/*: any*/),
+ (v6/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "CategoricalAnnotationConfig",
+ "abstractKey": null
+ },
+ {
+ "kind": "InlineFragment",
+ "selections": [
+ (v3/*: any*/)
+ ],
+ "type": "Node",
+ "abstractKey": "__isNode"
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "SpanAnnotation",
+ "kind": "LinkedField",
+ "name": "spanAnnotations",
+ "plural": true,
+ "selections": [
+ (v3/*: any*/),
+ (v4/*: any*/),
+ (v5/*: any*/),
+ (v6/*: any*/),
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "annotatorKind",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "createdAt",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "User",
+ "kind": "LinkedField",
+ "name": "user",
+ "plural": false,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "username",
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "profilePictureUrl",
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ }
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "AnnotationSummary",
+ "kind": "LinkedField",
+ "name": "spanAnnotationSummaries",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "concreteType": "LabelFraction",
+ "kind": "LinkedField",
+ "name": "labelFractions",
+ "plural": true,
+ "selections": [
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "fraction",
+ "storageKey": null
+ },
+ (v5/*: any*/)
+ ],
+ "storageKey": null
+ },
+ {
+ "alias": null,
+ "args": null,
+ "kind": "ScalarField",
+ "name": "meanScore",
+ "storageKey": null
+ },
+ (v4/*: any*/)
+ ],
+ "storageKey": null
+ }
+ ],
+ "type": "Span",
+ "abstractKey": null
},
- (v2/*: any*/),
{
"kind": "TypeDiscriminator",
"abstractKey": "__isNode"
},
- {
- "alias": null,
- "args": null,
- "kind": "ScalarField",
- "name": "id",
- "storageKey": null
- }
+ (v3/*: any*/)
],
"storageKey": null
}
]
},
"params": {
- "cacheID": "660fcce54b17ff01056ef96c4dcb3c35",
+ "cacheID": "ee8f10ef644ff3e35069b6c9ceda4d79",
"id": null,
"metadata": {},
"name": "TraceHeaderRootSpanAnnotationsQuery",
"operationKind": "query",
- "text": "query TraceHeaderRootSpanAnnotationsQuery(\n $spanId: GlobalID!\n) {\n span: node(id: $spanId) {\n __typename\n ... on Span {\n spanAnnotations {\n name\n label\n score\n annotatorKind\n }\n }\n __isNode: __typename\n id\n }\n}\n"
+ "text": "query TraceHeaderRootSpanAnnotationsQuery(\n $spanId: GlobalID!\n) {\n span: node(id: $spanId) {\n __typename\n ... on Span {\n ...TraceHeaderRootSpanAnnotationsFragment\n }\n __isNode: __typename\n id\n }\n}\n\nfragment AnnotationSummaryGroup on Span {\n project {\n id\n annotationConfigs {\n edges {\n node {\n __typename\n ... on AnnotationConfigBase {\n __isAnnotationConfigBase: __typename\n annotationType\n }\n ... on CategoricalAnnotationConfig {\n id\n name\n optimizationDirection\n values {\n label\n score\n }\n }\n ... on Node {\n __isNode: __typename\n id\n }\n }\n }\n }\n }\n spanAnnotations {\n id\n name\n label\n score\n annotatorKind\n createdAt\n user {\n username\n profilePictureUrl\n }\n }\n spanAnnotationSummaries {\n labelFractions {\n fraction\n label\n }\n meanScore\n name\n }\n}\n\nfragment TraceHeaderRootSpanAnnotationsFragment on Span {\n ...AnnotationSummaryGroup\n}\n"
}
};
})();
-(node as any).hash = "d506d0235a1602f740af082698950c65";
+(node as any).hash = "2239bc8844c04c69f9686a9b72d8a7f7";
export default node;
diff --git a/app/src/pages/trace/utils.ts b/app/src/pages/trace/utils.ts
new file mode 100644
index 0000000000..729f2e29e7
--- /dev/null
+++ b/app/src/pages/trace/utils.ts
@@ -0,0 +1,25 @@
+/**
+ * Deduplicates annotations by name by keeping the latest one
+ */
+export const deduplicateAnnotationsByName = <
+ T extends { name: string; createdAt: string },
+>(
+ annotations: T[]
+) => {
+ return Object.values(
+ annotations.reduce(
+ (acc, annotation) => {
+ if (!acc[annotation.name]) {
+ acc[annotation.name] = annotation;
+ } else if (
+ new Date(acc[annotation.name].createdAt) <
+ new Date(annotation.createdAt)
+ ) {
+ acc[annotation.name] = annotation;
+ }
+ return acc;
+ },
+ {} as Record
+ )
+ );
+};
diff --git a/app/src/utils/retentionPolicyUtils.ts b/app/src/utils/retentionPolicyUtils.ts
new file mode 100644
index 0000000000..5cad82cef9
--- /dev/null
+++ b/app/src/utils/retentionPolicyUtils.ts
@@ -0,0 +1,29 @@
+/**
+ * Creates a summary text for a retention policy's deletion rule.
+ * @param numberOfDays - The number of days after which traces will be deleted.
+ * @param numberOfTraces - The maximum number of traces that will be deleted.
+ * @returns A string describing the deletion rule.
+ */
+export const createPolicyDeletionSummaryText = ({
+ numberOfDays,
+ numberOfTraces,
+}: {
+ numberOfDays?: number;
+ numberOfTraces?: number;
+}) => {
+ if (numberOfDays === 0 && !numberOfTraces) {
+ return "This policy will not delete any traces.";
+ }
+ const daysPolicyString =
+ typeof numberOfDays === "number" ? `older than ${numberOfDays} days` : "";
+ const tracesPolicyString =
+ typeof numberOfTraces === "number"
+ ? `when there are more than ${numberOfTraces} traces`
+ : "";
+
+ const policyString =
+ daysPolicyString && tracesPolicyString
+ ? `${daysPolicyString} or ${tracesPolicyString}`
+ : daysPolicyString || tracesPolicyString;
+ return `This policy will delete traces ${policyString}`;
+};
diff --git a/app/src/utils/timeFormatUtils.ts b/app/src/utils/timeFormatUtils.ts
index d74ece6373..35add8baf6 100644
--- a/app/src/utils/timeFormatUtils.ts
+++ b/app/src/utils/timeFormatUtils.ts
@@ -12,6 +12,7 @@ export const fullTimeFormatter = timeFormat("%x %H:%M:%S %p");
*/
export const shortTimeFormatter = timeFormat("%H:%M %p");
+export const shortDateTimeFormatter = timeFormat("%x %H:%M %p");
export const timeRangeFormatter = (timeRange: OpenTimeRange) => {
if (timeRange.start && timeRange.end) {
return `${fullTimeFormatter(timeRange.start)} - ${fullTimeFormatter(timeRange.end)}`;
diff --git a/app/stories/AnnotationInputs.stories.tsx b/app/stories/AnnotationInputs.stories.tsx
new file mode 100644
index 0000000000..c4cd00d8a9
--- /dev/null
+++ b/app/stories/AnnotationInputs.stories.tsx
@@ -0,0 +1,187 @@
+import React from "react";
+import { FocusScope } from "react-aria";
+import { Form } from "react-aria-components";
+import type { Meta, StoryObj } from "@storybook/react";
+
+import { Flex } from "@phoenix/components";
+import { AnnotationSaveButton } from "@phoenix/components/annotation/AnnotationSaveButton";
+import { CategoricalAnnotationInput } from "@phoenix/components/annotation/CategoricalAnnotationInput";
+import { ContinuousAnnotationInput } from "@phoenix/components/annotation/ContinuousAnnotationInput";
+import { FreeformAnnotationInput } from "@phoenix/components/annotation/FreeformAnnotationInput";
+import {
+ AnnotationConfigCategorical,
+ AnnotationConfigContinuous,
+ AnnotationConfigFreeform,
+} from "@phoenix/pages/settings/types";
+/**
+ * Stories showcasing the annotation input components in a form
+ * to test keyboard navigation between different input types.
+ *
+ * Components are wrapped in \ to provide focus management.
+ */
+const meta = {
+ title: "AnnotationInputs",
+ parameters: {
+ layout: "centered",
+ },
+ tags: ["autodocs"],
+} satisfies Meta;
+
+export default meta;
+type Story = StoryObj;
+
+// Mock annotation configs
+const categoricalConfig: AnnotationConfigCategorical = {
+ id: "category",
+ name: "Category",
+ annotationType: "CATEGORICAL",
+ values: [
+ { label: "Option 1", score: null },
+ { label: "Option 2", score: null },
+ { label: "Option 3", score: null },
+ ],
+};
+
+const continuousConfig: AnnotationConfigContinuous = {
+ id: "rating",
+ name: "Rating",
+ annotationType: "CONTINUOUS",
+ lowerBound: 0,
+ upperBound: 10,
+};
+
+const freeformConfig: AnnotationConfigFreeform = {
+ id: "comment",
+ name: "Comments",
+ annotationType: "FREEFORM",
+ description: "Add any additional comments here",
+};
+
+export const Default: Story = {
+ render: () => (
+
+
+
+ ),
+};
+
+export const WithValidation: Story = {
+ render: () => (
+
+
+
+ ),
+};
+
+export const Disabled: Story = {
+ render: () => (
+
+
+
+ ),
+};
+
+export const WithDefaultValues: Story = {
+ render: () => (
+
+
+
+ ),
+};
+
+export const MixedSizes: Story = {
+ render: () => (
+
+
+
+ ),
+};
diff --git a/app/stories/Group.stories.tsx b/app/stories/Group.stories.tsx
new file mode 100644
index 0000000000..767598a5b2
--- /dev/null
+++ b/app/stories/Group.stories.tsx
@@ -0,0 +1,82 @@
+import React from "react";
+import type { Meta, StoryObj } from "@storybook/react";
+
+import { Button } from "../src/components/button/Button";
+import { Group } from "../src/components/layout/Group";
+
+/**
+ * Group visually connects a set of buttons or controls, making them appear as a single component.
+ *
+ * ## Usage
+ * ```tsx
+ *
+ *
+ *
+ *
+ *
+ * ```
+ *
+ * The `size` prop can be used to control the sizing of the group and its children.
+ */
+const meta: Meta = {
+ title: "Layout/Group",
+ component: Group,
+ parameters: {
+ layout: "centered",
+ },
+ argTypes: {
+ size: {
+ control: "radio",
+ options: ["S", "M", "L"],
+ description: "Size of the group and its children",
+ table: {
+ type: { summary: "ComponentSize" },
+ defaultValue: { summary: "M" },
+ },
+ },
+ },
+};
+
+export default meta;
+type Story = StoryObj;
+
+export const Default: Story = {
+ args: {
+ size: "M",
+ children: (
+ <>
+
+
+
+ >
+ ),
+ },
+ parameters: {
+ docs: {
+ description: {
+ story:
+ "A group of three buttons visually connected as a single component.",
+ },
+ },
+ },
+};
+
+export const Small: Story = {
+ args: {
+ size: "S",
+ children: (
+ <>
+
+
+
+ >
+ ),
+ },
+ parameters: {
+ docs: {
+ description: {
+ story: "Small size group.",
+ },
+ },
+ },
+};
diff --git a/app/stories/KeyboardToken.stories.tsx b/app/stories/KeyboardToken.stories.tsx
new file mode 100644
index 0000000000..8704fc6f0b
--- /dev/null
+++ b/app/stories/KeyboardToken.stories.tsx
@@ -0,0 +1,88 @@
+import React from "react";
+import type { Meta, StoryObj } from "@storybook/react";
+
+import { KeyboardToken } from "../src/components/KeyboardToken";
+
+/**
+ * KeyboardToken visually represents a keyboard key or shortcut, styled to look like a keyboard key.
+ * Useful for documentation, tooltips, or UI hints where you want to show keyboard commands.
+ *
+ * ## Usage
+ * ```tsx
+ * ⌘
+ * Ctrl
+ * Shift + Enter
+ * ```
+ *
+ * ## Features
+ * - Styled with design tokens for consistency
+ * - Supports custom children (text or symbols)
+ * - Can be used inline with text
+ */
+const meta: Meta = {
+ title: "Content/KeyboardToken",
+ component: KeyboardToken,
+ parameters: {
+ layout: "centered",
+ docs: {
+ description: {
+ component:
+ "A component for displaying keyboard keys or shortcuts in a visually distinct way.",
+ },
+ },
+ },
+ tags: ["autodocs"],
+ argTypes: {
+ children: {
+ control: "text",
+ description: "The key or shortcut to display",
+ table: {
+ type: { summary: "ReactNode" },
+ },
+ },
+ },
+};
+
+export default meta;
+type Story = StoryObj;
+
+/**
+ * Default usage with a single key.
+ */
+export const Default: Story = {
+ args: {
+ children: "⌘",
+ },
+};
+
+/**
+ * Shows a common keyboard shortcut.
+ */
+export const Shortcut: Story = {
+ args: {
+ children: "Ctrl + S",
+ },
+};
+
+/**
+ * Shows a multi-key combination.
+ */
+export const MultiKey: Story = {
+ args: {
+ children: "Shift + Enter",
+ },
+};
+
+/**
+ * Shows the component inline with text.
+ */
+export const InlineWithText: Story = {
+ render: (args) => (
+
+ Press to save your work.
+
+ ),
+ args: {
+ children: "Ctrl + S",
+ },
+};
diff --git a/app/stories/Skeleton.stories.tsx b/app/stories/Skeleton.stories.tsx
index a0271a4ac9..e570302e65 100644
--- a/app/stories/Skeleton.stories.tsx
+++ b/app/stories/Skeleton.stories.tsx
@@ -1,7 +1,7 @@
import React from "react";
import type { Meta, StoryObj } from "@storybook/react";
-import { Flex, Skeleton } from "@phoenix/components";
+import { ContentSkeleton, Flex, Skeleton } from "@phoenix/components";
const meta: Meta = {
title: "Skeleton",
@@ -56,3 +56,7 @@ export const Card: Story = {
),
};
+
+export const Content: Story = {
+ render: () => ,
+};
diff --git a/js/packages/phoenix-client/src/__generated__/api/v1.ts b/js/packages/phoenix-client/src/__generated__/api/v1.ts
index 981d9b4cee..62c6c9177d 100644
--- a/js/packages/phoenix-client/src/__generated__/api/v1.ts
+++ b/js/packages/phoenix-client/src/__generated__/api/v1.ts
@@ -4,6 +4,79 @@
*/
export interface paths {
+ "/v1/annotation_configs": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ /**
+ * List annotation configurations
+ * @description Retrieve a paginated list of all annotation configurations in the system.
+ */
+ get: operations["list_annotation_configs_v1_annotation_configs_get"];
+ put?: never;
+ /** Create an annotation configuration */
+ post: operations["create_annotation_config_v1_annotation_configs_post"];
+ delete?: never;
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
+ };
+ "/v1/annotation_configs/{config_identifier}": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ /** Get an annotation configuration by ID or name */
+ get: operations["get_annotation_config_by_name_or_id_v1_annotation_configs__config_identifier__get"];
+ put?: never;
+ post?: never;
+ delete?: never;
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
+ };
+ "/v1/annotation_configs/{config_id}": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ get?: never;
+ /** Update an annotation configuration */
+ put: operations["update_annotation_config_v1_annotation_configs__config_id__put"];
+ post?: never;
+ /** Delete an annotation configuration */
+ delete: operations["delete_annotation_config_v1_annotation_configs__config_id__delete"];
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
+ };
+ "/v1/projects/{project_identifier}/span_annotations": {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ /** Get span annotations for a list of span_ids. */
+ get: operations["listSpanAnnotationsBySpanIds"];
+ put?: never;
+ post?: never;
+ delete?: never;
+ options?: never;
+ head?: never;
+ patch?: never;
+ trace?: never;
+ };
"/v1/datasets": {
parameters: {
query?: never;
@@ -219,7 +292,7 @@ export interface paths {
};
get?: never;
put?: never;
- /** Create or update span annotations */
+ /** Create span annotations */
post: operations["annotateSpans"];
delete?: never;
options?: never;
@@ -432,13 +505,95 @@ export interface components {
/** AnnotateSpansRequestBody */
AnnotateSpansRequestBody: {
/** Data */
- data: components["schemas"]["SpanAnnotation"][];
+ data: components["schemas"]["SpanAnnotationData"][];
};
/** AnnotateSpansResponseBody */
AnnotateSpansResponseBody: {
/** Data */
data: components["schemas"]["InsertedSpanAnnotation"][];
};
+ /** CategoricalAnnotationConfig */
+ CategoricalAnnotationConfig: {
+ /** Name */
+ name: string;
+ /**
+ * @description discriminator enum property added by openapi-typescript
+ * @enum {string}
+ */
+ type: "CATEGORICAL";
+ /** Description */
+ description?: string | null;
+ optimization_direction: components["schemas"]["OptimizationDirection"];
+ /** Values */
+ values: components["schemas"]["CategoricalAnnotationValue"][];
+ /** Id */
+ id: string;
+ };
+ /** CategoricalAnnotationConfigData */
+ CategoricalAnnotationConfigData: {
+ /** Name */
+ name: string;
+ /**
+ * @description discriminator enum property added by openapi-typescript
+ * @enum {string}
+ */
+ type: "CATEGORICAL";
+ /** Description */
+ description?: string | null;
+ optimization_direction: components["schemas"]["OptimizationDirection"];
+ /** Values */
+ values: components["schemas"]["CategoricalAnnotationValue"][];
+ };
+ /** CategoricalAnnotationValue */
+ CategoricalAnnotationValue: {
+ /** Label */
+ label: string;
+ /** Score */
+ score?: number | null;
+ };
+ /** ContinuousAnnotationConfig */
+ ContinuousAnnotationConfig: {
+ /** Name */
+ name: string;
+ /**
+ * @description discriminator enum property added by openapi-typescript
+ * @enum {string}
+ */
+ type: "CONTINUOUS";
+ /** Description */
+ description?: string | null;
+ optimization_direction: components["schemas"]["OptimizationDirection"];
+ /** Lower Bound */
+ lower_bound?: number | null;
+ /** Upper Bound */
+ upper_bound?: number | null;
+ /** Id */
+ id: string;
+ };
+ /** ContinuousAnnotationConfigData */
+ ContinuousAnnotationConfigData: {
+ /** Name */
+ name: string;
+ /**
+ * @description discriminator enum property added by openapi-typescript
+ * @enum {string}
+ */
+ type: "CONTINUOUS";
+ /** Description */
+ description?: string | null;
+ optimization_direction: components["schemas"]["OptimizationDirection"];
+ /** Lower Bound */
+ lower_bound?: number | null;
+ /** Upper Bound */
+ upper_bound?: number | null;
+ };
+ /** CreateAnnotationConfigData */
+ CreateAnnotationConfigData: components["schemas"]["CategoricalAnnotationConfigData"] | components["schemas"]["ContinuousAnnotationConfigData"] | components["schemas"]["FreeformAnnotationConfigData"];
+ /** CreateAnnotationConfigResponseBody */
+ CreateAnnotationConfigResponseBody: {
+ /** Data */
+ data: components["schemas"]["CategoricalAnnotationConfig"] | components["schemas"]["ContinuousAnnotationConfig"] | components["schemas"]["FreeformAnnotationConfig"];
+ };
/**
* CreateExperimentRequestBody
* @description Details of the experiment to be created
@@ -583,6 +738,11 @@ export interface components {
/** Example Count */
example_count: number;
};
+ /** DeleteAnnotationConfigResponseBody */
+ DeleteAnnotationConfigResponseBody: {
+ /** Data */
+ data: components["schemas"]["CategoricalAnnotationConfig"] | components["schemas"]["ContinuousAnnotationConfig"] | components["schemas"]["FreeformAnnotationConfig"];
+ };
/** Experiment */
Experiment: {
/**
@@ -630,6 +790,44 @@ export interface components {
*/
updated_at: string;
};
+ /** FreeformAnnotationConfig */
+ FreeformAnnotationConfig: {
+ /** Name */
+ name: string;
+ /**
+ * @description discriminator enum property added by openapi-typescript
+ * @enum {string}
+ */
+ type: "FREEFORM";
+ /** Description */
+ description?: string | null;
+ /** Id */
+ id: string;
+ };
+ /** FreeformAnnotationConfigData */
+ FreeformAnnotationConfigData: {
+ /** Name */
+ name: string;
+ /**
+ * @description discriminator enum property added by openapi-typescript
+ * @enum {string}
+ */
+ type: "FREEFORM";
+ /** Description */
+ description?: string | null;
+ };
+ /** GetAnnotationConfigResponseBody */
+ GetAnnotationConfigResponseBody: {
+ /** Data */
+ data: components["schemas"]["CategoricalAnnotationConfig"] | components["schemas"]["ContinuousAnnotationConfig"] | components["schemas"]["FreeformAnnotationConfig"];
+ };
+ /** GetAnnotationConfigsResponseBody */
+ GetAnnotationConfigsResponseBody: {
+ /** Data */
+ data: (components["schemas"]["CategoricalAnnotationConfig"] | components["schemas"]["ContinuousAnnotationConfig"] | components["schemas"]["FreeformAnnotationConfig"])[];
+ /** Next Cursor */
+ next_cursor: string | null;
+ };
/** GetDatasetResponseBody */
GetDatasetResponseBody: {
data: components["schemas"]["DatasetWithExampleCount"];
@@ -726,6 +924,11 @@ export interface components {
* @enum {string}
*/
ModelProvider: "OPENAI" | "AZURE_OPENAI" | "ANTHROPIC" | "GOOGLE";
+ /**
+ * OptimizationDirection
+ * @enum {string}
+ */
+ OptimizationDirection: "MINIMIZE" | "MAXIMIZE" | "NONE";
/** Project */
Project: {
/** Name */
@@ -1080,7 +1283,7 @@ export interface components {
* @description The kind of annotator used for the annotation
* @enum {string}
*/
- annotator_kind: "LLM" | "HUMAN";
+ annotator_kind: "LLM" | "CODE" | "HUMAN";
/** @description The result of the annotation */
result?: components["schemas"]["SpanAnnotationResult"] | null;
/**
@@ -1090,6 +1293,63 @@ export interface components {
metadata?: {
[key: string]: unknown;
} | null;
+ /**
+ * Identifier
+ * @description The identifier of the annotation. If provided, the annotation will be updated if it already exists.
+ */
+ identifier?: string | null;
+ /** Id */
+ id: string;
+ /**
+ * Created At
+ * Format: date-time
+ */
+ created_at: string;
+ /**
+ * Updated At
+ * Format: date-time
+ */
+ updated_at: string;
+ /**
+ * Source
+ * @enum {string}
+ */
+ source: "API" | "APP";
+ /** User Id */
+ user_id: string | null;
+ };
+ /** SpanAnnotationData */
+ SpanAnnotationData: {
+ /**
+ * Span Id
+ * @description OpenTelemetry Span ID (hex format w/o 0x prefix)
+ */
+ span_id: string;
+ /**
+ * Name
+ * @description The name of the annotation
+ */
+ name: string;
+ /**
+ * Annotator Kind
+ * @description The kind of annotator used for the annotation
+ * @enum {string}
+ */
+ annotator_kind: "LLM" | "CODE" | "HUMAN";
+ /** @description The result of the annotation */
+ result?: components["schemas"]["SpanAnnotationResult"] | null;
+ /**
+ * Metadata
+ * @description Metadata for the annotation
+ */
+ metadata?: {
+ [key: string]: unknown;
+ } | null;
+ /**
+ * Identifier
+ * @description The identifier of the annotation. If provided, the annotation will be updated if it already exists.
+ */
+ identifier?: string | null;
};
/** SpanAnnotationResult */
SpanAnnotationResult: {
@@ -1109,6 +1369,13 @@ export interface components {
*/
explanation?: string | null;
};
+ /** SpanAnnotationsResponseBody */
+ SpanAnnotationsResponseBody: {
+ /** Data */
+ data: components["schemas"]["SpanAnnotation"][];
+ /** Next Cursor */
+ next_cursor: string | null;
+ };
/** TextContentPart */
TextContentPart: {
/**
@@ -1157,6 +1424,11 @@ export interface components {
[key: string]: unknown;
} | unknown[] | null;
};
+ /** UpdateAnnotationConfigResponseBody */
+ UpdateAnnotationConfigResponseBody: {
+ /** Data */
+ data: components["schemas"]["CategoricalAnnotationConfig"] | components["schemas"]["ContinuousAnnotationConfig"] | components["schemas"]["FreeformAnnotationConfig"];
+ };
/** UpdateProjectRequestBody */
UpdateProjectRequestBody: {
/** Description */
@@ -1193,6 +1465,275 @@ export interface components {
}
export type $defs = Record;
export interface operations {
+ list_annotation_configs_v1_annotation_configs_get: {
+ parameters: {
+ query?: {
+ /** @description Cursor for pagination (base64-encoded annotation config ID) */
+ cursor?: string | null;
+ /** @description Maximum number of configs to return */
+ limit?: number;
+ };
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ requestBody?: never;
+ responses: {
+ /** @description A list of annotation configurations with pagination information */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["GetAnnotationConfigsResponseBody"];
+ };
+ };
+ /** @description Forbidden */
+ 403: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
+ create_annotation_config_v1_annotation_configs_post: {
+ parameters: {
+ query?: never;
+ header?: never;
+ path?: never;
+ cookie?: never;
+ };
+ requestBody: {
+ content: {
+ "application/json": components["schemas"]["CreateAnnotationConfigData"];
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["CreateAnnotationConfigResponseBody"];
+ };
+ };
+ /** @description Forbidden */
+ 403: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
+ get_annotation_config_by_name_or_id_v1_annotation_configs__config_identifier__get: {
+ parameters: {
+ query?: never;
+ header?: never;
+ path: {
+ /** @description ID or name of the annotation configuration */
+ config_identifier: string;
+ };
+ cookie?: never;
+ };
+ requestBody?: never;
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["GetAnnotationConfigResponseBody"];
+ };
+ };
+ /** @description Forbidden */
+ 403: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
+ update_annotation_config_v1_annotation_configs__config_id__put: {
+ parameters: {
+ query?: never;
+ header?: never;
+ path: {
+ /** @description ID of the annotation configuration */
+ config_id: string;
+ };
+ cookie?: never;
+ };
+ requestBody: {
+ content: {
+ "application/json": components["schemas"]["CreateAnnotationConfigData"];
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["UpdateAnnotationConfigResponseBody"];
+ };
+ };
+ /** @description Forbidden */
+ 403: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
+ delete_annotation_config_v1_annotation_configs__config_id__delete: {
+ parameters: {
+ query?: never;
+ header?: never;
+ path: {
+ /** @description ID of the annotation configuration */
+ config_id: string;
+ };
+ cookie?: never;
+ };
+ requestBody?: never;
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["DeleteAnnotationConfigResponseBody"];
+ };
+ };
+ /** @description Forbidden */
+ 403: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
+ listSpanAnnotationsBySpanIds: {
+ parameters: {
+ query: {
+ /** @description One or more span id to fetch annotations for */
+ span_ids: string[];
+ /** @description A cursor for pagination */
+ cursor?: string | null;
+ /** @description The maximum number of annotations to return in a single request */
+ limit?: number;
+ };
+ header?: never;
+ path: {
+ /** @description The project identifier: either project ID or project name. If using a project name as the identifier, it cannot contain slash (/), question mark (?), or pound sign (#) characters. */
+ project_identifier: string;
+ };
+ cookie?: never;
+ };
+ requestBody?: never;
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "application/json": components["schemas"]["SpanAnnotationsResponseBody"];
+ };
+ };
+ /** @description Forbidden */
+ 403: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Project or spans not found */
+ 404: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ /** @description Invalid parameters */
+ 422: {
+ headers: {
+ [name: string]: unknown;
+ };
+ content: {
+ "text/plain": string;
+ };
+ };
+ };
+ };
listDatasets: {
parameters: {
query?: {
diff --git a/packages/phoenix-client/scripts/codegen/transform.py b/packages/phoenix-client/scripts/codegen/transform.py
index 548b381467..fa7e38bbad 100644
--- a/packages/phoenix-client/scripts/codegen/transform.py
+++ b/packages/phoenix-client/scripts/codegen/transform.py
@@ -136,6 +136,19 @@ def transform_dataclass(code: str) -> ast.AST:
)
parsed_ast.body.insert(index, import_notrequired)
break
+
+ # Remove top-level Union type definitions
+ parsed_ast.body = [
+ node
+ for node in parsed_ast.body
+ if not (
+ isinstance(node, ast.Assign)
+ and isinstance(node.value, ast.Subscript)
+ and isinstance(node.value.value, ast.Name)
+ and node.value.value.id == "Union"
+ )
+ ]
+
transformer = ConvertDataClassToTypedDict()
transformed_ast = transformer.visit(parsed_ast)
return transformed_ast
@@ -149,6 +162,7 @@ def transform_dataclass(code: str) -> ast.AST:
PARENTS: Mapping[str, Sequence[str]] = {
"Prompt": ["PromptData"],
"PromptVersion": ["PromptVersionData"],
+ "SpanAnnotation": ["SpanAnnotationData"],
}
@@ -344,7 +358,7 @@ def rewrite_file(
directory: Path = Path(sys.argv[1])
rewrite_file(
directory,
- ".dataclass.txt",
+ ".dataclass.py",
"__init__.py",
transform_dataclass,
)
diff --git a/packages/phoenix-client/src/phoenix/client/__generated__/v1/.dataclass.txt b/packages/phoenix-client/src/phoenix/client/__generated__/v1/.dataclass.txt
new file mode 100644
index 0000000000..2b735985ae
--- /dev/null
+++ b/packages/phoenix-client/src/phoenix/client/__generated__/v1/.dataclass.txt
@@ -0,0 +1,418 @@
+# generated by datamodel-codegen:
+# filename: openapi.json
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Literal, Mapping, Optional, Sequence, Union
+
+
+@dataclass
+class CreateExperimentRequestBody:
+ name: Optional[str] = None
+ description: Optional[str] = None
+ metadata: Optional[Mapping[str, Any]] = None
+ version_id: Optional[str] = None
+ repetitions: Optional[int] = 1
+
+
+@dataclass
+class Dataset:
+ id: str
+ name: str
+ description: Optional[str]
+ metadata: Mapping[str, Any]
+ created_at: str
+ updated_at: str
+
+
+@dataclass
+class DatasetExample:
+ id: str
+ input: Mapping[str, Any]
+ output: Mapping[str, Any]
+ metadata: Mapping[str, Any]
+ updated_at: str
+
+
+@dataclass
+class DatasetVersion:
+ version_id: str
+ description: Optional[str]
+ metadata: Mapping[str, Any]
+ created_at: str
+
+
+@dataclass
+class DatasetWithExampleCount:
+ id: str
+ name: str
+ description: Optional[str]
+ metadata: Mapping[str, Any]
+ created_at: str
+ updated_at: str
+ example_count: int
+
+
+@dataclass
+class Experiment:
+ id: str
+ dataset_id: str
+ dataset_version_id: str
+ repetitions: int
+ metadata: Mapping[str, Any]
+ project_name: Optional[str]
+ created_at: str
+ updated_at: str
+
+
+@dataclass
+class GetDatasetResponseBody:
+ data: DatasetWithExampleCount
+
+
+@dataclass
+class GetExperimentResponseBody:
+ data: Experiment
+
+
+@dataclass
+class InsertedSpanAnnotation:
+ id: str
+
+
+@dataclass
+class ListDatasetExamplesData:
+ dataset_id: str
+ version_id: str
+ examples: Sequence[DatasetExample]
+
+
+@dataclass
+class ListDatasetExamplesResponseBody:
+ data: ListDatasetExamplesData
+
+
+@dataclass
+class ListDatasetVersionsResponseBody:
+ data: Sequence[DatasetVersion]
+ next_cursor: Optional[str]
+
+
+@dataclass
+class ListDatasetsResponseBody:
+ data: Sequence[Dataset]
+ next_cursor: Optional[str]
+
+
+@dataclass
+class ListExperimentsResponseBody:
+ data: Sequence[Experiment]
+
+
+@dataclass
+class Prompt:
+ name: str
+ id: str
+ description: Optional[str] = None
+ source_prompt_id: Optional[str] = None
+
+
+@dataclass
+class PromptAnthropicInvocationParametersContent:
+ max_tokens: int
+ temperature: Optional[float] = None
+ top_p: Optional[float] = None
+ stop_sequences: Optional[Sequence[str]] = None
+
+
+@dataclass
+class PromptAzureOpenAIInvocationParametersContent:
+ temperature: Optional[float] = None
+ max_tokens: Optional[int] = None
+ frequency_penalty: Optional[float] = None
+ presence_penalty: Optional[float] = None
+ top_p: Optional[float] = None
+ seed: Optional[int] = None
+ reasoning_effort: Optional[Literal["low", "medium", "high"]] = None
+
+
+@dataclass
+class PromptData:
+ name: str
+ description: Optional[str] = None
+ source_prompt_id: Optional[str] = None
+
+
+@dataclass
+class PromptGoogleInvocationParametersContent:
+ temperature: Optional[float] = None
+ max_output_tokens: Optional[int] = None
+ stop_sequences: Optional[Sequence[str]] = None
+ presence_penalty: Optional[float] = None
+ frequency_penalty: Optional[float] = None
+ top_p: Optional[float] = None
+ top_k: Optional[int] = None
+
+
+@dataclass
+class PromptOpenAIInvocationParametersContent:
+ temperature: Optional[float] = None
+ max_tokens: Optional[int] = None
+ frequency_penalty: Optional[float] = None
+ presence_penalty: Optional[float] = None
+ top_p: Optional[float] = None
+ seed: Optional[int] = None
+ reasoning_effort: Optional[Literal["low", "medium", "high"]] = None
+
+
+@dataclass
+class PromptResponseFormatJSONSchemaDefinition:
+ name: str
+ description: Optional[str] = None
+ schema_: Optional[Mapping[str, Any]] = None
+ strict: Optional[bool] = None
+
+
+@dataclass
+class PromptStringTemplate:
+ template: str
+ type: Literal["string"]
+
+
+@dataclass
+class PromptToolChoiceNone:
+ type: Literal["none"]
+
+
+@dataclass
+class PromptToolChoiceOneOrMore:
+ type: Literal["one_or_more"]
+
+
+@dataclass
+class PromptToolChoiceSpecificFunctionTool:
+ function_name: str
+ type: Literal["specific_function"]
+
+
+@dataclass
+class PromptToolChoiceZeroOrMore:
+ type: Literal["zero_or_more"]
+
+
+@dataclass
+class PromptToolFunctionDefinition:
+ name: str
+ description: Optional[str] = None
+ parameters: Optional[Mapping[str, Any]] = None
+ strict: Optional[bool] = None
+
+
+@dataclass
+class SpanAnnotationResult:
+ label: Optional[str] = None
+ score: Optional[float] = None
+ explanation: Optional[str] = None
+
+
+@dataclass
+class TextContentPart:
+ text: str
+ type: Literal["text"]
+
+
+@dataclass
+class ToolCallFunction:
+ name: str
+ arguments: str
+ type: Literal["function"]
+
+
+@dataclass
+class ToolResultContentPart:
+ tool_call_id: str
+ tool_result: Optional[Union[bool, int, float, str, Mapping[str, Any], Sequence[Any]]]
+ type: Literal["tool_result"]
+
+
+@dataclass
+class UploadDatasetData:
+ dataset_id: str
+
+
+@dataclass
+class UploadDatasetResponseBody:
+ data: UploadDatasetData
+
+
+@dataclass
+class ValidationError:
+ loc: Sequence[Union[str, int]]
+ msg: str
+ type: str
+
+
+@dataclass
+class AnnotateSpansResponseBody:
+ data: Sequence[InsertedSpanAnnotation]
+
+
+@dataclass
+class CreateExperimentResponseBody:
+ data: Experiment
+
+
+@dataclass
+class GetPromptsResponseBody:
+ data: Sequence[Prompt]
+
+
+@dataclass
+class HTTPValidationError:
+ detail: Optional[Sequence[ValidationError]] = None
+
+
+@dataclass
+class PromptAnthropicInvocationParameters:
+ anthropic: PromptAnthropicInvocationParametersContent
+ type: Literal["anthropic"]
+
+
+@dataclass
+class PromptAzureOpenAIInvocationParameters:
+ azure_openai: PromptAzureOpenAIInvocationParametersContent
+ type: Literal["azure_openai"]
+
+
+@dataclass
+class PromptGoogleInvocationParameters:
+ google: PromptGoogleInvocationParametersContent
+ type: Literal["google"]
+
+
+@dataclass
+class PromptOpenAIInvocationParameters:
+ openai: PromptOpenAIInvocationParametersContent
+ type: Literal["openai"]
+
+
+@dataclass
+class PromptResponseFormatJSONSchema:
+ json_schema: PromptResponseFormatJSONSchemaDefinition
+ type: str = "json_schema"
+
+
+@dataclass
+class PromptToolFunction:
+ function: PromptToolFunctionDefinition
+ type: str = "function"
+
+
+@dataclass
+class PromptTools:
+ tools: Sequence[PromptToolFunction]
+ type: str = "tools"
+ tool_choice: Optional[
+ Union[
+ PromptToolChoiceNone,
+ PromptToolChoiceZeroOrMore,
+ PromptToolChoiceOneOrMore,
+ PromptToolChoiceSpecificFunctionTool,
+ ]
+ ] = None
+ disable_parallel_tool_calls: Optional[bool] = None
+
+
+@dataclass
+class SpanAnnotation:
+ span_id: str
+ name: str
+ annotator_kind: Literal["LLM", "HUMAN"]
+ result: Optional[SpanAnnotationResult] = None
+ metadata: Optional[Mapping[str, Any]] = None
+
+
+@dataclass
+class ToolCallContentPart:
+ tool_call_id: str
+ tool_call: ToolCallFunction
+ type: Literal["tool_call"]
+
+
+@dataclass
+class AnnotateSpansRequestBody:
+ data: Sequence[SpanAnnotation]
+
+
+@dataclass
+class PromptMessage:
+ role: Literal["user", "assistant", "model", "ai", "tool", "system", "developer"]
+ content: Union[
+ str,
+ Sequence[Union[TextContentPart, ToolCallContentPart, ToolResultContentPart]],
+ ]
+
+
+@dataclass
+class PromptChatTemplate:
+ messages: Sequence[PromptMessage]
+ type: Literal["chat"]
+
+
+@dataclass
+class PromptVersion:
+ model_provider: Literal["OPENAI", "AZURE_OPENAI", "ANTHROPIC", "GOOGLE"]
+ model_name: str
+ template: Union[PromptChatTemplate, PromptStringTemplate]
+ template_type: Literal["STR", "CHAT"]
+ template_format: Literal["MUSTACHE", "F_STRING", "NONE"]
+ invocation_parameters: Union[
+ PromptOpenAIInvocationParameters,
+ PromptAzureOpenAIInvocationParameters,
+ PromptAnthropicInvocationParameters,
+ PromptGoogleInvocationParameters,
+ ]
+ id: str
+ description: Optional[str] = None
+ tools: Optional[PromptTools] = None
+ response_format: Optional[PromptResponseFormatJSONSchema] = None
+
+
+@dataclass
+class PromptVersionData:
+ model_provider: Literal["OPENAI", "AZURE_OPENAI", "ANTHROPIC", "GOOGLE"]
+ model_name: str
+ template: Union[PromptChatTemplate, PromptStringTemplate]
+ template_type: Literal["STR", "CHAT"]
+ template_format: Literal["MUSTACHE", "F_STRING", "NONE"]
+ invocation_parameters: Union[
+ PromptOpenAIInvocationParameters,
+ PromptAzureOpenAIInvocationParameters,
+ PromptAnthropicInvocationParameters,
+ PromptGoogleInvocationParameters,
+ ]
+ description: Optional[str] = None
+ tools: Optional[PromptTools] = None
+ response_format: Optional[PromptResponseFormatJSONSchema] = None
+
+
+@dataclass
+class CreatePromptRequestBody:
+ prompt: PromptData
+ version: PromptVersionData
+
+
+@dataclass
+class CreatePromptResponseBody:
+ data: PromptVersion
+
+
+@dataclass
+class GetPromptResponseBody:
+ data: PromptVersion
+
+
+@dataclass
+class GetPromptVersionsResponseBody:
+ data: Sequence[PromptVersion]
diff --git a/packages/phoenix-client/src/phoenix/client/__generated__/v1/.gitignore b/packages/phoenix-client/src/phoenix/client/__generated__/v1/.gitignore
index da7da1ad1d..6a2e0145fe 100644
--- a/packages/phoenix-client/src/phoenix/client/__generated__/v1/.gitignore
+++ b/packages/phoenix-client/src/phoenix/client/__generated__/v1/.gitignore
@@ -1 +1 @@
-/.dataclass.txt
+/.dataclass.py
diff --git a/packages/phoenix-client/src/phoenix/client/__generated__/v1/__init__.py b/packages/phoenix-client/src/phoenix/client/__generated__/v1/__init__.py
index 1bfc0ba06b..ffdb56069b 100644
--- a/packages/phoenix-client/src/phoenix/client/__generated__/v1/__init__.py
+++ b/packages/phoenix-client/src/phoenix/client/__generated__/v1/__init__.py
@@ -7,6 +7,11 @@
from typing_extensions import NotRequired
+class CategoricalAnnotationValue(TypedDict):
+ label: str
+ score: NotRequired[float]
+
+
class CreateExperimentRequestBody(TypedDict):
name: NotRequired[str]
description: NotRequired[str]
@@ -65,6 +70,19 @@ class Experiment(TypedDict):
updated_at: str
+class FreeformAnnotationConfig(TypedDict):
+ type: Literal["FREEFORM"]
+ name: str
+ id: str
+ description: NotRequired[str]
+
+
+class FreeformAnnotationConfigData(TypedDict):
+ type: Literal["FREEFORM"]
+ name: str
+ description: NotRequired[str]
+
+
class GetDatasetResponseBody(TypedDict):
data: DatasetWithExampleCount
@@ -254,6 +272,46 @@ class AnnotateSpansResponseBody(TypedDict):
data: Sequence[InsertedSpanAnnotation]
+class CategoricalAnnotationConfig(TypedDict):
+ type: Literal["CATEGORICAL"]
+ name: str
+ optimization_direction: Literal["MINIMIZE", "MAXIMIZE", "NONE"]
+ values: Sequence[CategoricalAnnotationValue]
+ id: str
+ description: NotRequired[str]
+
+
+class CategoricalAnnotationConfigData(TypedDict):
+ type: Literal["CATEGORICAL"]
+ name: str
+ optimization_direction: Literal["MINIMIZE", "MAXIMIZE", "NONE"]
+ values: Sequence[CategoricalAnnotationValue]
+ description: NotRequired[str]
+
+
+class ContinuousAnnotationConfig(TypedDict):
+ type: Literal["CONTINUOUS"]
+ name: str
+ optimization_direction: Literal["MINIMIZE", "MAXIMIZE", "NONE"]
+ id: str
+ description: NotRequired[str]
+ lower_bound: NotRequired[float]
+ upper_bound: NotRequired[float]
+
+
+class ContinuousAnnotationConfigData(TypedDict):
+ type: Literal["CONTINUOUS"]
+ name: str
+ optimization_direction: Literal["MINIMIZE", "MAXIMIZE", "NONE"]
+ description: NotRequired[str]
+ lower_bound: NotRequired[float]
+ upper_bound: NotRequired[float]
+
+
+class CreateAnnotationConfigResponseBody(TypedDict):
+ data: Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig]
+
+
class CreateExperimentResponseBody(TypedDict):
data: Experiment
@@ -262,6 +320,21 @@ class CreateProjectResponseBody(TypedDict):
data: Project
+class DeleteAnnotationConfigResponseBody(TypedDict):
+ data: Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig]
+
+
+class GetAnnotationConfigResponseBody(TypedDict):
+ data: Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig]
+
+
+class GetAnnotationConfigsResponseBody(TypedDict):
+ data: Sequence[
+ Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig]
+ ]
+ next_cursor: Optional[str]
+
+
class GetProjectResponseBody(TypedDict):
data: Project
@@ -334,12 +407,26 @@ class PromptTools(TypedDict):
disable_parallel_tool_calls: NotRequired[bool]
-class SpanAnnotation(TypedDict):
+class SpanAnnotationData(TypedDict):
span_id: str
name: str
- annotator_kind: Literal["LLM", "HUMAN"]
+ annotator_kind: Literal["LLM", "CODE", "HUMAN"]
result: NotRequired[SpanAnnotationResult]
metadata: NotRequired[Mapping[str, Any]]
+ identifier: NotRequired[str]
+
+
+class SpanAnnotation(SpanAnnotationData):
+ id: str
+ created_at: str
+ updated_at: str
+ source: Literal["API", "APP"]
+ user_id: Optional[str]
+
+
+class SpanAnnotationsResponseBody(TypedDict):
+ data: Sequence[SpanAnnotation]
+ next_cursor: Optional[str]
class ToolCallContentPart(TypedDict):
@@ -348,8 +435,12 @@ class ToolCallContentPart(TypedDict):
tool_call: ToolCallFunction
+class UpdateAnnotationConfigResponseBody(TypedDict):
+ data: Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig]
+
+
class AnnotateSpansRequestBody(TypedDict):
- data: Sequence[SpanAnnotation]
+ data: Sequence[SpanAnnotationData]
class PromptAnthropicInvocationParameters(TypedDict):
diff --git a/packages/phoenix-client/src/phoenix/client/client.py b/packages/phoenix-client/src/phoenix/client/client.py
index 4c7678a6a1..29a2483369 100644
--- a/packages/phoenix-client/src/phoenix/client/client.py
+++ b/packages/phoenix-client/src/phoenix/client/client.py
@@ -4,6 +4,7 @@
import httpx
+from phoenix.client.resources.annotations import Annotations, AsyncAnnotations
from phoenix.client.resources.projects import AsyncProjects, Projects
from phoenix.client.resources.prompts import AsyncPrompts, Prompts
from phoenix.client.resources.spans import AsyncSpans, Spans
@@ -53,6 +54,7 @@ def _client(self, value: httpx.Client) -> None:
self._prompts = Prompts(value)
self._projects = Projects(value)
self._spans = Spans(value)
+ self._annotations = Annotations(value)
@property
def prompts(self) -> Prompts:
@@ -85,6 +87,17 @@ def spans(self) -> Spans:
"""
return self._spans
+ @property
+ def annotations(self) -> Annotations:
+ """
+ Returns an instance of the Annotations class for interacting with annotation-related
+ API endpoints.
+
+ Returns:
+ Annotations: An instance of the Annotations class.
+ """ # noqa: E501
+ return self._annotations
+
class AsyncClient:
def __init__(
@@ -128,6 +141,7 @@ def _client(self, value: httpx.AsyncClient) -> None:
self._prompts = AsyncPrompts(value)
self._projects = AsyncProjects(value)
self._spans = AsyncSpans(value)
+ self._annotations = AsyncAnnotations(value)
@property
def prompts(self) -> AsyncPrompts:
@@ -162,6 +176,17 @@ def spans(self) -> AsyncSpans:
"""
return self._spans
+ @property
+ def annotations(self) -> AsyncAnnotations:
+ """
+ Returns an instance of the AsyncAnnotations class for interacting with annotation-related
+ API endpoints.
+
+ Returns:
+ AsyncAnnotations: An instance of the AsyncAnnotations class.
+ """ # noqa: E501
+ return self._annotations
+
def _update_headers(
headers: Optional[Mapping[str, str]],
diff --git a/packages/phoenix-client/src/phoenix/client/resources/annotations/__init__.py b/packages/phoenix-client/src/phoenix/client/resources/annotations/__init__.py
new file mode 100644
index 0000000000..988546c4c6
--- /dev/null
+++ b/packages/phoenix-client/src/phoenix/client/resources/annotations/__init__.py
@@ -0,0 +1,773 @@
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING, Any, Iterable, Iterator, Literal, Optional, cast, get_args
+
+import httpx
+from typing_extensions import TypeAlias
+
+from phoenix.client.__generated__ import v1
+
+if TYPE_CHECKING:
+ import pandas as pd
+
+logger = logging.getLogger(__name__)
+
+_AnnotatorKind: TypeAlias = Literal["LLM", "CODE", "HUMAN"]
+_VALID_ANNOTATOR_KINDS: frozenset[_AnnotatorKind] = frozenset(get_args(_AnnotatorKind))
+_DATAFRAME_CHUNK_SIZE = 100
+
+
+class Annotations:
+ """Client for interacting with the Annotations API endpoints.
+
+ This class provides synchronous methods for creating and managing span annotations.
+
+ Example:
+ ```python
+ from phoenix.client import Client
+
+ client = Client()
+ annotation = client.annotations.add_span_annotation(
+ annotation_name="sentiment",
+ span_id="abc123",
+ label="positive",
+ score=0.9,
+ )
+ ```
+ """ # noqa: E501
+
+ def __init__(self, client: httpx.Client) -> None:
+ """Initialize the Annotations client.
+
+ Args:
+ client: The httpx client to use for making requests.
+ """
+ self._client = client
+
+ def add_span_annotation(
+ self,
+ *,
+ span_id: str,
+ annotation_name: str,
+ annotator_kind: Literal["LLM", "CODE", "HUMAN"] = "HUMAN",
+ label: Optional[str] = None,
+ score: Optional[float] = None,
+ explanation: Optional[str] = None,
+ metadata: Optional[dict[str, Any]] = None,
+ identifier: Optional[str] = None,
+ sync: bool = False,
+ ) -> Optional[v1.InsertedSpanAnnotation]:
+ """Add a single span annotation.
+
+ Args:
+ annotation_name: The name of the annotation.
+ span_id: The ID of the span to annotate.
+ annotator_kind: The kind of annotator used for the annotation. Must be one of "LLM", "CODE", or "HUMAN".
+ label: The label assigned by the annotation.
+ score: The score assigned by the annotation.
+ explanation: Explanation of the annotation result.
+ metadata: Additional metadata for the annotation.
+ identifier: An optional identifier for the annotation. Each annotation is uniquely identified by the combination
+ of name, span_id, and identifier (where a null identifier is equivalent to an empty string).
+ If an annotation with the same name, span_id, and identifier already exists, it will be updated.
+ Using a non-empty identifier allows you to have multiple annotations with the same name and span_id.
+ Most of the time, you can leave this as None - it will also update the record with identifier="" if it exists.
+ sync: If True, the request will be fulfilled synchronously and the response will contain
+ the inserted annotation ID. If False, the request will be processed asynchronously.
+
+ Returns:
+ If sync is True, the inserted span annotation containing an ID. If sync is False, None.
+
+ Raises:
+ httpx.HTTPError: If the request fails.
+ ValueError: If the response is invalid or if at least one of label, score, or explanation
+ is not provided.
+
+ Example:
+ ```python
+ from phoenix.client import Client
+
+ client = Client()
+ client.annotations.add_span_annotation(
+ annotation_name="sentiment",
+ span_id="abc123",
+ label="positive",
+ score=0.9,
+ explanation="The text expresses a positive sentiment.",
+ sync=True,
+ )
+ ```
+ """ # noqa: E501
+ anno = _get_span_annotation(
+ span_id=span_id,
+ annotation_name=annotation_name,
+ annotator_kind=annotator_kind,
+ label=label,
+ score=score,
+ explanation=explanation,
+ metadata=metadata,
+ identifier=identifier,
+ )
+ if res := self.log_span_annotations(span_annotations=[anno], sync=sync):
+ return res[0]
+ return None
+
+ def log_span_annotations_dataframe(
+ self,
+ *,
+ dataframe: pd.DataFrame,
+ annotator_kind: Optional[Literal["LLM", "CODE", "HUMAN"]] = None,
+ annotation_name: Optional[str] = None,
+ sync: bool = False,
+ ) -> Optional[list[v1.InsertedSpanAnnotation]]:
+ """Log multiple span annotations from a pandas DataFrame.
+
+ This method allows you to create multiple span annotations at once by providing the data in a pandas DataFrame.
+ The DataFrame can include either a `name` or `annotation_name` column (but not both) and an `annotator_kind` column,
+ or you can specify global values for all rows. The data is processed in chunks of 100 rows for efficient batch processing.
+
+ Args:
+ dataframe: A pandas DataFrame containing the annotation data. Must include either a "name" or "annotation_name" column
+ (but not both) or provide a global annotation_name parameter. Similarly, must include an "annotator_kind" column
+ or provide a global annotator_kind. The `span_id` can be either a column in the DataFrame or will be taken from
+ the DataFrame index. Optional columns include: "label", "score", "explanation", "metadata", and "identifier".
+ annotator_kind: Optional. The kind of annotator used for all annotations. If provided, this value will be used
+ for all rows and the DataFrame does not need to include an "annotator_kind" column.
+ Must be one of "LLM", "CODE", or "HUMAN".
+ annotation_name: Optional. The name to use for all annotations. If provided, this value will be used
+ for all rows and the DataFrame does not need to include a "name" or "annotation_name" column.
+ sync: If True, the request will be fulfilled synchronously and the response will contain
+ the inserted annotation IDs. If False, the request will be processed asynchronously.
+
+ Returns:
+ If sync is True, a list of all inserted span annotations. If sync is False, None.
+
+ Raises:
+ ImportError: If pandas is not installed.
+ ValueError: If the DataFrame is missing required columns, if both "name" and "annotation_name" columns are present,
+ or if no valid annotation data is provided.
+
+ Example:
+ ```python
+ import pandas as pd
+
+ # Using name and annotator_kind from DataFrame
+ df1 = pd.DataFrame({
+ "name": ["sentiment", "toxicity"],
+ "annotator_kind": ["HUMAN", "LLM"],
+ "label": ["positive", "low"],
+ "score": [0.9, 0.1]
+ })
+ client.annotations.log_span_annotations_dataframe(dataframe=df1)
+
+ # Using annotation_name and annotator_kind from DataFrame
+ df2 = pd.DataFrame({
+ "annotation_name": ["sentiment", "toxicity"],
+ "annotator_kind": ["HUMAN", "LLM"],
+ "label": ["positive", "low"],
+ "score": [0.9, 0.1]
+ })
+ client.annotations.log_span_annotations_dataframe(dataframe=df2)
+
+ # Using global name and annotator_kind
+ df3 = pd.DataFrame({
+ "label": ["positive", "low"]
+ }, index=["span1", "span2"])
+ client.annotations.log_span_annotations_dataframe(
+ dataframe=df3,
+ annotation_name="sentiment", # applies to all rows
+ annotator_kind="HUMAN" # applies to all rows
+ )
+ ```
+ """ # noqa: E501
+ # Process DataFrame chunks using iterator
+ all_responses: list[v1.InsertedSpanAnnotation] = []
+ for chunk in _chunk_dataframe(
+ dataframe=dataframe,
+ annotation_name=annotation_name,
+ annotator_kind=annotator_kind,
+ chunk_size=_DATAFRAME_CHUNK_SIZE,
+ ):
+ # Delegate to log_span_annotations
+ response = self.log_span_annotations(span_annotations=chunk, sync=sync)
+ if sync and response:
+ all_responses.extend(response)
+
+ return all_responses if sync else None
+
+ def log_span_annotations(
+ self,
+ *,
+ span_annotations: Iterable[v1.SpanAnnotationData],
+ sync: bool = False,
+ ) -> Optional[list[v1.InsertedSpanAnnotation]]:
+ """Log multiple span annotations.
+
+ Args:
+ span_annotations: An iterable of span annotation data to log. Each annotation must include
+ at least a span_id, name, and annotator_kind, and at least one of label, score, or explanation.
+ sync: If True, the request will be fulfilled synchronously and the response will contain
+ the inserted annotation IDs. If False, the request will be processed asynchronously.
+
+ Returns:
+ If sync is True, a list of inserted span annotations, each containing an ID. If sync is False, None.
+
+ Raises:
+ httpx.HTTPError: If the request fails.
+ ValueError: If the response is invalid or if the input is invalid.
+ """ # noqa: E501
+ # Convert to list and validate input
+ annotations_list = list(span_annotations)
+ if not annotations_list:
+ raise ValueError("span_annotations cannot be empty")
+
+ url = "v1/span_annotations"
+ params = {"sync": sync} if sync else {}
+ json_ = v1.AnnotateSpansRequestBody(data=annotations_list)
+ response = self._client.post(url=url, json=json_, params=params)
+ response.raise_for_status()
+ if not sync:
+ return None
+ return list(cast(v1.AnnotateSpansResponseBody, response.json())["data"])
+
+
+class AsyncAnnotations:
+ """Asynchronous client for interacting with the Annotations API endpoints.
+
+ This class provides asynchronous methods for creating and managing span annotations.
+
+ Example:
+ ```python
+ from phoenix.client import AsyncClient
+
+ async_client = AsyncClient()
+ annotation = await async_client.annotations.add_span_annotation(
+ annotation_name="sentiment",
+ span_id="abc123",
+ label="positive",
+ score=0.9,
+ )
+ ```
+ """ # noqa: E501
+
+ def __init__(self, client: httpx.AsyncClient) -> None:
+ """Initialize the AsyncAnnotations client.
+
+ Args:
+ client: The httpx async client to use for making requests.
+ """
+ self._client = client
+
+ async def add_span_annotation(
+ self,
+ *,
+ span_id: str,
+ annotation_name: str,
+ annotator_kind: Literal["LLM", "CODE", "HUMAN"] = "HUMAN",
+ label: Optional[str] = None,
+ score: Optional[float] = None,
+ explanation: Optional[str] = None,
+ metadata: Optional[dict[str, Any]] = None,
+ identifier: Optional[str] = None,
+ sync: bool = False,
+ ) -> Optional[v1.InsertedSpanAnnotation]:
+ """Add a single span annotation asynchronously.
+
+ Args:
+ annotation_name: The name of the annotation.
+ span_id: The ID of the span to annotate.
+ annotator_kind: The kind of annotator used for the annotation. Must be one of "LLM", "CODE", or "HUMAN".
+ label: The label assigned by the annotation.
+ score: The score assigned by the annotation.
+ explanation: Explanation of the annotation result.
+ metadata: Additional metadata for the annotation.
+ identifier: An optional identifier for the annotation. Each annotation is uniquely identified by the combination
+ of name, span_id, and identifier (where a null identifier is equivalent to an empty string).
+ If an annotation with the same name, span_id, and identifier already exists, it will be updated.
+ Using a non-empty identifier allows you to have multiple annotations with the same name and span_id.
+ Most of the time, you can leave this as None - it will also update the record with identifier="" if it exists.
+ sync: If True, the request will be fulfilled synchronously and the response will contain
+ the inserted annotation ID. If False, the request will be processed asynchronously.
+
+ Returns:
+ If sync is True, the inserted span annotation containing an ID. If sync is False, None.
+
+ Raises:
+ httpx.HTTPError: If the request fails.
+ ValueError: If the response is invalid or if at least one of label, score, or explanation
+ is not provided.
+
+ Example:
+ ```python
+ from phoenix.client import AsyncClient
+
+ async_client = AsyncClient()
+ await async_client.annotations.add_span_annotation(
+ annotation_name="sentiment",
+ span_id="abc123",
+ label="positive",
+ score=0.9,
+ explanation="The text expresses a positive sentiment.",
+ sync=True,
+ )
+ ```
+ """ # noqa: E501
+ anno = _get_span_annotation(
+ span_id=span_id,
+ annotation_name=annotation_name,
+ annotator_kind=annotator_kind,
+ label=label,
+ score=score,
+ explanation=explanation,
+ metadata=metadata,
+ identifier=identifier,
+ )
+ if res := await self.log_span_annotations(span_annotations=[anno], sync=sync):
+ return res[0]
+ return None
+
+ async def log_span_annotations_dataframe(
+ self,
+ *,
+ dataframe: pd.DataFrame,
+ annotation_name: Optional[str] = None,
+ annotator_kind: Optional[Literal["LLM", "CODE", "HUMAN"]] = None,
+ sync: bool = False,
+ ) -> Optional[list[v1.InsertedSpanAnnotation]]:
+ """Log multiple span annotations from a pandas DataFrame asynchronously.
+
+ This method allows you to create multiple span annotations at once by providing the data in a pandas DataFrame.
+ The DataFrame can include either a `name` or `annotation_name` column (but not both) and an `annotator_kind` column, or you can specify global values for all rows.
+ The data is processed in chunks of 100 rows for efficient batch processing.
+
+ Args:
+ dataframe: A pandas DataFrame containing the annotation data. Must include either a "name" or "annotation_name" column
+ (but not both) or provide a global annotation_name parameter. Similarly, must include an "annotator_kind" column or provide a global annotator_kind.
+ The `span_id` can be either a column in the DataFrame or will be taken from the DataFrame index.
+ Optional columns include: "label", "score", "explanation", "metadata", and "identifier".
+ annotator_kind: Optional. The kind of annotator used for all annotations. If provided, this value will be used
+ for all rows and the DataFrame does not need to include an "annotator_kind" column.
+ Must be one of "LLM", "CODE", or "HUMAN".
+ annotation_name: Optional. The name to use for all annotations. If provided, this value will be used
+ for all rows and the DataFrame does not need to include a "name" or "annotation_name" column.
+ sync: If True, the request will be fulfilled synchronously and the response will contain
+ the inserted annotation IDs. If False, the request will be processed asynchronously.
+
+ Returns:
+ If sync is True, a list of all inserted span annotations. If sync is False, None.
+
+ Raises:
+ ImportError: If pandas is not installed.
+ ValueError: If the DataFrame is missing required columns or if no valid annotation data is provided.
+
+ Example:
+ ```python
+ import pandas as pd
+
+ # Using name and annotator_kind from DataFrame
+ df1 = pd.DataFrame({
+ "name": ["sentiment", "toxicity"],
+ "annotator_kind": ["HUMAN", "LLM"],
+ "label": ["positive", "low"],
+ "score": [0.9, 0.1]
+ })
+ await async_client.annotations.log_span_annotations_dataframe(dataframe=df1)
+
+ # Using global name and annotator_kind
+ df2 = pd.DataFrame({
+ "label": ["positive", "low"]
+ }, index=["span1", "span2"])
+ await async_client.annotations.log_span_annotations_dataframe(
+ dataframe=df2,
+ annotation_name="sentiment", # applies to all rows
+ annotator_kind="HUMAN" # applies to all rows
+ )
+ ```
+ """ # noqa: E501
+ # Process DataFrame chunks using iterator
+ all_responses: list[v1.InsertedSpanAnnotation] = []
+ for chunk in _chunk_dataframe(
+ dataframe=dataframe,
+ annotation_name=annotation_name,
+ annotator_kind=annotator_kind,
+ chunk_size=_DATAFRAME_CHUNK_SIZE,
+ ):
+ # Delegate to log_span_annotations
+ response = await self.log_span_annotations(span_annotations=chunk, sync=sync)
+ if sync and response:
+ all_responses.extend(response)
+
+ return all_responses if sync else None
+
+ async def log_span_annotations(
+ self,
+ *,
+ span_annotations: Iterable[v1.SpanAnnotationData],
+ sync: bool = False,
+ ) -> Optional[list[v1.InsertedSpanAnnotation]]:
+ """Log multiple span annotations asynchronously.
+
+ Args:
+ span_annotations: An iterable of span annotation data to log. Each annotation must include
+ at least a span_id, name, and annotator_kind, and at least one of label, score, or explanation.
+ sync: If True, the request will be fulfilled synchronously and the response will contain
+ the inserted annotation IDs. If False, the request will be processed asynchronously.
+
+ Returns:
+ If sync is True, a list of inserted span annotations, each containing an ID. If sync is False, None.
+
+ Raises:
+ httpx.HTTPError: If the request fails.
+ ValueError: If the response is invalid.
+
+ Example:
+ ```python
+ from phoenix.client import AsyncClient
+
+ async_client = AsyncClient()
+
+ # Create span annotation data objects
+ annotation1 = {
+ "name": "sentiment",
+ "span_id": "span_123",
+ "annotator_kind": "HUMAN",
+ "result": {
+ "label": "positive",
+ "score": 0.9
+ },
+ "metadata": {"source": "user_feedback"}
+ }
+
+ annotation2 = {
+ "name": "toxicity",
+ "span_id": "span_456",
+ "annotator_kind": "LLM",
+ "result": {
+ "label": "low",
+ "score": 0.1,
+ "explanation": "No harmful content detected"
+ }
+ }
+
+ # Log multiple annotations at once
+ await async_client.annotations.log_span_annotations(
+ span_annotations=[annotation1, annotation2],
+ )
+ ```
+ """ # noqa: E501
+ url = "v1/span_annotations"
+ params = {"sync": sync} if sync else {}
+ json_ = v1.AnnotateSpansRequestBody(data=list(span_annotations))
+ response = await self._client.post(url=url, json=json_, params=params)
+ response.raise_for_status()
+ if not sync:
+ return None
+ return list(cast(v1.AnnotateSpansResponseBody, response.json())["data"])
+
+
+def _get_span_annotation(
+ *,
+ span_id: str,
+ annotation_name: str,
+ annotator_kind: Literal["LLM", "CODE", "HUMAN"] = "HUMAN",
+ label: Optional[str] = None,
+ score: Optional[float] = None,
+ explanation: Optional[str] = None,
+ metadata: Optional[dict[str, Any]] = None,
+ identifier: Optional[str] = None,
+) -> v1.SpanAnnotationData:
+ """Create a span annotation data object.
+
+ Args:
+ annotation_name: The name of the annotation.
+ span_id: The ID of the span to annotate.
+ annotator_kind: The kind of annotator used for the annotation. Must be one of "LLM", "CODE", or "HUMAN".
+ label: The label assigned by the annotation.
+ score: The score assigned by the annotation.
+ explanation: Explanation of the annotation result.
+ metadata: Additional metadata for the annotation.
+ identifier: An optional identifier for the annotation. Each annotation is uniquely identified by the combination
+ of name, span_id, and identifier (where a null identifier is equivalent to an empty string).
+ If an annotation with the same name, span_id, and identifier already exists, it will be updated.
+ Using a non-empty identifier allows you to have multiple annotations with the same name and span_id.
+ Most of the time, you can leave this as None - it will also update the record with
+ identifier="" if that record exists.
+
+ Returns:
+ A span annotation data object that can be used with the Annotations API.
+
+ Raises:
+ ValueError: If at least one of label, score, or explanation is not provided, or if required fields are invalid.
+ """ # noqa: E501
+ # Validate required fields
+ if not span_id or not isinstance(span_id, str): # pyright: ignore[reportUnnecessaryIsInstance]
+ raise ValueError("span_id must be a non-empty string")
+ if not annotation_name or not isinstance(annotation_name, str): # pyright: ignore[reportUnnecessaryIsInstance]
+ raise ValueError("annotation_name must be a non-empty string")
+ if annotator_kind not in _VALID_ANNOTATOR_KINDS:
+ raise ValueError(f"annotator_kind must be one of {_VALID_ANNOTATOR_KINDS}")
+
+ # Validate that at least one of label, score, or explanation is provided
+ if not label and score is None and not explanation:
+ raise ValueError("At least one of label, score, or explanation must be provided.")
+
+ # Validate score if provided
+ if score is not None and not isinstance(score, (int, float)): # pyright: ignore[reportUnnecessaryIsInstance]
+ raise ValueError("score must be a number")
+
+ # Validate metadata if provided
+ if metadata is not None and not isinstance(metadata, dict): # pyright: ignore[reportUnnecessaryIsInstance]
+ raise ValueError("metadata must be a dictionary")
+
+ result = v1.SpanAnnotationResult()
+ if label:
+ result["label"] = label
+ if score is not None:
+ result["score"] = score
+ if explanation:
+ result["explanation"] = explanation
+ anno = v1.SpanAnnotationData(
+ name=annotation_name,
+ span_id=span_id,
+ annotator_kind=annotator_kind,
+ result=result,
+ )
+ if metadata:
+ anno["metadata"] = metadata
+ if identifier and identifier.strip():
+ anno["identifier"] = identifier.strip()
+ return anno
+
+
+def _validate_dataframe(
+ *,
+ dataframe: pd.DataFrame,
+ annotation_name: Optional[str] = None,
+ annotator_kind: Optional[Literal["LLM", "CODE", "HUMAN"]] = None,
+) -> None:
+ """Internal function to validate that the DataFrame has the required columns and data.
+
+ This function performs comprehensive validation of the DataFrame structure and content,
+ including type checking, required columns, and value validation.
+
+ Args:
+ dataframe: The DataFrame to validate
+ annotation_name: Optional global name value. If provided, must be a non-empty string.
+ annotator_kind: Optional global annotator_kind value. Must be one of "LLM", "CODE", or "HUMAN".
+
+ Raises:
+ ValueError: If the DataFrame is missing required columns, if no valid annotation data is provided,
+ or if annotator_kind values are invalid.
+ TypeError: If the input is not a pandas DataFrame.
+ """ # noqa: E501
+ try:
+ import pandas as pd
+ except ImportError:
+ raise ImportError(
+ "Pandas is not installed. Please install pandas to use this method: "
+ "pip install pandas"
+ )
+
+ # Type check for DataFrame
+ if not isinstance(dataframe, pd.DataFrame): # pyright: ignore[reportUnnecessaryIsInstance]
+ raise TypeError(f"Expected pandas DataFrame, got {type(dataframe)}")
+
+ # Check if DataFrame is empty
+ if dataframe.empty:
+ raise ValueError("DataFrame cannot be empty")
+
+ # Validate global name if provided
+ if annotation_name is not None:
+ if not isinstance(annotation_name, str): # pyright: ignore[reportUnnecessaryIsInstance]
+ raise TypeError(f"Expected string for annotation_name, got {type(annotation_name)}")
+ if not annotation_name.strip():
+ raise ValueError("Annotation name cannot be empty or whitespace")
+
+ # Check for name/annotation_name columns
+ has_name = "name" in dataframe.columns
+ has_annotation_name = "annotation_name" in dataframe.columns
+ if has_name and has_annotation_name:
+ raise ValueError("DataFrame cannot have both 'name' and 'annotation_name' columns")
+ if not annotation_name and not has_name and not has_annotation_name:
+ raise ValueError(
+ "DataFrame must contain either 'name' or 'annotation_name' column, "
+ "or provide a global annotation_name parameter"
+ )
+
+ # Check for required columns
+ required_columns = set() # pyright: ignore[reportUnknownVariableType]
+ if annotator_kind is None:
+ required_columns.add("annotator_kind") # pyright: ignore[reportUnknownMemberType]
+
+ if not required_columns.issubset(dataframe.columns):
+ raise ValueError(
+ f"DataFrame must contain columns: {required_columns}. "
+ f"Found columns: {dataframe.columns.tolist()}"
+ )
+
+ # Check for non-null values in required columns
+ for col in required_columns: # pyright: ignore[reportUnknownVariableType]
+ if dataframe[col].isna().all(): # pyright: ignore[reportUnknownMemberType]
+ raise ValueError(f"Column '{col}' must contain at least one non-null value")
+
+ # Validate name values if no global name is provided
+ if annotation_name is None:
+ name_column = "annotation_name" if has_annotation_name else "name"
+ # Check for null/NaN values
+ if dataframe[name_column].isna().any(): # pyright: ignore[reportUnknownMemberType]
+ raise ValueError(f"{name_column} values cannot be None")
+ # Check for empty or whitespace-only strings
+ if (dataframe[name_column].str.strip() == "").any(): # pyright: ignore[reportUnknownMemberType]
+ raise ValueError(f"{name_column} values must be non-empty strings")
+ # Check for non-string values
+ if not all(isinstance(x, str) for x in dataframe[name_column]): # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType]
+ raise ValueError(f"{name_column} values must be strings")
+
+ # Check for span_id in either columns or index
+ if "span_id" not in dataframe.columns and not all(isinstance(x, str) for x in dataframe.index): # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType]
+ raise ValueError("DataFrame must have either a 'span_id' column or a string-based index")
+
+ # Validate span_id values if using column
+ if "span_id" in dataframe.columns:
+ # Check for None values
+ if dataframe["span_id"].isna().any(): # pyright: ignore[reportUnknownMemberType]
+ raise ValueError("span_id values cannot be None")
+ # Check for empty or whitespace-only strings
+ if (dataframe["span_id"].str.strip() == "").any(): # pyright: ignore[reportUnknownMemberType]
+ raise ValueError("span_id values must be non-empty strings")
+ # Check for non-string values
+ if not all(isinstance(x, str) for x in dataframe["span_id"]): # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType]
+ raise ValueError("span_id values must be strings")
+ # Validate index values if using index as span_id
+ else:
+ # Check for empty or whitespace-only strings
+ if (pd.Series(dataframe.index).str.strip() == "").any(): # pyright: ignore[reportUnknownMemberType]
+ raise ValueError("Index values must be non-empty strings when used as span_id")
+ # Check for non-string values
+ if not all(isinstance(x, str) for x in dataframe.index): # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType]
+ raise ValueError("Index values must be strings when used as span_id")
+
+ # Check global annotator_kind if provided
+ if annotator_kind is not None and annotator_kind not in _VALID_ANNOTATOR_KINDS:
+ raise ValueError(
+ f"Invalid annotator_kind value: {annotator_kind}. "
+ f"Must be one of: {_VALID_ANNOTATOR_KINDS}"
+ )
+
+ # Only check row-level annotator_kind values if no global value is provided
+ if annotator_kind is None and "annotator_kind" in dataframe.columns:
+ invalid_values = set(dataframe["annotator_kind"].dropna().unique()) - _VALID_ANNOTATOR_KINDS # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType]
+ if invalid_values:
+ raise ValueError(
+ f"Invalid annotator_kind values found in DataFrame: {invalid_values}. "
+ f"Must be one of: {_VALID_ANNOTATOR_KINDS}"
+ )
+
+
+def _chunk_dataframe(
+ *,
+ dataframe: pd.DataFrame,
+ annotation_name: Optional[str] = None,
+ annotator_kind: Optional[Literal["LLM", "CODE", "HUMAN"]] = None,
+ chunk_size: int = _DATAFRAME_CHUNK_SIZE,
+) -> Iterator[list[v1.SpanAnnotationData]]:
+ """Internal function to split a DataFrame into smaller chunks for batch processing.
+
+ This function processes the DataFrame in chunks of 100 rows for efficient batch processing.
+ It handles type conversion and validation of the data before creating span annotations.
+
+ Args:
+ dataframe: The DataFrame to split into chunks. Must contain either a 'span_id' column or a string-based index (used as the span_id).
+ annotation_name: Optional. The name to use for all annotations. If provided, this value will be used
+ for all rows and the DataFrame does not need to include a "name" column.
+ annotator_kind: Optional. The kind of annotator used for all annotations. If provided, this value will be used
+ for all rows and the DataFrame does not need to include an "annotator_kind" column.
+ Must be one of "LLM", "CODE", or "HUMAN".
+
+ Yields:
+ Lists of SpanAnnotationData objects, one chunk at a time.
+
+ Raises:
+ ValueError: If the DataFrame is invalid or if required fields are missing.
+ TypeError: If score values cannot be converted to float.
+ """ # noqa: E501
+ # Validate DataFrame upfront
+ _validate_dataframe(
+ dataframe=dataframe,
+ annotation_name=annotation_name,
+ annotator_kind=annotator_kind,
+ )
+
+ span_annotations = []
+ for idx, row in dataframe.iterrows(): # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType]
+ try:
+ # Get required fields with null checks
+ row_name = annotation_name
+ if row_name is None:
+ if "name" in row and bool(row["name"]): # pyright: ignore[reportUnknownArgumentType]
+ row_name = str(row["name"]) # pyright: ignore[reportUnknownArgumentType]
+ elif "annotation_name" in row and bool(row["annotation_name"]): # pyright: ignore[reportUnknownArgumentType]
+ row_name = str(row["annotation_name"]) # pyright: ignore[reportUnknownArgumentType]
+ assert row_name
+ row_annotator_kind = annotator_kind
+ if row_annotator_kind is None:
+ row_annotator_kind = cast(
+ _AnnotatorKind,
+ str(row["annotator_kind"]) # pyright: ignore[reportUnknownArgumentType]
+ if "annotator_kind" in row and bool(row["annotator_kind"]) # pyright: ignore[reportUnknownArgumentType]
+ else None,
+ )
+
+ # Get span_id from either column or index
+ span_id = (
+ str(row["span_id"]) # pyright: ignore[reportUnknownArgumentType]
+ if "span_id" in dataframe.columns and bool(row["span_id"]) # pyright: ignore[reportUnknownArgumentType]
+ else str(idx)
+ )
+
+ # Get optional fields with proper type conversion
+ label = str(row["label"]) if "label" in row and bool(row["label"]) else None # pyright: ignore[reportUnknownArgumentType]
+ score = None
+ if "score" in row and row["score"] is not None:
+ try:
+ score = float(row["score"]) # pyright: ignore[reportUnknownArgumentType,reportArgumentType]
+ except (ValueError, TypeError):
+ raise TypeError(f"Score value '{row['score']}' cannot be converted to float")
+ explanation = (
+ str(row["explanation"]).strip() # pyright: ignore[reportUnknownArgumentType]
+ if "explanation" in row and bool(row["explanation"]) # pyright: ignore[reportUnknownArgumentType]
+ else None
+ )
+ metadata = cast(
+ dict[str, Any],
+ dict(row["metadata"]) if "metadata" in row and bool(row["metadata"]) else None, # pyright: ignore[reportUnknownArgumentType]
+ )
+ identifier = (
+ str(row["identifier"]) if "identifier" in row and bool(row["identifier"]) else None # pyright: ignore[reportUnknownArgumentType]
+ )
+
+ annotation = _get_span_annotation(
+ span_id=span_id,
+ annotation_name=row_name, # pyright: ignore[reportArgumentType]
+ annotator_kind=row_annotator_kind, # pyright: ignore[reportArgumentType]
+ label=label,
+ score=score,
+ explanation=explanation,
+ metadata=metadata, # pyright: ignore[reportArgumentType]
+ identifier=identifier,
+ )
+
+ span_annotations.append(annotation) # pyright: ignore[reportUnknownMemberType]
+
+ # Yield chunk when we reach chunk_size
+ if len(span_annotations) >= chunk_size: # pyright: ignore[reportUnknownArgumentType]
+ yield span_annotations
+ span_annotations = []
+
+ except Exception as e:
+ raise ValueError(f"Error processing row {idx}: {str(e)}")
+
+ # Yield any remaining annotations
+ if span_annotations:
+ yield span_annotations
diff --git a/packages/phoenix-client/src/phoenix/client/resources/projects/__init__.py b/packages/phoenix-client/src/phoenix/client/resources/projects/__init__.py
index ddf7283316..c0e368db2e 100644
--- a/packages/phoenix-client/src/phoenix/client/resources/projects/__init__.py
+++ b/packages/phoenix-client/src/phoenix/client/resources/projects/__init__.py
@@ -490,16 +490,3 @@ async def delete(
url = f"v1/projects/{encode_path_param(project_identifier)}"
response = await self._client.delete(url)
response.raise_for_status()
-
-
-def _encode_project_name(name: str) -> str:
- """
- Encode a project name using URL-safe hex encoding.
-
- Args:
- name: The project name to encode
-
- Returns:
- The hex-encoded project name
- """
- return name.encode().hex()
diff --git a/packages/phoenix-client/src/phoenix/client/resources/spans/__init__.py b/packages/phoenix-client/src/phoenix/client/resources/spans/__init__.py
index 2177b69259..a02e6a37e8 100644
--- a/packages/phoenix-client/src/phoenix/client/resources/spans/__init__.py
+++ b/packages/phoenix-client/src/phoenix/client/resources/spans/__init__.py
@@ -1,13 +1,14 @@
import logging
from datetime import datetime, timezone, tzinfo
from io import StringIO
-from typing import TYPE_CHECKING, Optional, cast
+from typing import TYPE_CHECKING, Iterable, Optional, Sequence, Union, cast
import httpx
if TYPE_CHECKING:
import pandas as pd
+from phoenix.client.__generated__ import v1
from phoenix.client.types.spans import (
SpanQuery,
)
@@ -16,6 +17,7 @@
DEFAULT_TIMEOUT_IN_SECONDS = 5
_LOCAL_TIMEZONE = datetime.now(timezone.utc).astimezone().tzinfo
+_MAX_SPAN_IDS_PER_REQUEST = 100
class Spans:
@@ -113,6 +115,154 @@ def get_spans_dataframe(
"Install it with 'pip install pandas'"
)
+ def get_span_annotations_dataframe(
+ self,
+ *,
+ spans_dataframe: Optional["pd.DataFrame"] = None,
+ span_ids: Optional[Iterable[str]] = None,
+ project: str = "default",
+ limit: int = 1000,
+ timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
+ ) -> "pd.DataFrame":
+ """
+ Fetches span annotations and returns them as a pandas DataFrame.
+
+ Exactly one of *spans_dataframe* or *span_ids* should be provided.
+
+ Args:
+            spans_dataframe: A DataFrame (typically returned by `get_spans_dataframe`) with a
+                `context.span_id` column.
+ span_ids: An iterable of span IDs.
+ project: The project identifier (name or ID) used in the API path.
+ limit: Maximum number of annotations returned per request page.
+ timeout: Optional request timeout in seconds.
+
+ Returns:
+ A DataFrame where each row corresponds to a single span annotation.
+
+ Raises:
+            ValueError: If neither or both of *spans_dataframe* and *span_ids* are provided.
+            KeyError: If the `context.span_id` column is missing from *spans_dataframe*.
+ ImportError: If pandas is not installed.
+ httpx.HTTPStatusError: If the API returns an error response.
+ """
+ try:
+ import pandas as pd
+ except ImportError: # pragma: no cover
+ raise ImportError(
+ "pandas is required to use get_span_annotations_dataframe. "
+ "Install it with 'pip install pandas'"
+ )
+
+ # Validate input parameters
+ if (spans_dataframe is None and span_ids is None) or (
+ spans_dataframe is not None and span_ids is not None
+ ):
+ raise ValueError("Provide exactly one of 'spans_dataframe' or 'span_ids'.")
+
+ if spans_dataframe is not None:
+ span_ids_raw: list[str] = cast(
+ list[str], spans_dataframe["context.span_id"].dropna().tolist()
+ )
+ span_ids_list = list({*span_ids_raw})
+ else:
+ assert span_ids is not None
+ span_ids_list = list({*span_ids})
+
+ if not span_ids_list:
+ return pd.DataFrame()
+
+ annotations: list[v1.SpanAnnotation] = []
+ path = f"v1/projects/{project}/span_annotations"
+
+ for i in range(0, len(span_ids_list), _MAX_SPAN_IDS_PER_REQUEST):
+ batch_ids = span_ids_list[i : i + _MAX_SPAN_IDS_PER_REQUEST]
+ cursor: Optional[str] = None
+ while True:
+ params: dict[str, Union[int, str, Sequence[str]]] = {
+ "span_ids": batch_ids,
+ "limit": limit,
+ }
+ if cursor:
+ params["cursor"] = cursor
+
+ response = self._client.get(
+ url=path,
+ params=params,
+ headers={"accept": "application/json"},
+ timeout=timeout,
+ )
+ response.raise_for_status()
+ payload = response.json()
+ payload = cast(v1.SpanAnnotationsResponseBody, payload)
+ batch = cast(list[v1.SpanAnnotation], payload.get("data", []))
+ annotations.extend(batch)
+ cursor = payload.get("next_cursor")
+ if not cursor:
+ break # finished paginating this batch
+
+ df = pd.DataFrame(annotations)
+ df.set_index("span_id", inplace=True) # type: ignore[unused-ignore]
+ return df
+
+ def get_span_annotations(
+ self,
+ *,
+ span_ids: Iterable[str],
+ project: str,
+ limit: int = 1000,
+ timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
+ ) -> list[v1.SpanAnnotation]:
+ """
+ Fetches span annotations and returns them as a list of SpanAnnotation objects.
+
+ Args:
+ span_ids: An iterable of span IDs.
+ project: The project identifier (name or ID) used in the API path.
+ limit: Maximum number of annotations returned per request page.
+ timeout: Optional request timeout in seconds.
+
+ Returns:
+ A list of SpanAnnotation objects.
+
+ Raises:
+ httpx.HTTPStatusError: If the API returns an error response.
+ """
+ span_ids_list = list({*span_ids})
+
+ if not span_ids_list:
+ return []
+
+ annotations: list[v1.SpanAnnotation] = []
+ path = f"v1/projects/{project}/span_annotations"
+
+ for i in range(0, len(span_ids_list), _MAX_SPAN_IDS_PER_REQUEST):
+ batch_ids = span_ids_list[i : i + _MAX_SPAN_IDS_PER_REQUEST]
+ cursor: Optional[str] = None
+ while True:
+ params: dict[str, Union[int, str, Sequence[str]]] = {
+ "span_ids": batch_ids,
+ "limit": limit,
+ }
+ if cursor:
+ params["cursor"] = cursor
+ response = self._client.get(
+ url=path,
+ params=params,
+ headers={"accept": "application/json"},
+ timeout=timeout,
+ )
+ response.raise_for_status()
+ payload = response.json()
+ payload = cast(v1.SpanAnnotationsResponseBody, payload)
+ batch = cast(list[v1.SpanAnnotation], payload.get("data", []))
+ annotations.extend(batch)
+ cursor = payload.get("next_cursor")
+ if not cursor:
+ break
+
+ return annotations
+
class AsyncSpans:
"""
@@ -209,6 +359,152 @@ async def get_spans_dataframe(
"Install it with 'pip install pandas'"
)
+ async def get_span_annotations_dataframe(
+ self,
+ *,
+ spans_dataframe: Optional["pd.DataFrame"] = None,
+ span_ids: Optional[Iterable[str]] = None,
+ project: str,
+ limit: int = 1000,
+ timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
+ ) -> "pd.DataFrame":
+ """
+ Fetches span annotations and returns them as a pandas DataFrame.
+
+ Exactly one of *spans_dataframe* or *span_ids* should be provided.
+
+ Args:
+            spans_dataframe: A DataFrame (typically returned by `get_spans_dataframe`) with a
+                `context.span_id` column.
+ span_ids: An iterable of span IDs.
+ project: The project identifier (name or ID) used in the API path.
+ limit: Maximum number of annotations returned per request page.
+ timeout: Optional request timeout in seconds.
+
+ Returns:
+ A DataFrame where each row corresponds to a single span annotation.
+
+ Raises:
+            ValueError: If neither or both of *spans_dataframe* and *span_ids* are provided.
+            KeyError: If the `context.span_id` column is missing from *spans_dataframe*.
+ ImportError: If pandas is not installed.
+ httpx.HTTPStatusError: If the API returns an error response.
+ """
+ try:
+ import pandas as pd
+ except ImportError: # pragma: no cover
+ raise ImportError(
+ "pandas is required to use get_span_annotations_dataframe. "
+ "Install it with 'pip install pandas'"
+ )
+
+ if (spans_dataframe is None and span_ids is None) or (
+ spans_dataframe is not None and span_ids is not None
+ ):
+ raise ValueError("Provide exactly one of 'spans_dataframe' or 'span_ids'.")
+
+ if spans_dataframe is not None:
+ span_ids_raw: list[str] = cast(
+ list[str], spans_dataframe["context.span_id"].dropna().tolist()
+ )
+ span_ids_list = list({*span_ids_raw})
+ else:
+ assert span_ids is not None
+ span_ids_list = list({*span_ids})
+
+ if not span_ids_list:
+ return pd.DataFrame()
+
+ annotations: list[v1.SpanAnnotation] = []
+ path = f"v1/projects/{project}/span_annotations"
+
+ for i in range(0, len(span_ids_list), _MAX_SPAN_IDS_PER_REQUEST):
+ batch_ids = span_ids_list[i : i + _MAX_SPAN_IDS_PER_REQUEST]
+ cursor: Optional[str] = None
+ while True:
+ params: dict[str, Union[int, str, Sequence[str]]] = {
+ "span_ids": batch_ids,
+ "limit": limit,
+ }
+ if cursor:
+ params["cursor"] = cursor
+ response = await self._client.get(
+ url=path,
+ params=params,
+ headers={"accept": "application/json"},
+ timeout=timeout,
+ )
+ response.raise_for_status()
+ payload = response.json()
+ payload = cast(v1.SpanAnnotationsResponseBody, payload)
+ batch = cast(list[v1.SpanAnnotation], payload.get("data", []))
+ annotations.extend(batch)
+ cursor = payload.get("next_cursor")
+ if not cursor:
+ break
+
+ df = pd.DataFrame(annotations)
+ df.set_index("span_id", inplace=True) # type: ignore[unused-ignore]
+ return df
+
+ async def get_span_annotations(
+ self,
+ *,
+ span_ids: Iterable[str],
+ project: str,
+ limit: int = 1000,
+ timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
+ ) -> list[v1.SpanAnnotation]:
+ """
+ Fetches span annotations and returns them as a list of SpanAnnotation objects.
+
+ Args:
+ span_ids: An iterable of span IDs.
+ project: The project identifier (name or ID) used in the API path.
+ limit: Maximum number of annotations returned per request page.
+ timeout: Optional request timeout in seconds.
+
+ Returns:
+ A list of SpanAnnotation objects.
+
+ Raises:
+ httpx.HTTPStatusError: If the API returns an error response.
+ """
+        span_ids_list = list({*span_ids})  # de-duplicate span IDs (order not preserved)
+
+ if not span_ids_list:
+ return []
+
+ annotations: list[v1.SpanAnnotation] = []
+ path = f"v1/projects/{project}/span_annotations"
+
+ for i in range(0, len(span_ids_list), _MAX_SPAN_IDS_PER_REQUEST):
+ batch_ids = span_ids_list[i : i + _MAX_SPAN_IDS_PER_REQUEST]
+ cursor: Optional[str] = None
+ while True:
+ params: dict[str, Union[int, str, Sequence[str]]] = {
+ "span_ids": batch_ids,
+ "limit": limit,
+ }
+ if cursor:
+ params["cursor"] = cursor
+ response = await self._client.get(
+ url=path,
+ params=params,
+ headers={"accept": "application/json"},
+ timeout=timeout,
+ )
+ response.raise_for_status()
+ payload = response.json()
+ payload = cast(v1.SpanAnnotationsResponseBody, payload)
+ batch = cast(list[v1.SpanAnnotation], payload.get("data", []))
+ annotations.extend(batch)
+ cursor = payload.get("next_cursor")
+ if not cursor:
+ break
+
+ return annotations
+
def _to_iso_format(value: Optional[datetime]) -> Optional[str]:
return value.isoformat() if value else None
diff --git a/tests/integration/projects/__init__.py b/packages/phoenix-client/tests/client/__init__.py
similarity index 100%
rename from tests/integration/projects/__init__.py
rename to packages/phoenix-client/tests/client/__init__.py
diff --git a/tests/integration/prompts/__init__.py b/packages/phoenix-client/tests/client/resources/__init__.py
similarity index 100%
rename from tests/integration/prompts/__init__.py
rename to packages/phoenix-client/tests/client/resources/__init__.py
diff --git a/packages/phoenix-client/tests/client/resources/annotations/__init__.py b/packages/phoenix-client/tests/client/resources/annotations/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/phoenix-client/tests/client/resources/annotations/test_annotations.py b/packages/phoenix-client/tests/client/resources/annotations/test_annotations.py
new file mode 100644
index 0000000000..bb1fe6a076
--- /dev/null
+++ b/packages/phoenix-client/tests/client/resources/annotations/test_annotations.py
@@ -0,0 +1,385 @@
+# pyright: reportPrivateUsage=false
+
+import pandas as pd
+import pytest
+
+from phoenix.client.resources.annotations import _chunk_dataframe, _validate_dataframe
+
+
+class TestDataFrameValidation:
+ """Test suite for the _validate_dataframe helper function."""
+
+ def test_empty(self) -> None:
+ """Test validation of empty DataFrame."""
+ df = pd.DataFrame()
+ with pytest.raises(ValueError, match="DataFrame cannot be empty"):
+ _validate_dataframe(dataframe=df)
+
+ def test_not_pandas(self) -> None:
+ """Test validation with non-pandas input."""
+ with pytest.raises(TypeError, match="Expected pandas DataFrame"):
+ _validate_dataframe(dataframe="not a dataframe") # type: ignore[arg-type] # pyright: ignore[reportArgumentType]
+
+ def test_missing_required_columns(self) -> None:
+ """Test validation with missing required columns."""
+ df = pd.DataFrame({"label": ["positive"]})
+ with pytest.raises(
+ ValueError, match="DataFrame must contain either 'name' or 'annotation_name' column"
+ ):
+ _validate_dataframe(dataframe=df)
+
+ def test_both_name_columns(self) -> None:
+ """Test validation when both name and annotation_name columns are present."""
+ df = pd.DataFrame(
+ {"name": ["sentiment"], "annotation_name": ["sentiment"], "annotator_kind": ["HUMAN"]}
+ )
+ with pytest.raises(
+ ValueError, match="DataFrame cannot have both 'name' and 'annotation_name' columns"
+ ):
+ _validate_dataframe(dataframe=df)
+
+ def test_invalid_annotator_kind(self) -> None:
+ """Test validation of invalid annotator_kind values."""
+ df = pd.DataFrame(
+ {
+ "name": ["sentiment"],
+ "annotator_kind": ["INVALID"],
+ "span_id": ["span1"],
+ }
+ )
+ with pytest.raises(ValueError, match="Invalid annotator_kind values found in DataFrame"):
+ _validate_dataframe(dataframe=df)
+
+ def test_valid_with_name(self) -> None:
+ """Test validation with valid DataFrame using name column."""
+ df = pd.DataFrame(
+ {"name": ["sentiment"], "annotator_kind": ["HUMAN"], "span_id": ["span1"]}
+ )
+ _validate_dataframe(dataframe=df) # Should not raise
+
+ def test_valid_with_annotation_name(self) -> None:
+ """Test validation with valid DataFrame using annotation_name column."""
+ df = pd.DataFrame(
+ {"annotation_name": ["sentiment"], "annotator_kind": ["HUMAN"], "span_id": ["span1"]}
+ )
+ _validate_dataframe(dataframe=df) # Should not raise
+
+ def test_valid_with_global_name(self) -> None:
+ """Test validation with valid DataFrame and global annotation_name."""
+ df = pd.DataFrame({"annotator_kind": ["HUMAN"], "span_id": ["span1"]})
+ _validate_dataframe(dataframe=df, annotation_name="sentiment") # Should not raise
+
+ def test_valid_with_global_annotator_kind(self) -> None:
+ """Test validation with valid DataFrame and global annotator_kind."""
+ df = pd.DataFrame({"name": ["sentiment"], "span_id": ["span1"]})
+ _validate_dataframe(dataframe=df, annotator_kind="HUMAN") # Should not raise
+
+ def test_invalid_global_annotator_kind(self) -> None:
+ """Test validation with invalid global annotator_kind."""
+ df = pd.DataFrame({"name": ["sentiment"], "span_id": ["span1"]})
+ with pytest.raises(ValueError, match="Invalid annotator_kind value"):
+ _validate_dataframe(
+ dataframe=df,
+ annotator_kind="INVALID", # type: ignore
+ )
+
+ def test_invalid_global_name(self) -> None:
+ """Test validation with invalid global annotation_name."""
+ df = pd.DataFrame({"annotator_kind": ["HUMAN"], "span_id": ["span1"]})
+ with pytest.raises(ValueError, match="Annotation name cannot be empty or whitespace"):
+ _validate_dataframe(dataframe=df, annotation_name="") # Empty string
+
+ def test_missing_span_id(self) -> None:
+ """Test validation with missing span_id."""
+ df = pd.DataFrame({"name": ["sentiment"], "annotator_kind": ["HUMAN"]})
+ with pytest.raises(
+ ValueError,
+ match="DataFrame must have either a 'span_id' column or a string-based index",
+ ):
+ _validate_dataframe(dataframe=df)
+
+ def test_valid_with_index(self) -> None:
+ """Test validation with valid DataFrame using index as span_id."""
+ df = pd.DataFrame({"name": ["sentiment"], "annotator_kind": ["HUMAN"]}, index=["span1"])
+ _validate_dataframe(dataframe=df) # Should not raise
+
+ def test_invalid_name_values(self) -> None:
+ """Test validation with invalid name values."""
+ df = pd.DataFrame(
+ {
+ "name": ["", " "], # Empty strings
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["span1", "span2"],
+ }
+ )
+ with pytest.raises(ValueError, match="name values must be non-empty strings"):
+ _validate_dataframe(dataframe=df)
+
+ def test_invalid_annotation_name_values(self) -> None:
+ """Test validation with invalid annotation_name values."""
+ df = pd.DataFrame(
+ {
+ "annotation_name": ["", " "], # Empty strings
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["span1", "span2"],
+ }
+ )
+ with pytest.raises(ValueError, match="annotation_name values must be non-empty strings"):
+ _validate_dataframe(dataframe=df)
+
+ def test_invalid_span_id_values(self) -> None:
+ """Test validation with invalid span_id values."""
+ df = pd.DataFrame(
+ {
+ "name": ["sentiment", "sentiment"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["", " "], # Empty strings
+ }
+ )
+ with pytest.raises(ValueError, match="span_id values must be non-empty strings"):
+ _validate_dataframe(dataframe=df)
+
+ def test_invalid_index_values(self) -> None:
+ """Test validation with invalid index values when using index as span_id."""
+ df = pd.DataFrame(
+ {
+ "name": ["sentiment", "sentiment"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ },
+ index=["", " "], # Empty strings
+ )
+ with pytest.raises(
+ ValueError, match="Index values must be non-empty strings when used as span_id"
+ ):
+ _validate_dataframe(dataframe=df)
+
+ def test_none_span_id_values(self) -> None:
+ """Test validation with None values in span_id column."""
+ df = pd.DataFrame(
+ {
+ "name": ["sentiment", "sentiment"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": [None, "valid_id"], # None value
+ }
+ )
+ with pytest.raises(ValueError, match="span_id values cannot be None"):
+ _validate_dataframe(dataframe=df)
+
+ def test_mixed_valid_invalid_values(self) -> None:
+ """Test validation with mixed valid and invalid values in columns."""
+ df = pd.DataFrame(
+ {
+ "name": ["valid", ""], # Mixed valid/invalid
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["valid_id", " "], # Mixed valid/invalid
+ }
+ )
+ with pytest.raises(ValueError, match="name values must be non-empty strings"):
+ _validate_dataframe(dataframe=df)
+
+ def test_non_string_span_id(self) -> None:
+ """Test validation with non-string values in span_id column."""
+ df = pd.DataFrame(
+ {
+ "name": ["sentiment", "sentiment"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": [123, "valid_id"], # Non-string value
+ }
+ )
+ with pytest.raises(ValueError, match="span_id values must be strings"):
+ _validate_dataframe(dataframe=df)
+
+ def test_whitespace_only_index(self) -> None:
+ """Test validation with whitespace-only values in index."""
+ df = pd.DataFrame(
+ {
+ "name": ["sentiment", "sentiment"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ },
+ index=[" ", "\t"], # Whitespace-only values
+ )
+ with pytest.raises(
+ ValueError, match="Index values must be non-empty strings when used as span_id"
+ ):
+ _validate_dataframe(dataframe=df)
+
+
+class TestChunkDataFrame:
+ """Test suite for the _chunk_dataframe helper function."""
+
+ def test_empty_dataframe(self) -> None:
+ """Test chunking an empty DataFrame."""
+ df = pd.DataFrame()
+ with pytest.raises(ValueError, match="DataFrame cannot be empty"):
+ list(_chunk_dataframe(dataframe=df))
+
+ def test_single_row(self) -> None:
+ """Test chunking a DataFrame with a single row."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1"],
+ "annotator_kind": ["HUMAN"],
+ "span_id": ["id1"],
+ "label": ["label1"],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 1
+ assert len(chunks[0]) == 1
+
+ def test_default_chunk_size(self) -> None:
+ """Test that the default chunk size is used when not specified."""
+ # Create a DataFrame with 101 rows to test default chunk size of 100
+ df = pd.DataFrame(
+ {
+ "name": [f"test{i}" for i in range(101)],
+ "annotator_kind": ["HUMAN"] * 101,
+ "span_id": [f"id{i}" for i in range(101)],
+ "label": [f"label{i}" for i in range(101)],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 2
+ assert len(chunks[0]) == 100 # First chunk should be full
+ assert len(chunks[1]) == 1 # Second chunk should have remaining row
+
+ def test_exact_chunk_size(self) -> None:
+ """Test chunking a DataFrame that is exactly the chunk size."""
+ # Create a DataFrame with 100 rows (exactly the default chunk size)
+ df = pd.DataFrame(
+ {
+ "name": [f"test{i}" for i in range(100)],
+ "annotator_kind": ["HUMAN"] * 100,
+ "span_id": [f"id{i}" for i in range(100)],
+ "label": [f"label{i}" for i in range(100)],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 1
+ assert len(chunks[0]) == 100
+
+ def test_custom_chunk_size(self) -> None:
+ """Test chunking with a custom chunk size."""
+ # Create a DataFrame with 10 rows and use chunk size of 3
+ df = pd.DataFrame(
+ {
+ "name": [f"test{i}" for i in range(10)],
+ "annotator_kind": ["HUMAN"] * 10,
+ "span_id": [f"id{i}" for i in range(10)],
+ "label": [f"label{i}" for i in range(10)],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df, chunk_size=3))
+ assert len(chunks) == 4 # 3 full chunks + 1 partial chunk
+ assert len(chunks[0]) == 3
+ assert len(chunks[1]) == 3
+ assert len(chunks[2]) == 3
+ assert len(chunks[3]) == 1
+
+ def test_global_annotation_name(self) -> None:
+ """Test chunking with global annotation_name."""
+ df = pd.DataFrame(
+ {
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["id1", "id2"],
+ "label": ["label1", "label2"],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df, annotation_name="global_name"))
+ assert len(chunks) == 1
+ assert all(anno["name"] == "global_name" for anno in chunks[0])
+
+ def test_global_annotator_kind(self) -> None:
+ """Test chunking with global annotator_kind."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1", "test2"],
+ "span_id": ["id1", "id2"],
+ "label": ["label1", "label2"],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df, annotator_kind="HUMAN"))
+ assert len(chunks) == 1
+ assert all(anno["annotator_kind"] == "HUMAN" for anno in chunks[0])
+
+ def test_optional_fields(self) -> None:
+ """Test chunking with optional fields (label, score, explanation)."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1", "test2"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["id1", "id2"],
+ "label": ["label1", "label2"],
+ "score": [0.5, 0.8],
+ "explanation": ["expl1", "expl2"],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 1
+ assert chunks[0][0]["result"]["label"] == "label1" # pyright: ignore[reportTypedDictNotRequiredAccess]
+ assert chunks[0][0]["result"]["score"] == 0.5 # pyright: ignore[reportTypedDictNotRequiredAccess]
+ assert chunks[0][0]["result"]["explanation"] == "expl1" # pyright: ignore[reportTypedDictNotRequiredAccess]
+
+ def test_index_as_span_id(self) -> None:
+ """Test chunking when using index as span_id."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1", "test2"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "label": ["label1", "label2"],
+ },
+ index=["id1", "id2"],
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 1
+ assert chunks[0][0]["span_id"] == "id1"
+ assert chunks[0][1]["span_id"] == "id2"
+
+ def test_metadata(self) -> None:
+ """Test chunking with metadata field."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1", "test2"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["id1", "id2"],
+ "label": ["label1", "label2"],
+ "metadata": [{"key1": "value1"}, {"key2": "value2"}],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 1
+ assert chunks[0][0]["metadata"] == {"key1": "value1"} # pyright: ignore[reportTypedDictNotRequiredAccess]
+ assert chunks[0][1]["metadata"] == {"key2": "value2"} # pyright: ignore[reportTypedDictNotRequiredAccess]
+
+ def test_identifier(self) -> None:
+ """Test chunking with identifier field."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1", "test2"],
+ "annotator_kind": ["HUMAN", "HUMAN"],
+ "span_id": ["id1", "id2"],
+ "label": ["label1", "label2"],
+ "identifier": ["id1", "id2"],
+ }
+ )
+ chunks = list(_chunk_dataframe(dataframe=df))
+ assert len(chunks) == 1
+ assert chunks[0][0]["identifier"] == "id1" # pyright: ignore[reportTypedDictNotRequiredAccess]
+ assert chunks[0][1]["identifier"] == "id2" # pyright: ignore[reportTypedDictNotRequiredAccess]
+
+ def test_invalid_score_type(self) -> None:
+ """Test chunking with invalid score type."""
+ df = pd.DataFrame(
+ {
+ "name": ["test1"],
+ "annotator_kind": ["HUMAN"],
+ "span_id": ["id1"],
+ "label": ["label1"],
+ "score": ["not_a_number"],
+ }
+ )
+ with pytest.raises(
+ ValueError,
+ match="Error processing row 0: Score value 'not_a_number' cannot be converted to float",
+ ):
+ list(_chunk_dataframe(dataframe=df))
diff --git a/requirements/unit-tests.txt b/requirements/unit-tests.txt
index 287244747f..fca6b806fb 100644
--- a/requirements/unit-tests.txt
+++ b/requirements/unit-tests.txt
@@ -26,3 +26,4 @@ typing-extensions
vcrpy
aiohttp>=3.0; python_version < "3.10"
urllib3<2.0; python_version < "3.10"
+freezegun
diff --git a/schemas/openapi.json b/schemas/openapi.json
index 5c9653b511..8ea9c7a1ae 100644
--- a/schemas/openapi.json
+++ b/schemas/openapi.json
@@ -6,6 +6,411 @@
"version": "1.0"
},
"paths": {
+ "/v1/annotation_configs": {
+ "get": {
+ "tags": [
+ "annotation_configs"
+ ],
+ "summary": "List annotation configurations",
+ "description": "Retrieve a paginated list of all annotation configurations in the system.",
+ "operationId": "list_annotation_configs_v1_annotation_configs_get",
+ "parameters": [
+ {
+ "name": "cursor",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "Cursor for pagination (base64-encoded annotation config ID)",
+ "title": "Cursor"
+ },
+ "description": "Cursor for pagination (base64-encoded annotation config ID)"
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "exclusiveMinimum": 0,
+ "description": "Maximum number of configs to return",
+ "default": 100,
+ "title": "Limit"
+ },
+ "description": "Maximum number of configs to return"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "A list of annotation configurations with pagination information",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/GetAnnotationConfigsResponseBody"
+ }
+ }
+ }
+ },
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ },
+ "post": {
+ "tags": [
+ "annotation_configs"
+ ],
+ "summary": "Create an annotation configuration",
+ "operationId": "create_annotation_config_v1_annotation_configs_post",
+ "requestBody": {
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateAnnotationConfigData"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateAnnotationConfigResponseBody"
+ }
+ }
+ }
+ },
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v1/annotation_configs/{config_identifier}": {
+ "get": {
+ "tags": [
+ "annotation_configs"
+ ],
+ "summary": "Get an annotation configuration by ID or name",
+ "operationId": "get_annotation_config_by_name_or_id_v1_annotation_configs__config_identifier__get",
+ "parameters": [
+ {
+ "name": "config_identifier",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "description": "ID or name of the annotation configuration",
+ "title": "Config Identifier"
+ },
+ "description": "ID or name of the annotation configuration"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/GetAnnotationConfigResponseBody"
+ }
+ }
+ }
+ },
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v1/annotation_configs/{config_id}": {
+ "put": {
+ "tags": [
+ "annotation_configs"
+ ],
+ "summary": "Update an annotation configuration",
+ "operationId": "update_annotation_config_v1_annotation_configs__config_id__put",
+ "parameters": [
+ {
+ "name": "config_id",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "description": "ID of the annotation configuration",
+ "title": "Config Id"
+ },
+ "description": "ID of the annotation configuration"
+ }
+ ],
+ "requestBody": {
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/CreateAnnotationConfigData"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/UpdateAnnotationConfigResponseBody"
+ }
+ }
+ }
+ },
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ },
+ "delete": {
+ "tags": [
+ "annotation_configs"
+ ],
+ "summary": "Delete an annotation configuration",
+ "operationId": "delete_annotation_config_v1_annotation_configs__config_id__delete",
+ "parameters": [
+ {
+ "name": "config_id",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "description": "ID of the annotation configuration",
+ "title": "Config Id"
+ },
+ "description": "ID of the annotation configuration"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeleteAnnotationConfigResponseBody"
+ }
+ }
+ }
+ },
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v1/projects/{project_identifier}/span_annotations": {
+ "get": {
+ "tags": [
+ "annotations"
+ ],
+ "summary": "Get span annotations for a list of span_ids.",
+ "operationId": "listSpanAnnotationsBySpanIds",
+ "parameters": [
+ {
+ "name": "project_identifier",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "description": "The project identifier: either project ID or project name. If using a project name as the identifier, it cannot contain slash (/), question mark (?), or pound sign (#) characters.",
+ "title": "Project Identifier"
+ },
+ "description": "The project identifier: either project ID or project name. If using a project name as the identifier, it cannot contain slash (/), question mark (?), or pound sign (#) characters."
+ },
+ {
+ "name": "span_ids",
+ "in": "query",
+ "required": true,
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "minItems": 1,
+            "description": "One or more span IDs to fetch annotations for",
+ "title": "Span Ids"
+ },
+          "description": "One or more span IDs to fetch annotations for"
+ },
+ {
+ "name": "cursor",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "A cursor for pagination",
+ "title": "Cursor"
+ },
+ "description": "A cursor for pagination"
+ },
+ {
+ "name": "limit",
+ "in": "query",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "maximum": 10000,
+ "exclusiveMinimum": 0,
+ "description": "The maximum number of annotations to return in a single request",
+ "default": 10,
+ "title": "Limit"
+ },
+ "description": "The maximum number of annotations to return in a single request"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/SpanAnnotationsResponseBody"
+ }
+ }
+ }
+ },
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "404": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Project or spans not found"
+ },
+ "422": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Invalid parameters"
+ }
+ }
+ }
+ },
"/v1/datasets": {
"get": {
"tags": [
@@ -1078,7 +1483,7 @@
"tags": [
"spans"
],
- "summary": "Create or update span annotations",
+ "summary": "Create span annotations",
"operationId": "annotateSpans",
"parameters": [
{
@@ -2197,73 +2602,356 @@
"204": {
"description": "No content returned on successful deletion"
},
- "403": {
- "content": {
- "text/plain": {
- "schema": {
- "type": "string"
- }
+ "403": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Forbidden"
+ },
+ "404": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Not Found"
+ },
+ "422": {
+ "content": {
+ "text/plain": {
+ "schema": {
+ "type": "string"
+ }
+ }
+ },
+ "description": "Unprocessable Entity"
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "AnnotateSpansRequestBody": {
+ "properties": {
+ "data": {
+ "items": {
+ "$ref": "#/components/schemas/SpanAnnotationData"
+ },
+ "type": "array",
+ "title": "Data"
+ }
+ },
+ "type": "object",
+ "required": [
+ "data"
+ ],
+ "title": "AnnotateSpansRequestBody"
+ },
+ "AnnotateSpansResponseBody": {
+ "properties": {
+ "data": {
+ "items": {
+ "$ref": "#/components/schemas/InsertedSpanAnnotation"
+ },
+ "type": "array",
+ "title": "Data"
+ }
+ },
+ "type": "object",
+ "required": [
+ "data"
+ ],
+ "title": "AnnotateSpansResponseBody"
+ },
+ "CategoricalAnnotationConfig": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
+ },
+ "type": {
+ "type": "string",
+ "const": "CATEGORICAL",
+ "title": "Type"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Description"
+ },
+ "optimization_direction": {
+ "$ref": "#/components/schemas/OptimizationDirection"
+ },
+ "values": {
+ "items": {
+ "$ref": "#/components/schemas/CategoricalAnnotationValue"
+ },
+ "type": "array",
+ "title": "Values"
+ },
+ "id": {
+ "type": "string",
+ "title": "Id"
+ }
+ },
+ "type": "object",
+ "required": [
+ "name",
+ "type",
+ "optimization_direction",
+ "values",
+ "id"
+ ],
+ "title": "CategoricalAnnotationConfig"
+ },
+ "CategoricalAnnotationConfigData": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
+ },
+ "type": {
+ "type": "string",
+ "const": "CATEGORICAL",
+ "title": "Type"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Description"
+ },
+ "optimization_direction": {
+ "$ref": "#/components/schemas/OptimizationDirection"
+ },
+ "values": {
+ "items": {
+ "$ref": "#/components/schemas/CategoricalAnnotationValue"
+ },
+ "type": "array",
+ "title": "Values"
+ }
+ },
+ "type": "object",
+ "required": [
+ "name",
+ "type",
+ "optimization_direction",
+ "values"
+ ],
+ "title": "CategoricalAnnotationConfigData"
+ },
+ "CategoricalAnnotationValue": {
+ "properties": {
+ "label": {
+ "type": "string",
+ "title": "Label"
+ },
+ "score": {
+ "anyOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Score"
+ }
+ },
+ "type": "object",
+ "required": [
+ "label"
+ ],
+ "title": "CategoricalAnnotationValue"
+ },
+ "ContinuousAnnotationConfig": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
+ },
+ "type": {
+ "type": "string",
+ "const": "CONTINUOUS",
+ "title": "Type"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Description"
+ },
+ "optimization_direction": {
+ "$ref": "#/components/schemas/OptimizationDirection"
+ },
+ "lower_bound": {
+ "anyOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Lower Bound"
+ },
+ "upper_bound": {
+ "anyOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Upper Bound"
+ },
+ "id": {
+ "type": "string",
+ "title": "Id"
+ }
+ },
+ "type": "object",
+ "required": [
+ "name",
+ "type",
+ "optimization_direction",
+ "id"
+ ],
+ "title": "ContinuousAnnotationConfig"
+ },
+ "ContinuousAnnotationConfigData": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
+ },
+ "type": {
+ "type": "string",
+ "const": "CONTINUOUS",
+ "title": "Type"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
}
- },
- "description": "Forbidden"
+ ],
+ "title": "Description"
},
- "404": {
- "content": {
- "text/plain": {
- "schema": {
- "type": "string"
- }
+ "optimization_direction": {
+ "$ref": "#/components/schemas/OptimizationDirection"
+ },
+ "lower_bound": {
+ "anyOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
}
- },
- "description": "Not Found"
+ ],
+ "title": "Lower Bound"
},
- "422": {
- "content": {
- "text/plain": {
- "schema": {
- "type": "string"
- }
+ "upper_bound": {
+ "anyOf": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "null"
}
- },
- "description": "Unprocessable Entity"
- }
- }
- }
- }
- },
- "components": {
- "schemas": {
- "AnnotateSpansRequestBody": {
- "properties": {
- "data": {
- "items": {
- "$ref": "#/components/schemas/SpanAnnotation"
- },
- "type": "array",
- "title": "Data"
+ ],
+ "title": "Upper Bound"
}
},
"type": "object",
"required": [
- "data"
+ "name",
+ "type",
+ "optimization_direction"
],
- "title": "AnnotateSpansRequestBody"
+ "title": "ContinuousAnnotationConfigData"
},
- "AnnotateSpansResponseBody": {
+ "CreateAnnotationConfigData": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/CategoricalAnnotationConfigData"
+ },
+ {
+ "$ref": "#/components/schemas/ContinuousAnnotationConfigData"
+ },
+ {
+ "$ref": "#/components/schemas/FreeformAnnotationConfigData"
+ }
+ ],
+ "title": "CreateAnnotationConfigData",
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "CATEGORICAL": "#/components/schemas/CategoricalAnnotationConfigData",
+ "CONTINUOUS": "#/components/schemas/ContinuousAnnotationConfigData",
+ "FREEFORM": "#/components/schemas/FreeformAnnotationConfigData"
+ }
+ }
+ },
+ "CreateAnnotationConfigResponseBody": {
"properties": {
"data": {
- "items": {
- "$ref": "#/components/schemas/InsertedSpanAnnotation"
- },
- "type": "array",
- "title": "Data"
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/CategoricalAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/ContinuousAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ ],
+ "title": "Data",
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "CATEGORICAL": "#/components/schemas/CategoricalAnnotationConfig",
+ "CONTINUOUS": "#/components/schemas/ContinuousAnnotationConfig",
+ "FREEFORM": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ }
}
},
"type": "object",
"required": [
"data"
],
- "title": "AnnotateSpansResponseBody"
+ "title": "CreateAnnotationConfigResponseBody"
},
"CreateExperimentRequestBody": {
"properties": {
@@ -2569,45 +3257,237 @@
},
"type": "object",
"required": [
- "id",
+ "id",
+ "name",
+ "description",
+ "metadata",
+ "created_at",
+ "updated_at",
+ "example_count"
+ ],
+ "title": "DatasetWithExampleCount"
+ },
+ "DeleteAnnotationConfigResponseBody": {
+ "properties": {
+ "data": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/CategoricalAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/ContinuousAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ ],
+ "title": "Data",
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "CATEGORICAL": "#/components/schemas/CategoricalAnnotationConfig",
+ "CONTINUOUS": "#/components/schemas/ContinuousAnnotationConfig",
+ "FREEFORM": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ }
+ }
+ },
+ "type": "object",
+ "required": [
+ "data"
+ ],
+ "title": "DeleteAnnotationConfigResponseBody"
+ },
+ "Experiment": {
+ "properties": {
+ "id": {
+ "type": "string",
+ "title": "Id",
+ "description": "The ID of the experiment"
+ },
+ "dataset_id": {
+ "type": "string",
+ "title": "Dataset Id",
+ "description": "The ID of the dataset associated with the experiment"
+ },
+ "dataset_version_id": {
+ "type": "string",
+ "title": "Dataset Version Id",
+ "description": "The ID of the dataset version associated with the experiment"
+ },
+ "repetitions": {
+ "type": "integer",
+ "title": "Repetitions",
+ "description": "Number of times the experiment is repeated"
+ },
+ "metadata": {
+ "additionalProperties": true,
+ "type": "object",
+ "title": "Metadata",
+ "description": "Metadata of the experiment"
+ },
+ "project_name": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Project Name",
+ "description": "The name of the project associated with the experiment"
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time",
+ "title": "Created At",
+ "description": "The creation timestamp of the experiment"
+ },
+ "updated_at": {
+ "type": "string",
+ "format": "date-time",
+ "title": "Updated At",
+ "description": "The last update timestamp of the experiment"
+ }
+ },
+ "type": "object",
+ "required": [
+ "id",
+ "dataset_id",
+ "dataset_version_id",
+ "repetitions",
+ "metadata",
+ "project_name",
+ "created_at",
+ "updated_at"
+ ],
+ "title": "Experiment"
+ },
+ "FreeformAnnotationConfig": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
+ },
+ "type": {
+ "type": "string",
+ "const": "FREEFORM",
+ "title": "Type"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Description"
+ },
+ "id": {
+ "type": "string",
+ "title": "Id"
+ }
+ },
+ "type": "object",
+ "required": [
+ "name",
+ "type",
+ "id"
+ ],
+ "title": "FreeformAnnotationConfig"
+ },
+ "FreeformAnnotationConfigData": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
+ },
+ "type": {
+ "type": "string",
+ "const": "FREEFORM",
+ "title": "Type"
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Description"
+ }
+ },
+ "type": "object",
+ "required": [
"name",
- "description",
- "metadata",
- "created_at",
- "updated_at",
- "example_count"
+ "type"
],
- "title": "DatasetWithExampleCount"
+ "title": "FreeformAnnotationConfigData"
},
- "Experiment": {
+ "GetAnnotationConfigResponseBody": {
"properties": {
- "id": {
- "type": "string",
- "title": "Id",
- "description": "The ID of the experiment"
- },
- "dataset_id": {
- "type": "string",
- "title": "Dataset Id",
- "description": "The ID of the dataset associated with the experiment"
- },
- "dataset_version_id": {
- "type": "string",
- "title": "Dataset Version Id",
- "description": "The ID of the dataset version associated with the experiment"
- },
- "repetitions": {
- "type": "integer",
- "title": "Repetitions",
- "description": "Number of times the experiment is repeated"
- },
- "metadata": {
- "additionalProperties": true,
- "type": "object",
- "title": "Metadata",
- "description": "Metadata of the experiment"
+ "data": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/CategoricalAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/ContinuousAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ ],
+ "title": "Data",
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "CATEGORICAL": "#/components/schemas/CategoricalAnnotationConfig",
+ "CONTINUOUS": "#/components/schemas/ContinuousAnnotationConfig",
+ "FREEFORM": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ }
+ }
+ },
+ "type": "object",
+ "required": [
+ "data"
+ ],
+ "title": "GetAnnotationConfigResponseBody"
+ },
+ "GetAnnotationConfigsResponseBody": {
+ "properties": {
+ "data": {
+ "items": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/CategoricalAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/ContinuousAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ ],
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "CATEGORICAL": "#/components/schemas/CategoricalAnnotationConfig",
+ "CONTINUOUS": "#/components/schemas/ContinuousAnnotationConfig",
+ "FREEFORM": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ }
+ },
+ "type": "array",
+ "title": "Data"
},
- "project_name": {
+ "next_cursor": {
"anyOf": [
{
"type": "string"
@@ -2616,34 +3496,15 @@
"type": "null"
}
],
- "title": "Project Name",
- "description": "The name of the project associated with the experiment"
- },
- "created_at": {
- "type": "string",
- "format": "date-time",
- "title": "Created At",
- "description": "The creation timestamp of the experiment"
- },
- "updated_at": {
- "type": "string",
- "format": "date-time",
- "title": "Updated At",
- "description": "The last update timestamp of the experiment"
+ "title": "Next Cursor"
}
},
"type": "object",
"required": [
- "id",
- "dataset_id",
- "dataset_version_id",
- "repetitions",
- "metadata",
- "project_name",
- "created_at",
- "updated_at"
+ "data",
+ "next_cursor"
],
- "title": "Experiment"
+ "title": "GetAnnotationConfigsResponseBody"
},
"GetDatasetResponseBody": {
"properties": {
@@ -2957,6 +3818,15 @@
],
"title": "ModelProvider"
},
+ "OptimizationDirection": {
+ "type": "string",
+ "enum": [
+ "MINIMIZE",
+ "MAXIMIZE",
+ "NONE"
+ ],
+ "title": "OptimizationDirection"
+ },
"Project": {
"properties": {
"name": {
@@ -3981,6 +4851,112 @@
"type": "string",
"enum": [
"LLM",
+ "CODE",
+ "HUMAN"
+ ],
+ "title": "Annotator Kind",
+ "description": "The kind of annotator used for the annotation"
+ },
+ "result": {
+ "anyOf": [
+ {
+ "$ref": "#/components/schemas/SpanAnnotationResult"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "description": "The result of the annotation"
+ },
+ "metadata": {
+ "anyOf": [
+ {
+ "additionalProperties": true,
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Metadata",
+ "description": "Metadata for the annotation"
+ },
+ "identifier": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Identifier",
+ "description": "The identifier of the annotation. If provided, the annotation will be updated if it already exists."
+ },
+ "id": {
+ "type": "string",
+ "title": "Id"
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time",
+ "title": "Created At"
+ },
+ "updated_at": {
+ "type": "string",
+ "format": "date-time",
+ "title": "Updated At"
+ },
+ "source": {
+ "type": "string",
+ "enum": [
+ "API",
+ "APP"
+ ],
+ "title": "Source"
+ },
+ "user_id": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "User Id"
+ }
+ },
+ "type": "object",
+ "required": [
+ "span_id",
+ "name",
+ "annotator_kind",
+ "id",
+ "created_at",
+ "updated_at",
+ "source",
+ "user_id"
+ ],
+ "title": "SpanAnnotation"
+ },
+ "SpanAnnotationData": {
+ "properties": {
+ "span_id": {
+ "type": "string",
+ "title": "Span Id",
+ "description": "OpenTelemetry Span ID (hex format w/o 0x prefix)"
+ },
+ "name": {
+ "type": "string",
+ "title": "Name",
+ "description": "The name of the annotation"
+ },
+ "annotator_kind": {
+ "type": "string",
+ "enum": [
+ "LLM",
+ "CODE",
"HUMAN"
],
"title": "Annotator Kind",
@@ -4009,6 +4985,18 @@
],
"title": "Metadata",
"description": "Metadata for the annotation"
+ },
+ "identifier": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Identifier",
+ "description": "The identifier of the annotation. If provided, the annotation will be updated if it already exists."
}
},
"type": "object",
@@ -4017,7 +5005,7 @@
"name",
"annotator_kind"
],
- "title": "SpanAnnotation"
+ "title": "SpanAnnotationData"
},
"SpanAnnotationResult": {
"properties": {
@@ -4061,6 +5049,34 @@
"type": "object",
"title": "SpanAnnotationResult"
},
+ "SpanAnnotationsResponseBody": {
+ "properties": {
+ "data": {
+ "items": {
+ "$ref": "#/components/schemas/SpanAnnotation"
+ },
+ "type": "array",
+ "title": "Data"
+ },
+ "next_cursor": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Next Cursor"
+ }
+ },
+ "type": "object",
+ "required": [
+ "data",
+ "next_cursor"
+ ],
+ "title": "SpanAnnotationsResponseBody"
+ },
"TextContentPart": {
"properties": {
"type": {
@@ -4190,6 +5206,37 @@
],
"title": "ToolResultContentPart"
},
+ "UpdateAnnotationConfigResponseBody": {
+ "properties": {
+ "data": {
+ "oneOf": [
+ {
+ "$ref": "#/components/schemas/CategoricalAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/ContinuousAnnotationConfig"
+ },
+ {
+ "$ref": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ ],
+ "title": "Data",
+ "discriminator": {
+ "propertyName": "type",
+ "mapping": {
+ "CATEGORICAL": "#/components/schemas/CategoricalAnnotationConfig",
+ "CONTINUOUS": "#/components/schemas/ContinuousAnnotationConfig",
+ "FREEFORM": "#/components/schemas/FreeformAnnotationConfig"
+ }
+ }
+ }
+ },
+ "type": "object",
+ "required": [
+ "data"
+ ],
+ "title": "UpdateAnnotationConfigResponseBody"
+ },
"UpdateProjectRequestBody": {
"properties": {
"description": {
diff --git a/scripts/generate_data_via_plpgsql/generate_span_annotations.py b/scripts/generate_data_via_plpgsql/generate_span_annotations.py
new file mode 100644
index 0000000000..5ad0b10d08
--- /dev/null
+++ b/scripts/generate_data_via_plpgsql/generate_span_annotations.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python3
+"""
+Span Annotations Generation Script
+
+This script executes the generate_span_annotations.sql script to create random annotations
+for spans in the database. It provides a convenient way to run the SQL script with
+configurable parameters.
+
+The script generates random annotations with the following characteristics:
+- Randomly sampled spans (approximately 1% of total spans)
+- Between 1 and max_annotations_per_span annotations per span
+- Randomly assigned names from the provided list
+- Randomly assigned labels: "YES" or "NO"
+- Random integer scores between -100,000,000 and 100,000,000
+- Detailed metadata JSON objects (or empty objects when randomly missing)
+- Random annotator kind: "HUMAN" or "LLM"
+- Random explanation text
+
+Usage:
+ python generate_span_annotations.py [options]
+
+Options:
+ --db-name NAME Database name (default: postgres)
+ --db-user USER Database user (default: postgres)
+ --db-host HOST Database host (default: localhost)
+ --db-port PORT Database port (default: 5432)
+ --db-password PASS Database password (default: phoenix)
+ --limit LIMIT Number of spans to sample (default: 10000)
+ --max-annotations-per-span MAX
+ Maximum number of annotations per span (default: 10)
+ --label-missing-prob PROB
+ Probability of label being missing (default: 0.1)
+ --score-missing-prob PROB
+ Probability of score being missing (default: 0.1)
+ --explanation-missing-prob PROB
+ Probability of explanation being missing (default: 0.1)
+ --metadata-missing-prob PROB
+ Probability of metadata being missing (default: 0.1)
+ --annotation-names NAMES
+                        Comma-separated list of annotation names (default: correctness,helpfulness,relevance,safety,coherence,note)
+
+Example:
+ # Use default parameters
+ python generate_span_annotations.py
+
+ # Specify custom parameters
+ python generate_span_annotations.py \
+ --db-name mydb \
+ --db-user myuser \
+ --db-host localhost \
+ --db-port 5432 \
+ --db-password mypass \
+ --limit 10000 \
+ --max-annotations-per-span 10 \
+ --label-missing-prob 0.1 \
+ --score-missing-prob 0.1 \
+ --explanation-missing-prob 0.1 \
+ --metadata-missing-prob 0.1 \
+ --annotation-names "correctness,helpfulness,relevance,safety,coherence"
+
+Dependencies:
+ - Python 3.x
+ - psql command-line tool
+ - PostgreSQL database with the following tables:
+ - public.spans
+ - public.span_annotations
+
+The script uses a single bulk INSERT operation for efficiency and maintains referential
+integrity by using the span's id as span_rowid in the annotations.
+""" # noqa: E501
+
+import argparse
+import os
+import subprocess
+import sys
+import time
+from datetime import timedelta
+
+
+def parse_arguments():
+ """Parse command line arguments.
+
+ Returns:
+ argparse.Namespace: Parsed command line arguments
+ """
+ parser = argparse.ArgumentParser(description="Generate span annotations")
+ parser.add_argument(
+ "--db-name",
+ type=str,
+ default="postgres",
+ help="Database name (default: postgres)",
+ )
+ parser.add_argument(
+ "--db-user",
+ type=str,
+ default="postgres",
+ help="Database user (default: postgres)",
+ )
+ parser.add_argument(
+ "--db-host",
+ type=str,
+ default="localhost",
+ help="Database host (default: localhost)",
+ )
+ parser.add_argument(
+ "--db-port",
+ type=int,
+ default=5432,
+ help="Database port (default: 5432)",
+ )
+ parser.add_argument(
+ "--db-password",
+ type=str,
+ default="phoenix",
+ help="Database password (default: phoenix)",
+ )
+ parser.add_argument(
+ "--limit",
+ type=int,
+ default=10_000,
+ help="Number of spans to sample (default: 10000)",
+ )
+ parser.add_argument(
+ "--max-annotations-per-span",
+ type=int,
+ default=10,
+ help="Maximum number of annotations per span (default: 10)",
+ )
+ parser.add_argument(
+ "--label-missing-prob",
+ type=float,
+ default=0.1,
+ help="Probability of label being missing (default: 0.1)",
+ )
+ parser.add_argument(
+ "--score-missing-prob",
+ type=float,
+ default=0.1,
+ help="Probability of score being missing (default: 0.1)",
+ )
+ parser.add_argument(
+ "--explanation-missing-prob",
+ type=float,
+ default=0.1,
+ help="Probability of explanation being missing (default: 0.1)",
+ )
+ parser.add_argument(
+ "--metadata-missing-prob",
+ type=float,
+ default=0.1,
+ help="Probability of metadata being missing (default: 0.1)",
+ )
+ parser.add_argument(
+ "--annotation-names",
+ type=str,
+ default="correctness,helpfulness,relevance,safety,coherence,note",
+ help="Comma-separated list of annotation names (default: correctness,helpfulness,relevance,safety,coherence,note)", # noqa: E501
+ )
+ return parser.parse_args()
+
+
+def run_sql_script(
+ db_name,
+ db_user,
+ db_host,
+ db_port,
+ db_password,
+ script_path,
+ print_output=True,
+ limit=10000,
+ max_annotations_per_span=10,
+ label_missing_prob=0.1,
+ score_missing_prob=0.1,
+ explanation_missing_prob=0.1,
+ metadata_missing_prob=0.1,
+ annotation_names="correctness,helpfulness,relevance,safety,coherence",
+):
+ """Run a SQL script file using psql.
+
+ Args:
+ db_name (str): Database name
+ db_user (str): Database user
+ db_host (str): Database host
+ db_port (int): Database port
+ db_password (str): Database password
+ script_path (str): Path to SQL script file
+ print_output (bool): Whether to print the output (default: True)
+ limit (int): Number of spans to sample and annotate (default: 10000)
+ max_annotations_per_span (int): Maximum number of annotations per span (default: 10)
+ label_missing_prob (float): Probability of label being missing (default: 0.1)
+ score_missing_prob (float): Probability of score being missing (default: 0.1)
+ explanation_missing_prob (float): Probability of explanation being missing (default: 0.1)
+ metadata_missing_prob (float): Probability of metadata being missing (default: 0.1)
+ annotation_names (str): Comma-separated list of annotation names (default: correctness,helpfulness,relevance,safety,coherence)
+
+ Returns:
+ bool: True if successful, False otherwise
+
+ Raises:
+ subprocess.CalledProcessError: If the psql command fails
+ """ # noqa: E501
+ # Set up environment with password
+ env = os.environ.copy()
+ env["PGPASSWORD"] = db_password
+
+ # Escape single quotes in annotation names
+ escaped_names = annotation_names.replace("'", "''")
+ cmd = [
+ "psql",
+ "-h",
+ db_host,
+ "-p",
+ str(db_port),
+ "-d",
+ db_name,
+ "-U",
+ db_user,
+ "-v",
+ f"limit={limit}",
+ "-v",
+ f"max_annotations_per_span={max_annotations_per_span}",
+ "-v",
+ f"label_missing_prob={label_missing_prob}",
+ "-v",
+ f"score_missing_prob={score_missing_prob}",
+ "-v",
+ f"explanation_missing_prob={explanation_missing_prob}",
+ "-v",
+ f"metadata_missing_prob={metadata_missing_prob}",
+ "-v",
+ f"annotation_names={escaped_names}",
+ "-f",
+ script_path,
+ ]
+
+ # Execute the command
+ result = subprocess.run(cmd, capture_output=True, text=True, env=env)
+
+ # Check if the command was successful
+ if result.returncode != 0:
+ print("Error executing SQL script:")
+ print(result.stderr)
+ return False
+
+ # Print the output if requested
+ if print_output and result.stdout:
+ print("\nSQL Output:")
+ print(result.stdout)
+ if result.stderr:
+ print("\nSQL Errors:")
+ print(result.stderr)
+
+ return True
+
+
+def main():
+ """Main function to execute the span annotations generation.
+
+ This function:
+ 1. Parses command line arguments
+ 2. Locates the SQL script
+ 3. Executes the script with the provided database connection parameters
+ 4. Reports success or failure with timing information
+ """
+ args = parse_arguments()
+
+ # Get the directory of the current script
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+
+ # Hard-coded script paths
+ sql_script_path = os.path.join(script_dir, "generate_span_annotations.sql")
+
+ try:
+ print("Generating span annotations...", end="", flush=True)
+
+ # Record start time
+ start_time = time.time()
+
+ if not run_sql_script(
+ args.db_name,
+ args.db_user,
+ args.db_host,
+ args.db_port,
+ args.db_password,
+ sql_script_path,
+ limit=args.limit,
+ max_annotations_per_span=args.max_annotations_per_span,
+ label_missing_prob=args.label_missing_prob,
+ score_missing_prob=args.score_missing_prob,
+ explanation_missing_prob=args.explanation_missing_prob,
+ metadata_missing_prob=args.metadata_missing_prob,
+ annotation_names=args.annotation_names,
+ ):
+ print(" failed")
+ print("Error generating annotations. Aborting.")
+ sys.exit(1)
+
+ # Report completion
+ total_time = time.time() - start_time
+ total_time_str = str(timedelta(seconds=int(total_time)))
+ print(f" done (took {total_time_str})")
+
+ except Exception as e:
+ print(f"Error: {e}")
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/generate_data_via_plpgsql/generate_span_annotations.sql b/scripts/generate_data_via_plpgsql/generate_span_annotations.sql
new file mode 100644
index 0000000000..c9fe854c5a
--- /dev/null
+++ b/scripts/generate_data_via_plpgsql/generate_span_annotations.sql
@@ -0,0 +1,105 @@
+/*
+ * Generate Span Annotations
+ *
+ * This script generates random annotations for spans in the database. It:
+ * 1. Samples random spans using TABLESAMPLE SYSTEM (1)
+ * 2. For each sampled span, generates between 1 and max_annotations_per_span annotations
+ * 3. Each annotation has:
+ * - Random name from the provided list
+ * - Random label: either "YES" or "NO"
+ * - Random score: an integer between -100,000,000 and 100,000,000
+ * - Detailed metadata JSON object with model parameters and context
+ * - Random annotator kind: either "HUMAN" or "LLM"
+ * - Multi-paragraph explanation text
+ *
+ * The script uses a single bulk INSERT operation for efficiency.
+ * TABLESAMPLE SYSTEM (1) samples approximately 1% of the table randomly.
+ * When a duplicate annotation (same name and span_rowid) is found, it is skipped.
+ *
+ * Note: This script assumes the existence of the following tables:
+ * - public.spans: Contains the spans to be annotated
+ * - public.span_annotations: Where the annotations will be stored
+ *
+ * The script maintains referential integrity by using the span's id as span_rowid.
+ */
+
+-- Main insert with optimized sampling and data generation
+INSERT INTO public.span_annotations (
+ span_rowid,
+ identifier,
+ source,
+ name,
+ label,
+ score,
+ metadata,
+ annotator_kind,
+ explanation
+)
+WITH annotation_names AS (
+ SELECT array_agg(name) as names_array
+ FROM unnest(string_to_array(:'annotation_names', ',')) as name
+),
+sampled_spans AS (
+ SELECT
+ id,
+ 1 + floor(random() * :max_annotations_per_span)::int as num_annotations
+ FROM public.spans
+ TABLESAMPLE SYSTEM (1)
+ LIMIT :limit
+),
+span_repeats AS (
+ SELECT
+ s.id,
+ generate_series(1, s.num_annotations) as annotation_num,
+ random() < :label_missing_prob as label_missing,
+ random() < :score_missing_prob as score_missing,
+ random() < :explanation_missing_prob as explanation_missing,
+ random() < :metadata_missing_prob as metadata_missing
+ FROM sampled_spans s
+)
+SELECT
+ s.id,
+ CASE WHEN s.annotation_num = 1 THEN '' ELSE gen_random_uuid()::text END,
+ CASE WHEN random() < 0.5 THEN 'APP' ELSE 'API' END,
+ a.names_array[1 + floor(random() * array_length(a.names_array, 1))::int],
+ CASE
+ WHEN s.label_missing THEN NULL
+ ELSE CASE WHEN random() < 0.5 THEN 'YES' ELSE 'NO' END
+ END,
+ CASE
+ WHEN s.score_missing THEN NULL
+ ELSE floor(random() * 200000001 - 100000000)::int
+ END,
+ CASE
+ WHEN s.metadata_missing THEN '{}'::jsonb
+ ELSE jsonb_build_object(
+ 'confidence', random(),
+ 'timestamp', extract(epoch from now())::bigint,
+ 'version', '1.0',
+ 'model', CASE WHEN random() < 0.5 THEN 'gpt-4' ELSE 'gpt-3.5-turbo' END,
+ 'temperature', random() * 2,
+ 'max_tokens', floor(random() * 1000)::int,
+ 'context_length', floor(random() * 4000)::int,
+ 'top_p', random(),
+ 'frequency_penalty', random() * 2 - 1,
+ 'presence_penalty', random() * 2 - 1,
+ 'stop_sequences', ARRAY['\n', '.', '?', '!'],
+ 'logprobs', floor(random() * 5)::int,
+ 'best_of', floor(random() * 3 + 1)::int,
+ 'echo', random() < 0.5,
+ 'stream', random() < 0.5,
+ 'user', 'user_' || floor(random() * 1000)::text,
+ 'organization', 'org_' || floor(random() * 100)::text,
+ 'deployment', 'deploy_' || floor(random() * 10)::text
+ )
+ END,
+ CASE WHEN random() < 0.5 THEN 'HUMAN' ELSE 'LLM' END,
+ CASE
+ WHEN s.explanation_missing THEN NULL
+ ELSE 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+
+Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem.'
+ END
+FROM span_repeats s
+CROSS JOIN annotation_names a
+ON CONFLICT (name, span_rowid, identifier) DO NOTHING;
diff --git a/src/phoenix/db/constants.py b/src/phoenix/db/constants.py
new file mode 100644
index 0000000000..29ff95f91e
--- /dev/null
+++ b/src/phoenix/db/constants.py
@@ -0,0 +1 @@
+DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID = 0
diff --git a/src/phoenix/db/facilitator.py b/src/phoenix/db/facilitator.py
index a011ccffd9..2196f2b1be 100644
--- a/src/phoenix/db/facilitator.py
+++ b/src/phoenix/db/facilitator.py
@@ -9,6 +9,7 @@
from sqlalchemy import (
distinct,
+ exists,
insert,
select,
)
@@ -27,7 +28,13 @@
get_env_default_admin_initial_password,
)
from phoenix.db import models
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
from phoenix.db.enums import COLUMN_ENUMS, UserRole
+from phoenix.db.types.trace_retention import (
+ MaxDaysRule,
+ TraceRetentionCronExpression,
+ TraceRetentionRule,
+)
from phoenix.server.email.types import WelcomeEmailSender
from phoenix.server.types import DbSessionFactory
@@ -57,6 +64,7 @@ async def __call__(self) -> None:
_ensure_user_roles,
_get_system_user_id,
partial(_ensure_admins, email_sender=self._email_sender),
+ _ensure_default_project_trace_retention_policy,
):
await fn(self._db)
@@ -205,3 +213,50 @@ async def _ensure_admins(
):
if isinstance(exc, Exception):
logger.error(f"Failed to send welcome email: {exc}")
+
+
+async def _ensure_default_project_trace_retention_policy(db: DbSessionFactory) -> None:
+ """
+ Ensures the default trace retention policy (id=0) exists in the database. Default policy
+ applies to all projects without a specific policy (i.e. foreign key is null).
+
+ This function checks for the presence of the default trace retention policy and
+ creates it if missing. The default trace retention policy:
+
+ - Has ID=0
+ - Is named "Default"
+ - Runs every Sunday at midnight UTC (cron: "0 0 * * 0")
+ - Retains traces indefinitely
+
+ If the default policy already exists, this function makes no changes.
+
+ Args:
+ db (DbSessionFactory): An async SQLAlchemy session factory.
+
+ Returns:
+ None
+ """
+ assert DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID == 0
+ async with db() as session:
+ if await session.scalar(
+ select(
+ exists().where(
+ models.ProjectTraceRetentionPolicy.id
+ == DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+ )
+ )
+ ):
+ return
+ cron_expression = TraceRetentionCronExpression(root="0 0 * * 0")
+ rule = TraceRetentionRule(root=MaxDaysRule(max_days=0))
+ await session.execute(
+ insert(models.ProjectTraceRetentionPolicy),
+ [
+ {
+ "id": DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID,
+ "name": "Default",
+ "cron_expression": cron_expression,
+ "rule": rule,
+ }
+ ],
+ )
diff --git a/src/phoenix/db/insertion/document_annotation.py b/src/phoenix/db/insertion/document_annotation.py
index ee98221ede..b9b856c2ab 100644
--- a/src/phoenix/db/insertion/document_annotation.py
+++ b/src/phoenix/db/insertion/document_annotation.py
@@ -1,13 +1,13 @@
from collections.abc import Mapping
from datetime import datetime
-from typing import Any, NamedTuple, Optional
+from typing import NamedTuple, Optional
-from sqlalchemy import Row, Select, and_, select, tuple_
+from sqlalchemy import insert, select
from sqlalchemy.ext.asyncio import AsyncSession
from typing_extensions import TypeAlias
from phoenix.db import models
-from phoenix.db.helpers import dedup, num_docs_col
+from phoenix.db.helpers import num_docs_col
from phoenix.db.insertion.helpers import as_kv
from phoenix.db.insertion.types import (
Insertables,
@@ -46,7 +46,7 @@ class DocumentAnnotationQueueInserter(
DocumentAnnotationDmlEvent,
],
table=models.DocumentAnnotation,
- unique_by=("name", "span_rowid", "document_position"),
+ unique_by=(),
):
async def _events(
self,
@@ -54,7 +54,7 @@ async def _events(
*insertions: Insertables.DocumentAnnotation,
) -> list[DocumentAnnotationDmlEvent]:
records = [dict(as_kv(ins.row)) for ins in insertions]
- stmt = self._insert_on_conflict(*records).returning(self.table.id)
+ stmt = insert(self.table).values(records).returning(self.table.id)
ids = tuple([_ async for _ in await session.stream_scalars(stmt)])
return [DocumentAnnotationDmlEvent(ids)]
@@ -71,35 +71,19 @@ async def _partition(
to_postpone: list[Postponed[Precursors.DocumentAnnotation]] = []
to_discard: list[Received[Precursors.DocumentAnnotation]] = []
- stmt = self._select_existing(*map(_key, parcels))
- existing: list[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
+ span_ids = {p.item.span_id for p in parcels}
+ stmt = select(models.Span.id, models.Span.span_id, num_docs_col(self._db.dialect)).where(
+ models.Span.span_id.in_(span_ids)
+ )
+ result = await session.execute(stmt)
+ spans = result.all()
existing_spans: Mapping[str, _SpanAttr] = {
- e.span_id: _SpanAttr(e.span_rowid, e.num_docs) for e in existing
- }
- existing_annos: Mapping[_Key, _AnnoAttr] = {
- (e.name, e.span_id, e.document_position): _AnnoAttr(e.span_rowid, e.id, e.updated_at)
- for e in existing
- if e.id is not None
- and e.name is not None
- and e.document_position is not None
- and e.updated_at is not None
+ row.span_id: _SpanAttr(row.id, row.num_docs) for row in spans
}
for p in parcels:
- if (anno := existing_annos.get(_key(p))) is not None:
- if p.received_at <= anno.updated_at:
- to_discard.append(p)
- else:
- to_insert.append(
- Received(
- received_at=p.received_at,
- item=p.item.as_insertable(
- span_rowid=anno.span_rowid,
- id_=anno.id_,
- ),
- )
- )
- elif (span := existing_spans.get(p.item.span_id)) is not None:
+ if p.item.span_id in existing_spans:
+ span = existing_spans[p.item.span_id]
if 0 <= p.item.document_position < span.num_docs:
to_insert.append(
Received(
@@ -122,50 +106,9 @@ async def _partition(
to_discard.append(p)
assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
- to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
return to_insert, to_postpone, to_discard
- def _select_existing(self, *keys: _Key) -> Select[_Existing]:
- anno = self.table
- span = (
- select(models.Span.id, models.Span.span_id, num_docs_col(self._db.dialect))
- .where(models.Span.span_id.in_({span_id for _, span_id, *_ in keys}))
- .cte()
- )
- onclause = and_(
- span.c.id == anno.span_rowid,
- anno.name.in_({name for name, *_ in keys}),
- tuple_(anno.name, span.c.span_id, anno.document_position).in_(keys),
- )
- return select(
- span.c.id.label("span_rowid"),
- span.c.span_id,
- span.c.num_docs,
- anno.id,
- anno.name,
- anno.document_position,
- anno.updated_at,
- ).outerjoin_from(span, anno, onclause)
-
class _SpanAttr(NamedTuple):
span_rowid: _SpanRowId
num_docs: _NumDocs
-
-
-class _AnnoAttr(NamedTuple):
- span_rowid: _SpanRowId
- id_: _AnnoRowId
- updated_at: datetime
-
-
-def _key(p: Received[Precursors.DocumentAnnotation]) -> _Key:
- return p.item.obj.name, p.item.span_id, p.item.document_position
-
-
-def _unique_by(p: Received[Insertables.DocumentAnnotation]) -> _UniqueBy:
- return p.item.obj.name, p.item.span_rowid, p.item.document_position
-
-
-def _time(p: Received[Any]) -> datetime:
- return p.received_at
diff --git a/src/phoenix/db/insertion/evaluation.py b/src/phoenix/db/insertion/evaluation.py
index 9a6ae27a14..d9cdc33c18 100644
--- a/src/phoenix/db/insertion/evaluation.py
+++ b/src/phoenix/db/insertion/evaluation.py
@@ -86,13 +86,15 @@ async def _insert_trace_evaluation(
explanation=explanation,
metadata_={}, # `metadata_` must match ORM
annotator_kind="LLM",
+ identifier="",
+ source="API",
)
await session.execute(
insert_on_conflict(
values,
dialect=dialect,
table=models.TraceAnnotation,
- unique_by=("name", "trace_rowid"),
+ unique_by=("name", "trace_rowid", "identifier"),
)
)
return TraceEvaluationInsertionEvent(project_rowid, evaluation_name)
@@ -128,13 +130,15 @@ async def _insert_span_evaluation(
explanation=explanation,
metadata_={}, # `metadata_` must match ORM
annotator_kind="LLM",
+ identifier="",
+ source="API",
)
await session.execute(
insert_on_conflict(
values,
dialect=dialect,
table=models.SpanAnnotation,
- unique_by=("name", "span_rowid"),
+ unique_by=("name", "span_rowid", "identifier"),
)
)
return SpanEvaluationInsertionEvent(project_rowid, evaluation_name)
@@ -179,13 +183,21 @@ async def _insert_document_evaluation(
explanation=explanation,
metadata_={}, # `metadata_` must match ORM
annotator_kind="LLM",
+ identifier="",
+ source="API",
)
await session.execute(
insert_on_conflict(
values,
dialect=dialect,
table=models.DocumentAnnotation,
- unique_by=("name", "span_rowid", "document_position"),
+ unique_by=(
+ "name",
+ "span_rowid",
+ "document_position",
+ "identifier",
+ ),
+ constraint_name="uq_document_annotations_name_span_rowid_document_pos_identifier", # The name of the unique constraint is specified manually since the auto-generated name is longer than the Postgres limit of 63 characters # noqa: E501
)
)
return DocumentEvaluationInsertionEvent(project_rowid, evaluation_name)
diff --git a/src/phoenix/db/insertion/helpers.py b/src/phoenix/db/insertion/helpers.py
index c27e8ab3f8..76b77f3496 100644
--- a/src/phoenix/db/insertion/helpers.py
+++ b/src/phoenix/db/insertion/helpers.py
@@ -36,6 +36,7 @@ def insert_on_conflict(
unique_by: Sequence[str],
on_conflict: OnConflict = OnConflict.DO_UPDATE,
set_: Optional[Mapping[str, Any]] = None,
+ constraint_name: Optional[str] = None,
) -> Insert:
"""
Dialect specific insertion statement using ON CONFLICT DO syntax.
@@ -50,7 +51,7 @@ def insert_on_conflict(
unique_records.append(v)
seen.add(k)
records = tuple(reversed(unique_records))
- constraint = "_".join(("uq", table.__tablename__, *unique_by))
+ constraint = constraint_name or "_".join(("uq", table.__tablename__, *unique_by))
if dialect is SupportedSQLDialect.POSTGRESQL:
stmt_postgresql = insert_postgresql(table).values(records)
if on_conflict is OnConflict.DO_NOTHING:
diff --git a/src/phoenix/db/insertion/span_annotation.py b/src/phoenix/db/insertion/span_annotation.py
index 05a4121b16..688a0a6f09 100644
--- a/src/phoenix/db/insertion/span_annotation.py
+++ b/src/phoenix/db/insertion/span_annotation.py
@@ -1,13 +1,12 @@
from collections.abc import Mapping
from datetime import datetime
-from typing import Any, NamedTuple, Optional
+from typing import Optional
-from sqlalchemy import Row, Select, and_, select, tuple_
+from sqlalchemy import insert, select
from sqlalchemy.ext.asyncio import AsyncSession
from typing_extensions import TypeAlias
from phoenix.db import models
-from phoenix.db.helpers import dedup
from phoenix.db.insertion.helpers import as_kv
from phoenix.db.insertion.types import (
Insertables,
@@ -42,7 +41,7 @@ class SpanAnnotationQueueInserter(
SpanAnnotationDmlEvent,
],
table=models.SpanAnnotation,
- unique_by=("name", "span_rowid"),
+ unique_by=(),
):
async def _events(
self,
@@ -50,7 +49,7 @@ async def _events(
*insertions: Insertables.SpanAnnotation,
) -> list[SpanAnnotationDmlEvent]:
records = [dict(as_kv(ins.row)) for ins in insertions]
- stmt = self._insert_on_conflict(*records).returning(self.table.id)
+ stmt = insert(self.table).values(records).returning(self.table.id)
ids = tuple([_ async for _ in await session.stream_scalars(stmt)])
return [SpanAnnotationDmlEvent(ids)]
@@ -67,38 +66,18 @@ async def _partition(
to_postpone: list[Postponed[Precursors.SpanAnnotation]] = []
to_discard: list[Received[Precursors.SpanAnnotation]] = []
- stmt = self._select_existing(*map(_key, parcels))
- existing: list[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
- existing_spans: Mapping[str, _SpanAttr] = {
- e.span_id: _SpanAttr(e.span_rowid) for e in existing
- }
- existing_annos: Mapping[_Key, _AnnoAttr] = {
- (e.name, e.span_id): _AnnoAttr(e.span_rowid, e.id, e.updated_at)
- for e in existing
- if e.id is not None and e.name is not None and e.updated_at is not None
- }
+ span_ids = {p.item.span_id for p in parcels}
+ stmt = select(models.Span.id, models.Span.span_id).where(models.Span.span_id.in_(span_ids))
+ result = await session.execute(stmt)
+ spans = result.all()
+ existing_spans: Mapping[str, int] = {row.span_id: row.id for row in spans}
for p in parcels:
- if (anno := existing_annos.get(_key(p))) is not None:
- if p.received_at <= anno.updated_at:
- to_discard.append(p)
- else:
- to_insert.append(
- Received(
- received_at=p.received_at,
- item=p.item.as_insertable(
- span_rowid=anno.span_rowid,
- id_=anno.id_,
- ),
- )
- )
- elif (span := existing_spans.get(p.item.span_id)) is not None:
+ if p.item.span_id in existing_spans:
to_insert.append(
Received(
received_at=p.received_at,
- item=p.item.as_insertable(
- span_rowid=span.span_rowid,
- ),
+ item=p.item.as_insertable(span_rowid=existing_spans[p.item.span_id]),
)
)
elif isinstance(p, Postponed):
@@ -112,47 +91,4 @@ async def _partition(
to_discard.append(p)
assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
- to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
return to_insert, to_postpone, to_discard
-
- def _select_existing(self, *keys: _Key) -> Select[_Existing]:
- anno = self.table
- span = (
- select(models.Span.id, models.Span.span_id)
- .where(models.Span.span_id.in_({span_id for _, span_id in keys}))
- .cte()
- )
- onclause = and_(
- span.c.id == anno.span_rowid,
- anno.name.in_({name for name, _ in keys}),
- tuple_(anno.name, span.c.span_id).in_(keys),
- )
- return select(
- span.c.id.label("span_rowid"),
- span.c.span_id,
- anno.id,
- anno.name,
- anno.updated_at,
- ).outerjoin_from(span, anno, onclause)
-
-
-class _SpanAttr(NamedTuple):
- span_rowid: _SpanRowId
-
-
-class _AnnoAttr(NamedTuple):
- span_rowid: _SpanRowId
- id_: _AnnoRowId
- updated_at: datetime
-
-
-def _key(p: Received[Precursors.SpanAnnotation]) -> _Key:
- return p.item.obj.name, p.item.span_id
-
-
-def _unique_by(p: Received[Insertables.SpanAnnotation]) -> _UniqueBy:
- return p.item.obj.name, p.item.span_rowid
-
-
-def _time(p: Received[Any]) -> datetime:
- return p.received_at
diff --git a/src/phoenix/db/insertion/trace_annotation.py b/src/phoenix/db/insertion/trace_annotation.py
index 3d83a6fc85..db0871aa5f 100644
--- a/src/phoenix/db/insertion/trace_annotation.py
+++ b/src/phoenix/db/insertion/trace_annotation.py
@@ -1,13 +1,11 @@
-from collections.abc import Mapping
from datetime import datetime
-from typing import Any, NamedTuple, Optional
+from typing import Optional
-from sqlalchemy import Row, Select, and_, select, tuple_
+from sqlalchemy import insert, select
from sqlalchemy.ext.asyncio import AsyncSession
from typing_extensions import TypeAlias
from phoenix.db import models
-from phoenix.db.helpers import dedup
from phoenix.db.insertion.helpers import as_kv
from phoenix.db.insertion.types import (
Insertables,
@@ -42,7 +40,7 @@ class TraceAnnotationQueueInserter(
TraceAnnotationDmlEvent,
],
table=models.TraceAnnotation,
- unique_by=("name", "trace_rowid"),
+ unique_by=(),
):
async def _events(
self,
@@ -50,7 +48,7 @@ async def _events(
*insertions: Insertables.TraceAnnotation,
) -> list[TraceAnnotationDmlEvent]:
records = [dict(as_kv(ins.row)) for ins in insertions]
- stmt = self._insert_on_conflict(*records).returning(self.table.id)
+ stmt = insert(self.table).values(records).returning(self.table.id)
ids = tuple([_ async for _ in await session.stream_scalars(stmt)])
return [TraceAnnotationDmlEvent(ids)]
@@ -67,37 +65,20 @@ async def _partition(
to_postpone: list[Postponed[Precursors.TraceAnnotation]] = []
to_discard: list[Received[Precursors.TraceAnnotation]] = []
- stmt = self._select_existing(*map(_key, parcels))
- existing: list[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
- existing_traces: Mapping[str, _TraceAttr] = {
- e.trace_id: _TraceAttr(e.trace_rowid) for e in existing
- }
- existing_annos: Mapping[_Key, _AnnoAttr] = {
- (e.name, e.trace_id): _AnnoAttr(e.trace_rowid, e.id, e.updated_at)
- for e in existing
- if e.id is not None and e.name is not None and e.updated_at is not None
- }
+ stmt = select(models.Trace.id, models.Trace.trace_id).where(
+ models.Trace.trace_id.in_({p.item.trace_id for p in parcels})
+ )
+ result = await session.execute(stmt)
+ traces = result.all()
+ existing_traces = {row.trace_id: row.id for row in traces}
for p in parcels:
- if (anno := existing_annos.get(_key(p))) is not None:
- if p.received_at <= anno.updated_at:
- to_discard.append(p)
- else:
- to_insert.append(
- Received(
- received_at=p.received_at,
- item=p.item.as_insertable(
- trace_rowid=anno.trace_rowid,
- id_=anno.id_,
- ),
- )
- )
- elif (trace := existing_traces.get(p.item.trace_id)) is not None:
+ if p.item.trace_id in existing_traces:
to_insert.append(
Received(
received_at=p.received_at,
item=p.item.as_insertable(
- trace_rowid=trace.trace_rowid,
+ trace_rowid=existing_traces[p.item.trace_id],
),
)
)
@@ -112,47 +93,4 @@ async def _partition(
to_discard.append(p)
assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
- to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
return to_insert, to_postpone, to_discard
-
- def _select_existing(self, *keys: _Key) -> Select[_Existing]:
- anno = self.table
- trace = (
- select(models.Trace.id, models.Trace.trace_id)
- .where(models.Trace.trace_id.in_({trace_id for _, trace_id in keys}))
- .cte()
- )
- onclause = and_(
- trace.c.id == anno.trace_rowid,
- anno.name.in_({name for name, _ in keys}),
- tuple_(anno.name, trace.c.trace_id).in_(keys),
- )
- return select(
- trace.c.id.label("trace_rowid"),
- trace.c.trace_id,
- anno.id,
- anno.name,
- anno.updated_at,
- ).outerjoin_from(trace, anno, onclause)
-
-
-class _TraceAttr(NamedTuple):
- trace_rowid: _TraceRowId
-
-
-class _AnnoAttr(NamedTuple):
- trace_rowid: _TraceRowId
- id_: _AnnoRowId
- updated_at: datetime
-
-
-def _key(p: Received[Precursors.TraceAnnotation]) -> _Key:
- return p.item.obj.name, p.item.trace_id
-
-
-def _unique_by(p: Received[Insertables.TraceAnnotation]) -> _UniqueBy:
- return p.item.obj.name, p.item.trace_rowid
-
-
-def _time(p: Received[Any]) -> datetime:
- return p.received_at
diff --git a/src/phoenix/db/migrations/versions/2f9d1a65945f_annotation_config_migration.py b/src/phoenix/db/migrations/versions/2f9d1a65945f_annotation_config_migration.py
new file mode 100644
index 0000000000..ad7ccf2991
--- /dev/null
+++ b/src/phoenix/db/migrations/versions/2f9d1a65945f_annotation_config_migration.py
@@ -0,0 +1,316 @@
+"""Annotation config migrations
+
+Revision ID: 2f9d1a65945f
+Revises: bc8fea3c2bc8
+Create Date: 2025-02-06 10:17:15.726197
+
+"""
+
+from typing import Any, Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import JSON, text
+from sqlalchemy.dialects import postgresql
+from sqlalchemy.ext.compiler import compiles
+
+# revision identifiers, used by Alembic.
+revision: str = "2f9d1a65945f"
+down_revision: Union[str, None] = "bc8fea3c2bc8"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+class JSONB(JSON):
+ # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ __visit_name__ = "JSONB"
+
+
+@compiles(JSONB, "sqlite")
+def _(*args: Any, **kwargs: Any) -> str:
+ # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ return "JSONB"
+
+
+JSON_ = (
+ JSON()
+ .with_variant(
+ postgresql.JSONB(),
+ "postgresql",
+ )
+ .with_variant(
+ JSONB(),
+ "sqlite",
+ )
+)
+
+
+def upgrade() -> None:
+ with op.batch_alter_table("span_annotations") as batch_op:
+ batch_op.add_column(
+ sa.Column(
+ "user_id",
+ sa.Integer,
+ sa.ForeignKey("users.id", ondelete="SET NULL"),
+ nullable=True,
+ ),
+ )
+ batch_op.add_column(
+ sa.Column(
+ "identifier",
+ sa.String,
+ nullable=True, # must initially be nullable before backfill
+ index=False, # the index must be added in a separate step
+ ),
+ )
+ batch_op.add_column(
+ sa.Column(
+ "source",
+ sa.String,
+ nullable=True,
+ ),
+ )
+ batch_op.drop_constraint(
+ constraint_name="valid_annotator_kind",
+ type_="check",
+ )
+ batch_op.create_check_constraint(
+ constraint_name="valid_annotator_kind",
+ condition="annotator_kind IN ('LLM', 'CODE', 'HUMAN')",
+ )
+ batch_op.drop_constraint("uq_span_annotations_name_span_rowid", type_="unique")
+ batch_op.create_unique_constraint(
+ "uq_span_annotations_name_span_rowid_identifier",
+ ["name", "span_rowid", "identifier"],
+ )
+ with op.batch_alter_table("span_annotations") as batch_op:
+ batch_op.execute(text("UPDATE span_annotations SET identifier = ''"))
+ batch_op.alter_column("identifier", nullable=False, existing_nullable=True)
+ batch_op.execute(
+ text(
+ """
+ UPDATE span_annotations
+ SET source = CASE
+ WHEN annotator_kind = 'HUMAN' THEN 'APP'
+ ELSE 'API'
+ END
+ """
+ )
+ )
+ batch_op.alter_column(
+ "source",
+ nullable=False,
+ existing_nullable=True,
+ )
+ batch_op.create_check_constraint(
+ constraint_name="valid_source",
+ condition="source IN ('API', 'APP')",
+ )
+
+ with op.batch_alter_table("trace_annotations") as batch_op:
+ batch_op.add_column(
+ sa.Column(
+ "user_id",
+ sa.Integer,
+ sa.ForeignKey("users.id", ondelete="SET NULL"),
+ nullable=True,
+ ),
+ )
+ batch_op.add_column(
+ sa.Column(
+ "identifier",
+ sa.String,
+ nullable=True, # must initially be nullable before backfill
+ index=False, # the index must be added in a separate step
+ ),
+ )
+ batch_op.add_column(
+ sa.Column(
+ "source",
+ sa.String,
+ nullable=True, # must initially be nullable before backfill
+ ),
+ )
+ batch_op.drop_constraint(
+ constraint_name="valid_annotator_kind",
+ type_="check",
+ )
+ batch_op.create_check_constraint(
+ constraint_name="valid_annotator_kind",
+ condition="annotator_kind IN ('LLM', 'CODE', 'HUMAN')",
+ )
+ batch_op.drop_constraint("uq_trace_annotations_name_trace_rowid", type_="unique")
+ batch_op.create_unique_constraint(
+ "uq_trace_annotations_name_trace_rowid_identifier",
+ ["name", "trace_rowid", "identifier"],
+ )
+ with op.batch_alter_table("trace_annotations") as batch_op:
+ batch_op.execute(text("UPDATE trace_annotations SET identifier = ''"))
+ batch_op.alter_column("identifier", nullable=False, existing_nullable=True)
+ batch_op.execute(
+ text(
+ """
+ UPDATE trace_annotations
+ SET source = CASE
+ WHEN annotator_kind = 'HUMAN' THEN 'APP'
+ ELSE 'API'
+ END
+ """
+ )
+ )
+ batch_op.alter_column(
+ "source",
+ nullable=False,
+ existing_nullable=True,
+ )
+ batch_op.create_check_constraint(
+ constraint_name="valid_source",
+ condition="source IN ('API', 'APP')",
+ )
+
+ with op.batch_alter_table("document_annotations") as batch_op:
+ batch_op.add_column(
+ sa.Column(
+ "user_id",
+ sa.Integer,
+ sa.ForeignKey("users.id", ondelete="SET NULL"),
+ nullable=True,
+ ),
+ )
+ batch_op.add_column(
+ sa.Column(
+ "identifier",
+ sa.String,
+ nullable=True, # must initially be nullable before backfill
+ index=False, # the index must be added in a separate step
+ ),
+ )
+ batch_op.add_column(
+ sa.Column(
+ "source",
+ sa.String,
+ nullable=True,
+ ),
+ )
+ batch_op.drop_constraint(
+ constraint_name="valid_annotator_kind",
+ type_="check",
+ )
+ batch_op.create_check_constraint(
+ constraint_name="valid_annotator_kind",
+ condition="annotator_kind IN ('LLM', 'CODE', 'HUMAN')",
+ )
+ batch_op.drop_constraint(
+ "uq_document_annotations_name_span_rowid_document_position",
+ type_="unique",
+ )
+ batch_op.create_unique_constraint(
+ "uq_document_annotations_name_span_rowid_document_pos_identifier", # this name does not conform to the auto-generated pattern, which results in a name longer than the Postgres limit of 63 characters # noqa: E501
+ ["name", "span_rowid", "document_position", "identifier"],
+ )
+ with op.batch_alter_table("document_annotations") as batch_op:
+ batch_op.execute(text("UPDATE document_annotations SET identifier = ''"))
+ batch_op.alter_column("identifier", nullable=False, existing_nullable=True)
+ batch_op.execute(
+ text(
+ """
+ UPDATE document_annotations
+ SET source = CASE
+ WHEN annotator_kind = 'HUMAN' THEN 'APP'
+ ELSE 'API'
+ END
+ """
+ )
+ )
+ batch_op.alter_column(
+ "source",
+ nullable=False,
+ existing_nullable=True,
+ )
+ batch_op.create_check_constraint(
+ constraint_name="valid_source",
+ condition="source IN ('API', 'APP')",
+ )
+
+ op.create_table(
+ "annotation_configs",
+ sa.Column("id", sa.Integer, primary_key=True),
+ sa.Column("name", sa.String, nullable=False, unique=True),
+ sa.Column("config", JSON_, nullable=False),
+ )
+
+ op.create_table(
+ "project_annotation_configs",
+ sa.Column("id", sa.Integer, primary_key=True),
+ sa.Column(
+ "project_id",
+ sa.Integer,
+ sa.ForeignKey("projects.id", ondelete="CASCADE"),
+ nullable=False,
+ index=True,
+ ),
+ sa.Column(
+ "annotation_config_id",
+ sa.Integer,
+ sa.ForeignKey("annotation_configs.id", ondelete="CASCADE"),
+ nullable=False,
+ index=True,
+ ),
+ sa.UniqueConstraint(
+ "project_id",
+ "annotation_config_id",
+ ),
+ )
+
+
+def downgrade() -> None:
+ op.drop_table("project_annotation_configs")
+ op.drop_table("annotation_configs")
+
+ with op.batch_alter_table("document_annotations") as batch_op:
+ batch_op.drop_constraint(
+ "uq_document_annotations_name_span_rowid_document_pos_identifier", type_="unique"
+ )
+ batch_op.create_unique_constraint(
+ "uq_document_annotations_name_span_rowid_document_position",
+ ["name", "span_rowid", "document_position"],
+ )
+ batch_op.drop_constraint("valid_annotator_kind", type_="check")
+ batch_op.create_check_constraint(
+ "valid_annotator_kind",
+ condition="annotator_kind IN ('LLM', 'HUMAN')",
+ )
+ batch_op.drop_constraint("valid_source", type_="check")
+ batch_op.drop_column("source")
+ batch_op.drop_column("identifier")
+ batch_op.drop_column("user_id")
+
+ with op.batch_alter_table("trace_annotations") as batch_op:
+ batch_op.drop_constraint("uq_trace_annotations_name_trace_rowid_identifier", type_="unique")
+ batch_op.create_unique_constraint(
+ "uq_trace_annotations_name_trace_rowid", ["name", "trace_rowid"]
+ )
+ batch_op.drop_constraint("valid_annotator_kind", type_="check")
+ batch_op.create_check_constraint(
+ "valid_annotator_kind",
+ condition="annotator_kind IN ('LLM', 'HUMAN')",
+ )
+ batch_op.drop_constraint("valid_source", type_="check")
+ batch_op.drop_column("source")
+ batch_op.drop_column("identifier")
+ batch_op.drop_column("user_id")
+
+ with op.batch_alter_table("span_annotations") as batch_op:
+ batch_op.drop_constraint("uq_span_annotations_name_span_rowid_identifier", type_="unique")
+ batch_op.create_unique_constraint(
+ "uq_span_annotations_name_span_rowid", ["name", "span_rowid"]
+ )
+ batch_op.drop_constraint("valid_annotator_kind", type_="check")
+ batch_op.create_check_constraint(
+ "valid_annotator_kind",
+ condition="annotator_kind IN ('LLM', 'HUMAN')",
+ )
+ batch_op.drop_constraint("valid_source", type_="check")
+ batch_op.drop_column("source")
+ batch_op.drop_column("identifier")
+ batch_op.drop_column("user_id")
diff --git a/src/phoenix/db/migrations/versions/8a3764fe7f1a_change_jsonb_to_json_for_prompts.py b/src/phoenix/db/migrations/versions/8a3764fe7f1a_change_jsonb_to_json_for_prompts.py
new file mode 100644
index 0000000000..b53798c52a
--- /dev/null
+++ b/src/phoenix/db/migrations/versions/8a3764fe7f1a_change_jsonb_to_json_for_prompts.py
@@ -0,0 +1,76 @@
+"""change jsonb to json for prompts
+
+Revision ID: 8a3764fe7f1a
+Revises: bb8139330879
+Create Date: 2025-04-25 07:04:26.102957
+
+"""
+
+from typing import Any, Sequence, Union
+
+from alembic import op
+from sqlalchemy import JSON
+from sqlalchemy.dialects import postgresql
+from sqlalchemy.ext.compiler import compiles
+
+# revision identifiers, used by Alembic.
+revision: str = "8a3764fe7f1a"
+down_revision: Union[str, None] = "bb8139330879"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+class JSONB(JSON):
+ # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ __visit_name__ = "JSONB"
+
+
+@compiles(JSONB, "sqlite")
+def _(*args: Any, **kwargs: Any) -> str:
+ # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ return "JSONB"
+
+
+JSON_ = (
+ JSON()
+ .with_variant(
+ postgresql.JSONB(),
+ "postgresql",
+ )
+ .with_variant(
+ JSONB(),
+ "sqlite",
+ )
+)
+
+
+def upgrade() -> None:
+ with op.batch_alter_table("prompt_versions") as batch_op:
+ batch_op.alter_column(
+ "tools",
+ type_=JSON,
+ existing_type=JSON_,
+ postgresql_using="tools::json",
+ )
+ batch_op.alter_column(
+ "response_format",
+ type_=JSON,
+ existing_type=JSON_,
+ postgresql_using="response_format::json",
+ )
+
+
+def downgrade() -> None:
+ with op.batch_alter_table("prompt_versions") as batch_op:
+ batch_op.alter_column(
+ "tools",
+ type_=JSON_,
+ existing_type=JSON,
+ postgresql_using="tools::jsonb",
+ )
+ batch_op.alter_column(
+ "response_format",
+ type_=JSON_,
+ existing_type=JSON,
+ postgresql_using="response_format::jsonb",
+ )
diff --git a/src/phoenix/db/migrations/versions/bb8139330879_create_project_trace_retention_policies_table.py b/src/phoenix/db/migrations/versions/bb8139330879_create_project_trace_retention_policies_table.py
new file mode 100644
index 0000000000..596aa8709c
--- /dev/null
+++ b/src/phoenix/db/migrations/versions/bb8139330879_create_project_trace_retention_policies_table.py
@@ -0,0 +1,77 @@
+"""create project trace retention policies table
+
+Revision ID: bb8139330879
+Revises: 2f9d1a65945f
+Create Date: 2025-02-27 15:57:18.752472
+
+"""
+
+from typing import Any, Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import JSON
+from sqlalchemy.dialects import postgresql
+from sqlalchemy.ext.compiler import compiles
+
+
+class JSONB(JSON):
+ # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ __visit_name__ = "JSONB"
+
+
+@compiles(JSONB, "sqlite")
+def _(*args: Any, **kwargs: Any) -> str:
+ # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ return "JSONB"
+
+
+JSON_ = (
+ JSON()
+ .with_variant(
+ postgresql.JSONB(),
+ "postgresql",
+ )
+ .with_variant(
+ JSONB(),
+ "sqlite",
+ )
+)
+
+
+# revision identifiers, used by Alembic.
+revision: str = "bb8139330879"
+down_revision: Union[str, None] = "2f9d1a65945f"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ op.create_table(
+ "project_trace_retention_policies",
+ sa.Column("id", sa.Integer, primary_key=True),
+ sa.Column("name", sa.String, nullable=False),
+ sa.Column("cron_expression", sa.String, nullable=False),
+ sa.Column("rule", JSON_, nullable=False),
+ )
+ with op.batch_alter_table("projects") as batch_op:
+ batch_op.add_column(
+ sa.Column(
+ "trace_retention_policy_id",
+ sa.Integer,
+ sa.ForeignKey("project_trace_retention_policies.id", ondelete="SET NULL"),
+ nullable=True,
+ ),
+ )
+ op.create_index(
+ "ix_projects_trace_retention_policy_id",
+ "projects",
+ ["trace_retention_policy_id"],
+ )
+
+
+def downgrade() -> None:
+ op.drop_index("ix_projects_trace_retention_policy_id")
+ with op.batch_alter_table("projects") as batch_op:
+ batch_op.drop_column("trace_retention_policy_id")
+ op.drop_table("project_trace_retention_policies")
diff --git a/src/phoenix/db/models.py b/src/phoenix/db/models.py
index 707e67060c..63906d9318 100644
--- a/src/phoenix/db/models.py
+++ b/src/phoenix/db/models.py
@@ -1,6 +1,6 @@
from datetime import datetime, timezone
from enum import Enum
-from typing import Any, Iterable, Optional, Sequence, TypedDict, cast
+from typing import Any, Iterable, Literal, Optional, Sequence, TypedDict, cast
import sqlalchemy.sql as sql
from openinference.semconv.trace import RerankerAttributes, SpanAttributes
@@ -45,8 +45,15 @@
from phoenix.config import get_env_database_schema
from phoenix.datetime_utils import normalize_datetime
+from phoenix.db.types.annotation_configs import (
+ AnnotationConfig as AnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ AnnotationConfigType,
+)
from phoenix.db.types.identifier import Identifier
from phoenix.db.types.model_provider import ModelProvider
+from phoenix.db.types.trace_retention import TraceRetentionCronExpression, TraceRetentionRule
from phoenix.server.api.helpers.prompts.models import (
PromptInvocationParameters,
PromptInvocationParametersRootModel,
@@ -267,7 +274,7 @@ def process_result_value(
class _Tools(TypeDecorator[PromptTools]):
# See # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
cache_ok = True
- impl = JSON_
+ impl = JSON
def process_bind_param(
self, value: Optional[PromptTools], _: Dialect
@@ -283,7 +290,7 @@ def process_result_value(
class _PromptResponseFormat(TypeDecorator[PromptResponseFormat]):
# See https://docs.sqlalchemy.org/en/20/core/custom_types.html
cache_ok = True
- impl = JSON_
+ impl = JSON
def process_bind_param(
self, value: Optional[PromptResponseFormat], _: Dialect
@@ -332,6 +339,60 @@ def process_result_value(
return None if value is None else PromptTemplateFormat(value)
+class _TraceRetentionCronExpression(TypeDecorator[TraceRetentionCronExpression]):
+ # See # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ cache_ok = True
+ impl = String
+
+ def process_bind_param(
+ self, value: Optional[TraceRetentionCronExpression], _: Dialect
+ ) -> Optional[str]:
+ assert isinstance(value, TraceRetentionCronExpression)
+ assert isinstance(ans := value.model_dump(), str)
+ return ans
+
+ def process_result_value(
+ self, value: Optional[str], _: Dialect
+ ) -> Optional[TraceRetentionCronExpression]:
+ assert value and isinstance(value, str)
+ return TraceRetentionCronExpression.model_validate(value)
+
+
+class _TraceRetentionRule(TypeDecorator[TraceRetentionRule]):
+ # See # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ cache_ok = True
+ impl = JSON_
+
+ def process_bind_param(
+ self, value: Optional[TraceRetentionRule], _: Dialect
+ ) -> Optional[dict[str, Any]]:
+ assert isinstance(value, TraceRetentionRule)
+ assert isinstance(ans := value.model_dump(), dict)
+ return ans
+
+ def process_result_value(
+ self, value: Optional[dict[str, Any]], _: Dialect
+ ) -> Optional[TraceRetentionRule]:
+ assert value and isinstance(value, dict)
+ return TraceRetentionRule.model_validate(value)
+
+
+class _AnnotationConfig(TypeDecorator[AnnotationConfigType]):
+ # See # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+ cache_ok = True
+ impl = JSON_
+
+ def process_bind_param(
+ self, value: Optional[AnnotationConfigType], _: Dialect
+ ) -> Optional[dict[str, Any]]:
+ return AnnotationConfigModel(root=value).model_dump() if value is not None else None
+
+ def process_result_value(
+ self, value: Optional[str], _: Dialect
+ ) -> Optional[AnnotationConfigType]:
+ return AnnotationConfigModel.model_validate(value).root if value is not None else None
+
+
class ExperimentRunOutput(TypedDict, total=False):
task_output: Any
@@ -357,6 +418,19 @@ class Base(DeclarativeBase):
}
+class ProjectTraceRetentionPolicy(Base):
+ __tablename__ = "project_trace_retention_policies"
+ id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ name: Mapped[str] = mapped_column(String, nullable=False)
+ cron_expression: Mapped[TraceRetentionCronExpression] = mapped_column(
+ _TraceRetentionCronExpression, nullable=False
+ )
+ rule: Mapped[TraceRetentionRule] = mapped_column(_TraceRetentionRule, nullable=False)
+ projects: Mapped[list["Project"]] = relationship(
+ "Project", back_populates="trace_retention_policy", uselist=True
+ )
+
+
class Project(Base):
__tablename__ = "projects"
name: Mapped[str]
@@ -374,7 +448,15 @@ class Project(Base):
updated_at: Mapped[datetime] = mapped_column(
UtcTimeStamp, server_default=func.now(), onupdate=func.now()
)
-
+ trace_retention_policy_id: Mapped[Optional[int]] = mapped_column(
+ ForeignKey("project_trace_retention_policies.id", ondelete="SET NULL"),
+ nullable=True,
+ index=True,
+ )
+ trace_retention_policy: Mapped[Optional[ProjectTraceRetentionPolicy]] = relationship(
+ "ProjectTraceRetentionPolicy",
+ back_populates="projects",
+ )
traces: WriteOnlyMapped[list["Trace"]] = relationship(
"Trace",
back_populates="project",
@@ -602,6 +684,7 @@ def _llm_token_count_total_expression(cls) -> ColumnElement[int]:
)
trace: Mapped["Trace"] = relationship("Trace", back_populates="spans")
+ span_annotations: Mapped[list["SpanAnnotation"]] = relationship(back_populates="span")
document_annotations: Mapped[list["DocumentAnnotation"]] = relationship(back_populates="span")
dataset_examples: Mapped[list["DatasetExample"]] = relationship(back_populates="span")
@@ -732,17 +815,30 @@ class SpanAnnotation(Base):
score: Mapped[Optional[float]] = mapped_column(Float, index=True)
explanation: Mapped[Optional[str]]
metadata_: Mapped[dict[str, Any]] = mapped_column("metadata")
- annotator_kind: Mapped[str] = mapped_column(
- CheckConstraint("annotator_kind IN ('LLM', 'HUMAN')", name="valid_annotator_kind"),
+ annotator_kind: Mapped[Literal["LLM", "CODE", "HUMAN"]] = mapped_column(
+ CheckConstraint("annotator_kind IN ('LLM', 'CODE', 'HUMAN')", name="valid_annotator_kind"),
)
created_at: Mapped[datetime] = mapped_column(UtcTimeStamp, server_default=func.now())
updated_at: Mapped[datetime] = mapped_column(
UtcTimeStamp, server_default=func.now(), onupdate=func.now()
)
+ identifier: Mapped[str] = mapped_column(
+ String,
+ nullable=False,
+ )
+ source: Mapped[Literal["API", "APP"]] = mapped_column(
+ CheckConstraint("source IN ('API', 'APP')", name="valid_source"),
+ )
+ user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id", ondelete="SET NULL"))
+
+ span: Mapped["Span"] = relationship(back_populates="span_annotations")
+ user: Mapped[Optional["User"]] = relationship("User")
+
__table_args__ = (
UniqueConstraint(
"name",
"span_rowid",
+ "identifier",
),
)
@@ -758,17 +854,27 @@ class TraceAnnotation(Base):
score: Mapped[Optional[float]] = mapped_column(Float, index=True)
explanation: Mapped[Optional[str]]
metadata_: Mapped[dict[str, Any]] = mapped_column("metadata")
- annotator_kind: Mapped[str] = mapped_column(
- CheckConstraint("annotator_kind IN ('LLM', 'HUMAN')", name="valid_annotator_kind"),
+ annotator_kind: Mapped[Literal["LLM", "CODE", "HUMAN"]] = mapped_column(
+ CheckConstraint("annotator_kind IN ('LLM', 'CODE', 'HUMAN')", name="valid_annotator_kind"),
)
created_at: Mapped[datetime] = mapped_column(UtcTimeStamp, server_default=func.now())
updated_at: Mapped[datetime] = mapped_column(
UtcTimeStamp, server_default=func.now(), onupdate=func.now()
)
+ identifier: Mapped[str] = mapped_column(
+ String,
+ nullable=False,
+ )
+ source: Mapped[Literal["API", "APP"]] = mapped_column(
+ CheckConstraint("source IN ('API', 'APP')", name="valid_source"),
+ )
+ user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id", ondelete="SET NULL"))
+
__table_args__ = (
UniqueConstraint(
"name",
"trace_rowid",
+ "identifier",
),
)
@@ -785,13 +891,22 @@ class DocumentAnnotation(Base):
score: Mapped[Optional[float]] = mapped_column(Float, index=True)
explanation: Mapped[Optional[str]]
metadata_: Mapped[dict[str, Any]] = mapped_column("metadata")
- annotator_kind: Mapped[str] = mapped_column(
- CheckConstraint("annotator_kind IN ('LLM', 'HUMAN')", name="valid_annotator_kind"),
+ annotator_kind: Mapped[Literal["LLM", "CODE", "HUMAN"]] = mapped_column(
+ CheckConstraint("annotator_kind IN ('LLM', 'CODE', 'HUMAN')", name="valid_annotator_kind"),
)
created_at: Mapped[datetime] = mapped_column(UtcTimeStamp, server_default=func.now())
updated_at: Mapped[datetime] = mapped_column(
UtcTimeStamp, server_default=func.now(), onupdate=func.now()
)
+ identifier: Mapped[str] = mapped_column(
+ String,
+ nullable=False,
+ )
+ source: Mapped[Literal["API", "APP"]] = mapped_column(
+ CheckConstraint("source IN ('API', 'APP')", name="valid_source"),
+ )
+ user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("users.id", ondelete="SET NULL"))
+
span: Mapped["Span"] = relationship(back_populates="document_annotations")
__table_args__ = (
@@ -799,6 +914,7 @@ class DocumentAnnotation(Base):
"name",
"span_rowid",
"document_position",
+ "identifier",
),
)
@@ -1301,3 +1417,25 @@ class PromptVersionTag(Base):
)
__table_args__ = (UniqueConstraint("name", "prompt_id"),)
+
+
+class AnnotationConfig(Base):
+ __tablename__ = "annotation_configs"
+
+ id: Mapped[int] = mapped_column(primary_key=True)
+ name: Mapped[str] = mapped_column(String, nullable=False, unique=True)
+ config: Mapped[AnnotationConfigType] = mapped_column(_AnnotationConfig, nullable=False)
+
+
+class ProjectAnnotationConfig(Base):
+ __tablename__ = "project_annotation_configs"
+
+ id: Mapped[int] = mapped_column(primary_key=True)
+ project_id: Mapped[int] = mapped_column(
+ ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True
+ )
+ annotation_config_id: Mapped[int] = mapped_column(
+ ForeignKey("annotation_configs.id", ondelete="CASCADE"), nullable=False, index=True
+ )
+
+ __table_args__ = (UniqueConstraint("project_id", "annotation_config_id"),)
diff --git a/src/phoenix/db/types/annotation_configs.py b/src/phoenix/db/types/annotation_configs.py
new file mode 100644
index 0000000000..525b54561e
--- /dev/null
+++ b/src/phoenix/db/types/annotation_configs.py
@@ -0,0 +1,97 @@
+from enum import Enum
+from typing import Annotated, Literal, Optional, Union
+
+from pydantic import AfterValidator, Field, RootModel, model_validator
+from typing_extensions import Self, TypeAlias
+
+from .db_models import DBBaseModel
+
+
+class AnnotationType(Enum):
+ CATEGORICAL = "CATEGORICAL"
+ CONTINUOUS = "CONTINUOUS"
+ FREEFORM = "FREEFORM"
+
+
+class OptimizationDirection(Enum):
+ MINIMIZE = "MINIMIZE"
+ MAXIMIZE = "MAXIMIZE"
+ NONE = "NONE"
+
+
+class _BaseAnnotationConfig(DBBaseModel):
+ description: Optional[str] = None
+
+
+def _categorical_value_label_is_non_empty_string(label: str) -> str:
+ if not label:
+ raise ValueError("Label must be non-empty")
+ return label
+
+
+class CategoricalAnnotationValue(DBBaseModel):
+ label: Annotated[str, AfterValidator(_categorical_value_label_is_non_empty_string)]
+ score: Optional[float] = None
+
+
+def _categorical_values_are_non_empty_list(
+ values: list[CategoricalAnnotationValue],
+) -> list[CategoricalAnnotationValue]:
+ if not values:
+ raise ValueError("Values must be non-empty")
+ return values
+
+
+def _categorical_values_have_unique_labels(
+ values: list[CategoricalAnnotationValue],
+) -> list[CategoricalAnnotationValue]:
+ labels = set()
+ for value in values:
+ label = value.label
+ if label in labels:
+ raise ValueError(
+ f'Values for categorical annotation config has duplicate label: "{label}"'
+ )
+ labels.add(label)
+ return values
+
+
+class CategoricalAnnotationConfig(_BaseAnnotationConfig):
+ type: Literal[AnnotationType.CATEGORICAL.value] # type: ignore[name-defined]
+ optimization_direction: OptimizationDirection
+ values: Annotated[
+ list[CategoricalAnnotationValue],
+ AfterValidator(_categorical_values_are_non_empty_list),
+ AfterValidator(_categorical_values_have_unique_labels),
+ ]
+
+
+class ContinuousAnnotationConfig(_BaseAnnotationConfig):
+ type: Literal[AnnotationType.CONTINUOUS.value] # type: ignore[name-defined]
+ optimization_direction: OptimizationDirection
+ lower_bound: Optional[float] = None
+ upper_bound: Optional[float] = None
+
+ @model_validator(mode="after")
+ def check_bounds(self) -> Self:
+ if (
+ self.lower_bound is not None
+ and self.upper_bound is not None
+ and self.lower_bound >= self.upper_bound
+ ):
+ raise ValueError("Lower bound must be strictly less than upper bound")
+ return self
+
+
+class FreeformAnnotationConfig(_BaseAnnotationConfig):
+ type: Literal[AnnotationType.FREEFORM.value] # type: ignore[name-defined]
+
+
+AnnotationConfigType: TypeAlias = Annotated[
+ Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig],
+ Field(..., discriminator="type"),
+]
+
+
+class AnnotationConfig(RootModel[AnnotationConfigType]):
+ root: AnnotationConfigType
diff --git a/src/phoenix/db/types/db_models.py b/src/phoenix/db/types/db_models.py
new file mode 100644
index 0000000000..7a09d9d76b
--- /dev/null
+++ b/src/phoenix/db/types/db_models.py
@@ -0,0 +1,41 @@
+from typing import Any
+
+from pydantic import BaseModel, ConfigDict
+
+
+class DBBaseModel(BaseModel):
+ """
+ A base Pydantic model suitable for use with JSON columns in the database.
+ """
+
+ model_config = ConfigDict(
+ extra="forbid", # disallow extra attributes
+ use_enum_values=True,
+ validate_assignment=True,
+ )
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ kwargs = {k: v for k, v in kwargs.items() if v is not UNDEFINED}
+ super().__init__(*args, **kwargs)
+
+ def model_dump(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
+ return super().model_dump(*args, exclude_unset=True, by_alias=True, **kwargs)
+
+
+class Undefined:
+ """
+ A singleton class that represents an unset or undefined value. Needed since Pydantic
+ can't natively distinguish between an undefined value and a value that is set to
+ None.
+ """
+
+ def __new__(cls) -> Any:
+ if not hasattr(cls, "_instance"):
+ cls._instance = super().__new__(cls)
+ return cls._instance
+
+ def __bool__(self) -> bool:
+ return False
+
+
+UNDEFINED: Any = Undefined()
diff --git a/src/phoenix/db/types/trace_retention.py b/src/phoenix/db/types/trace_retention.py
new file mode 100644
index 0000000000..ffef985bb1
--- /dev/null
+++ b/src/phoenix/db/types/trace_retention.py
@@ -0,0 +1,267 @@
+from __future__ import annotations
+
+from datetime import datetime, timedelta, timezone
+from typing import Annotated, Iterable, Literal, Optional, Union
+
+import sqlalchemy as sa
+from pydantic import AfterValidator, BaseModel, Field, RootModel
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from phoenix.utilities import hour_of_week
+
+
+class _MaxDays(BaseModel):
+ max_days: Annotated[float, Field(ge=0)]
+
+ @property
+ def max_days_filter(self) -> sa.ColumnElement[bool]:
+ if self.max_days <= 0:
+ return sa.literal(False)
+ from phoenix.db.models import Trace
+
+ return Trace.start_time < datetime.now(timezone.utc) - timedelta(days=self.max_days)
+
+
+class _MaxCount(BaseModel):
+ max_count: Annotated[int, Field(ge=0)]
+
+ @property
+ def max_count_filter(self) -> sa.ColumnElement[bool]:
+ if self.max_count <= 0:
+ return sa.literal(False)
+ from phoenix.db.models import Trace
+
+ return Trace.start_time < (
+ sa.select(Trace.start_time)
+ .order_by(Trace.start_time.desc())
+ .offset(self.max_count - 1)
+ .limit(1)
+ .scalar_subquery()
+ )
+
+
+class MaxDaysRule(_MaxDays, BaseModel):
+ type: Literal["max_days"] = "max_days"
+
+ def __bool__(self) -> bool:
+ return self.max_days > 0
+
+ async def delete_traces(
+ self,
+ session: AsyncSession,
+ project_rowids: Union[Iterable[int], sa.ScalarSelect[int]],
+ ) -> set[int]:
+ if self.max_days <= 0:
+ return set()
+ from phoenix.db.models import Trace
+
+ stmt = (
+ sa.delete(Trace)
+ .where(Trace.project_rowid.in_(project_rowids))
+ .where(self.max_days_filter)
+ .returning(Trace.project_rowid)
+ )
+ return set(await session.scalars(stmt))
+
+
+class MaxCountRule(_MaxCount, BaseModel):
+ type: Literal["max_count"] = "max_count"
+
+ def __bool__(self) -> bool:
+ return self.max_count > 0
+
+ async def delete_traces(
+ self,
+ session: AsyncSession,
+ project_rowids: Union[Iterable[int], sa.ScalarSelect[int]],
+ ) -> set[int]:
+ if self.max_count <= 0:
+ return set()
+ from phoenix.db.models import Trace
+
+ stmt = (
+ sa.delete(Trace)
+ .where(Trace.project_rowid.in_(project_rowids))
+ .where(self.max_count_filter)
+ .returning(Trace.project_rowid)
+ )
+ return set(await session.scalars(stmt))
+
+
+class MaxDaysOrCountRule(_MaxDays, _MaxCount, BaseModel):
+ type: Literal["max_days_or_count"] = "max_days_or_count"
+
+ def __bool__(self) -> bool:
+ return self.max_days > 0 or self.max_count > 0
+
+ async def delete_traces(
+ self,
+ session: AsyncSession,
+ project_rowids: Union[Iterable[int], sa.ScalarSelect[int]],
+ ) -> set[int]:
+ if self.max_days <= 0 and self.max_count <= 0:
+ return set()
+ from phoenix.db.models import Trace
+
+ stmt = (
+ sa.delete(Trace)
+ .where(Trace.project_rowid.in_(project_rowids))
+ .where(sa.or_(self.max_days_filter, self.max_count_filter))
+ .returning(Trace.project_rowid)
+ )
+ return set(await session.scalars(stmt))
+
+
+class TraceRetentionRule(RootModel[Union[MaxDaysRule, MaxCountRule, MaxDaysOrCountRule]]):
+ root: Annotated[
+ Union[MaxDaysRule, MaxCountRule, MaxDaysOrCountRule], Field(discriminator="type")
+ ]
+
+ def __bool__(self) -> bool:
+ return bool(self.root)
+
+ async def delete_traces(
+ self,
+ session: AsyncSession,
+ project_rowids: Union[Iterable[int], sa.ScalarSelect[int]],
+ ) -> set[int]:
+ return await self.root.delete_traces(session, project_rowids)
+
+
+def _time_of_next_run(
+ cron_expression: str,
+ after: Optional[datetime] = None,
+) -> datetime:
+ """
+ Parse a cron expression and calculate the UTC datetime of the next run.
+ Only processes hour, and day of week fields; day-of-month and
+ month fields must be '*'; minute field must be 0.
+
+ Args:
+ cron_expression (str): Standard cron expression with 5 fields:
+ minute hour day-of-month month day-of-week
+ (minute must be '0'; day-of-month and month must be '*')
+ after: Optional[datetime]: The datetime to start searching from. If None,
+ the current time is used. Must be timezone-aware.
+
+ Returns:
+ datetime: The datetime of the next run. Timezone is UTC.
+
+ Raises:
+ ValueError: If the expression has non-wildcard values for day-of-month or month, if the
+ minute field is not '0', or if no match is found within the next 7 days (168 hours).
+ """
+ fields: list[str] = cron_expression.strip().split()
+ if len(fields) != 5:
+ raise ValueError(
+ "Invalid cron expression. Expected 5 fields "
+ "(minute hour day-of-month month day-of-week)."
+ )
+ if fields[0] != "0":
+ raise ValueError("Invalid cron expression. Minute field must be '0'.")
+ if fields[2] != "*" or fields[3] != "*":
+ raise ValueError("Invalid cron expression. Day-of-month and month fields must be '*'.")
+ hours: set[int] = _parse_field(fields[1], 0, 23)
+ # Parse days of week (0-6, where 0 is Sunday)
+ days_of_week: set[int] = _parse_field(fields[4], 0, 6)
+ # Convert to Python's weekday format (0-6, where 0 is Monday)
+ # Sunday (0 in cron) becomes 6 in Python's weekday()
+ python_days_of_week = {(day_of_week + 6) % 7 for day_of_week in days_of_week}
+ t = after.replace(tzinfo=timezone.utc) if after else datetime.now(timezone.utc)
+ t = t.replace(minute=0, second=0, microsecond=0)
+ for _ in range(168): # Check up to 7 days (168 hours)
+ t += timedelta(hours=1)
+ if t.hour in hours and t.weekday() in python_days_of_week:
+ return t
+ raise ValueError("No matching execution time found within the next 7 days.")
+
+
+class TraceRetentionCronExpression(RootModel[str]):
+ root: Annotated[str, AfterValidator(lambda x: (_time_of_next_run(x), x)[1])]
+
+ def get_hour_of_prev_run(self) -> int:
+ """
+ Calculate the hour of the previous run before now.
+
+ Returns:
+ int: The hour of the previous run (0-167), where 0 is midnight Sunday UTC.
+ """
+ after = datetime.now(timezone.utc) - timedelta(hours=1)
+ return hour_of_week(_time_of_next_run(self.root, after))
+
+
+def _parse_field(field: str, min_val: int, max_val: int) -> set[int]:
+ """
+ Parse a cron field and return the set of matching values.
+
+ Args:
+ field (str): The cron field to parse
+ min_val (int): Minimum allowed value for this field
+ max_val (int): Maximum allowed value for this field
+
+ Returns:
+ set[int]: Set of all valid values represented by the field expression
+
+ Raises:
+ ValueError: If the field contains invalid values or formats
+ """
+ if field == "*":
+ return set(range(min_val, max_val + 1))
+ values: set[int] = set()
+ for part in field.split(","):
+ if "/" in part:
+ # Handle steps
+ range_part, step_str = part.split("/")
+ try:
+ step = int(step_str)
+ except ValueError:
+ raise ValueError(f"Invalid step value: {step_str}")
+ if step <= 0:
+ raise ValueError(f"Step value must be positive: {step}")
+ if range_part == "*":
+ start, end = min_val, max_val
+ elif "-" in range_part:
+ try:
+ start_str, end_str = range_part.split("-")
+ start, end = int(start_str), int(end_str)
+ except ValueError:
+ raise ValueError(f"Invalid range format: {range_part}")
+ if start < min_val or end > max_val:
+ raise ValueError(
+ f"Range {start}-{end} outside allowed values ({min_val}-{max_val})"
+ )
+ if start > end:
+ raise ValueError(f"Invalid range: {start}-{end} (start > end)")
+ else:
+ try:
+ start = int(range_part)
+ except ValueError:
+ raise ValueError(f"Invalid value: {range_part}")
+ if start < min_val or start > max_val:
+ raise ValueError(f"Value {start} out of range ({min_val}-{max_val})")
+ end = max_val
+ values.update(range(start, end + 1, step))
+ elif "-" in part:
+ # Handle ranges
+ try:
+ start_str, end_str = part.split("-")
+ start, end = int(start_str), int(end_str)
+ except ValueError:
+ raise ValueError(f"Invalid range format: {part}")
+ if start < min_val or end > max_val:
+ raise ValueError(
+ f"Range {start}-{end} outside allowed values ({min_val}-{max_val})"
+ )
+ if start > end:
+ raise ValueError(f"Invalid range: {start}-{end} (start > end)")
+ values.update(range(start, end + 1))
+ else:
+ # Handle single values
+ try:
+ value = int(part)
+ except ValueError:
+ raise ValueError(f"Invalid value: {part}")
+ if value < min_val or value > max_val:
+ raise ValueError(f"Value {value} out of range ({min_val}-{max_val})")
+ values.add(value)
+ return values
diff --git a/src/phoenix/server/api/auth.py b/src/phoenix/server/api/auth.py
index 0c816dcbed..d4a68464fe 100644
--- a/src/phoenix/server/api/auth.py
+++ b/src/phoenix/server/api/auth.py
@@ -42,3 +42,12 @@ def has_permission(self, source: Any, info: Info, **kwargs: Any) -> bool:
if not info.context.auth_enabled:
return False
return isinstance((user := info.context.user), PhoenixUser) and user.is_admin
+
+
+class IsAdminIfAuthEnabled(Authorization):
+ message = MSG_ADMIN_ONLY
+
+ def has_permission(self, source: Any, info: Info, **kwargs: Any) -> bool:
+ if not info.context.auth_enabled:
+ return True
+ return isinstance((user := info.context.user), PhoenixUser) and user.is_admin
diff --git a/src/phoenix/server/api/context.py b/src/phoenix/server/api/context.py
index 42306289ff..545c1f2243 100644
--- a/src/phoenix/server/api/context.py
+++ b/src/phoenix/server/api/context.py
@@ -32,6 +32,7 @@
NumChildSpansDataLoader,
NumSpansPerTraceDataLoader,
ProjectByNameDataLoader,
+ ProjectIdsByTraceRetentionPolicyIdDataLoader,
PromptVersionSequenceNumberDataLoader,
RecordCountDataLoader,
SessionIODataLoader,
@@ -47,6 +48,7 @@
TableFieldsDataLoader,
TokenCountDataLoader,
TraceByTraceIdsDataLoader,
+ TraceRetentionPolicyIdByProjectIdDataLoader,
TraceRootSpansDataLoader,
UserRolesDataLoader,
UsersDataLoader,
@@ -82,6 +84,7 @@ class DataLoaders:
num_child_spans: NumChildSpansDataLoader
num_spans_per_trace: NumSpansPerTraceDataLoader
project_fields: TableFieldsDataLoader
+ projects_by_trace_retention_policy_id: ProjectIdsByTraceRetentionPolicyIdDataLoader
prompt_version_sequence_number: PromptVersionSequenceNumberDataLoader
record_counts: RecordCountDataLoader
session_first_inputs: SessionIODataLoader
@@ -99,6 +102,8 @@ class DataLoaders:
token_counts: TokenCountDataLoader
trace_by_trace_ids: TraceByTraceIdsDataLoader
trace_fields: TableFieldsDataLoader
+ trace_retention_policy_id_by_project_id: TraceRetentionPolicyIdByProjectIdDataLoader
+ project_trace_retention_policy_fields: TableFieldsDataLoader
trace_root_spans: TraceRootSpansDataLoader
project_by_name: ProjectByNameDataLoader
users: UsersDataLoader
diff --git a/src/phoenix/server/api/dataloaders/__init__.py b/src/phoenix/server/api/dataloaders/__init__.py
index a6a14757b5..fabcd38b03 100644
--- a/src/phoenix/server/api/dataloaders/__init__.py
+++ b/src/phoenix/server/api/dataloaders/__init__.py
@@ -20,6 +20,7 @@
from .num_child_spans import NumChildSpansDataLoader
from .num_spans_per_trace import NumSpansPerTraceDataLoader
from .project_by_name import ProjectByNameDataLoader
+from .project_ids_by_trace_retention_policy_id import ProjectIdsByTraceRetentionPolicyIdDataLoader
from .prompt_version_sequence_number import PromptVersionSequenceNumberDataLoader
from .record_counts import RecordCountCache, RecordCountDataLoader
from .session_io import SessionIODataLoader
@@ -35,6 +36,7 @@
from .table_fields import TableFieldsDataLoader
from .token_counts import TokenCountCache, TokenCountDataLoader
from .trace_by_trace_ids import TraceByTraceIdsDataLoader
+from .trace_retention_policy_id_by_project_id import TraceRetentionPolicyIdByProjectIdDataLoader
from .trace_root_spans import TraceRootSpansDataLoader
from .user_roles import UserRolesDataLoader
from .users import UsersDataLoader
@@ -57,6 +59,7 @@
"MinStartOrMaxEndTimeDataLoader",
"NumChildSpansDataLoader",
"NumSpansPerTraceDataLoader",
+ "ProjectIdsByTraceRetentionPolicyIdDataLoader",
"PromptVersionSequenceNumberDataLoader",
"RecordCountDataLoader",
"SessionIODataLoader",
@@ -71,6 +74,7 @@
"TableFieldsDataLoader",
"TokenCountDataLoader",
"TraceByTraceIdsDataLoader",
+ "TraceRetentionPolicyIdByProjectIdDataLoader",
"TraceRootSpansDataLoader",
"ProjectByNameDataLoader",
"SpanAnnotationsDataLoader",
diff --git a/src/phoenix/server/api/dataloaders/annotation_summaries.py b/src/phoenix/server/api/dataloaders/annotation_summaries.py
index b5bbc2406a..9a20c77f26 100644
--- a/src/phoenix/server/api/dataloaders/annotation_summaries.py
+++ b/src/phoenix/server/api/dataloaders/annotation_summaries.py
@@ -1,11 +1,11 @@
from collections import defaultdict
from datetime import datetime
-from typing import Any, Literal, Optional
+from typing import Any, Literal, Optional, Type, Union, cast
import pandas as pd
from aioitertools.itertools import groupby
from cachetools import LFUCache, TTLCache
-from sqlalchemy import Select, func, or_, select
+from sqlalchemy import Select, and_, case, distinct, func, or_, select
from strawberry.dataloader import AbstractCache, DataLoader
from typing_extensions import TypeAlias, assert_never
@@ -92,7 +92,7 @@ async def _load_fn(self, keys: list[Key]) -> list[Result]:
async with self._db() as session:
data = await session.stream(stmt)
async for annotation_name, group in groupby(data, lambda row: row.name):
- summary = AnnotationSummary(pd.DataFrame(group))
+ summary = AnnotationSummary(name=annotation_name, df=pd.DataFrame(group))
for position in params[annotation_name]:
results[position] = summary
return results
@@ -103,23 +103,64 @@ def _get_stmt(
*annotation_names: Param,
) -> Select[Any]:
kind, project_rowid, (start_time, end_time), filter_condition = segment
- stmt = select()
+
+ annotation_model: Union[Type[models.SpanAnnotation], Type[models.TraceAnnotation]]
+ entity_model: Union[Type[models.Span], Type[models.Trace]]
+ entity_join_model: Optional[Type[models.Base]]
+ entity_id_column: Any
+
if kind == "span":
- msa = models.SpanAnnotation
- name_column, label_column, score_column = msa.name, msa.label, msa.score
- time_column = models.Span.start_time
- stmt = stmt.join(models.Span).join_from(models.Span, models.Trace)
- if filter_condition:
- sf = SpanFilter(filter_condition)
- stmt = sf(stmt)
+ annotation_model = models.SpanAnnotation
+ entity_model = models.Span
+ entity_join_model = models.Trace
+ entity_id_column = models.Span.id.label("entity_id")
elif kind == "trace":
- mta = models.TraceAnnotation
- name_column, label_column, score_column = mta.name, mta.label, mta.score
- time_column = models.Trace.start_time
- stmt = stmt.join(models.Trace)
+ annotation_model = models.TraceAnnotation
+ entity_model = models.Trace
+ entity_join_model = None
+ entity_id_column = models.Trace.id.label("entity_id")
else:
assert_never(kind)
- stmt = stmt.add_columns(
+
+ name_column = annotation_model.name
+ label_column = annotation_model.label
+ score_column = annotation_model.score
+ time_column = entity_model.start_time
+
+ # First query: count distinct entities per annotation name
+ # This is used later to calculate accurate fractions that account for entities without labels
+ entity_count_query = select(
+ name_column, func.count(distinct(entity_id_column)).label("entity_count")
+ )
+
+ if kind == "span":
+ entity_count_query = entity_count_query.join(cast(Type[models.Span], entity_model))
+ entity_count_query = entity_count_query.join_from(
+ cast(Type[models.Span], entity_model), cast(Type[models.Trace], entity_join_model)
+ )
+ entity_count_query = entity_count_query.where(models.Trace.project_rowid == project_rowid)
+ elif kind == "trace":
+ entity_count_query = entity_count_query.join(cast(Type[models.Trace], entity_model))
+ entity_count_query = entity_count_query.where(
+ cast(Type[models.Trace], entity_model).project_rowid == project_rowid
+ )
+
+ entity_count_query = entity_count_query.where(
+ or_(score_column.is_not(None), label_column.is_not(None))
+ )
+ entity_count_query = entity_count_query.where(name_column.in_(annotation_names))
+
+ if start_time:
+ entity_count_query = entity_count_query.where(start_time <= time_column)
+ if end_time:
+ entity_count_query = entity_count_query.where(time_column < end_time)
+
+ entity_count_query = entity_count_query.group_by(name_column)
+ entity_count_subquery = entity_count_query.subquery()
+
+ # Main query: gets raw annotation data with counts per (span/trace)+name+label
+ base_stmt = select(
+ entity_id_column,
name_column,
label_column,
func.count().label("record_count"),
@@ -127,13 +168,151 @@ def _get_stmt(
func.count(score_column).label("score_count"),
func.sum(score_column).label("score_sum"),
)
- stmt = stmt.group_by(name_column, label_column)
- stmt = stmt.order_by(name_column, label_column)
- stmt = stmt.where(models.Trace.project_rowid == project_rowid)
- stmt = stmt.where(or_(score_column.is_not(None), label_column.is_not(None)))
- stmt = stmt.where(name_column.in_(annotation_names))
+
+ if kind == "span":
+ base_stmt = base_stmt.join(cast(Type[models.Span], entity_model))
+ base_stmt = base_stmt.join_from(
+ cast(Type[models.Span], entity_model), cast(Type[models.Trace], entity_join_model)
+ )
+ base_stmt = base_stmt.where(models.Trace.project_rowid == project_rowid)
+ if filter_condition:
+ sf = SpanFilter(filter_condition)
+ base_stmt = sf(base_stmt)
+ elif kind == "trace":
+ base_stmt = base_stmt.join(cast(Type[models.Trace], entity_model))
+ base_stmt = base_stmt.where(
+ cast(Type[models.Trace], entity_model).project_rowid == project_rowid
+ )
+ else:
+ assert_never(kind)
+
+ base_stmt = base_stmt.where(or_(score_column.is_not(None), label_column.is_not(None)))
+ base_stmt = base_stmt.where(name_column.in_(annotation_names))
+
if start_time:
- stmt = stmt.where(start_time <= time_column)
+ base_stmt = base_stmt.where(start_time <= time_column)
if end_time:
- stmt = stmt.where(time_column < end_time)
- return stmt
+ base_stmt = base_stmt.where(time_column < end_time)
+
+ # Group to get one row per (span/trace)+name+label combination
+ base_stmt = base_stmt.group_by(entity_id_column, name_column, label_column)
+
+ base_subquery = base_stmt.subquery()
+
+ # Calculate total counts per (span/trace)+name for computing fractions
+ entity_totals = (
+ select(
+ base_subquery.c.entity_id,
+ base_subquery.c.name,
+ func.sum(base_subquery.c.label_count).label("total_label_count"),
+ func.sum(base_subquery.c.score_count).label("total_score_count"),
+ func.sum(base_subquery.c.score_sum).label("entity_score_sum"),
+ )
+ .group_by(base_subquery.c.entity_id, base_subquery.c.name)
+ .subquery()
+ )
+
+ per_entity_fractions = (
+ select(
+ base_subquery.c.entity_id,
+ base_subquery.c.name,
+ base_subquery.c.label,
+ base_subquery.c.record_count,
+ base_subquery.c.label_count,
+ base_subquery.c.score_count,
+ base_subquery.c.score_sum,
+ # Calculate label fraction, avoiding division by zero when total_label_count is 0
+ case(
+ (
+ entity_totals.c.total_label_count > 0,
+ base_subquery.c.label_count * 1.0 / entity_totals.c.total_label_count,
+ ),
+ else_=None,
+ ).label("label_fraction"),
+ # Calculate average score for the entity (if there are any scores)
+ case(
+ (
+ entity_totals.c.total_score_count > 0,
+ entity_totals.c.entity_score_sum * 1.0 / entity_totals.c.total_score_count,
+ ),
+ else_=None,
+ ).label("entity_avg_score"),
+ )
+ .join(
+ entity_totals,
+ and_(
+ base_subquery.c.entity_id == entity_totals.c.entity_id,
+ base_subquery.c.name == entity_totals.c.name,
+ ),
+ )
+ .subquery()
+ )
+
+ # Aggregate metrics across (spans/traces) for each name+label combination.
+ label_entity_metrics = (
+ select(
+ per_entity_fractions.c.name,
+ per_entity_fractions.c.label,
+ func.count(distinct(per_entity_fractions.c.entity_id)).label("entities_with_label"),
+ func.sum(per_entity_fractions.c.label_count).label("total_label_count"),
+ func.sum(per_entity_fractions.c.score_count).label("total_score_count"),
+ func.sum(per_entity_fractions.c.score_sum).label("total_score_sum"),
+ # Average of label fractions for entities that have this label
+ func.avg(per_entity_fractions.c.label_fraction).label("avg_label_fraction_present"),
+ # Average of per-entity average scores (but we handle overall aggregation separately)
+ )
+ .group_by(per_entity_fractions.c.name, per_entity_fractions.c.label)
+ .subquery()
+ )
+
+ # Compute distinct per-entity average scores to ensure each entity counts only once.
+ distinct_entity_scores = (
+ select(
+ per_entity_fractions.c.entity_id,
+ per_entity_fractions.c.name,
+ per_entity_fractions.c.entity_avg_score,
+ )
+ .distinct()
+ .subquery()
+ )
+
+ overall_score_aggregates = (
+ select(
+ distinct_entity_scores.c.name,
+ func.avg(distinct_entity_scores.c.entity_avg_score).label("overall_avg_score"),
+ )
+ .group_by(distinct_entity_scores.c.name)
+ .subquery()
+ )
+
+ # Final result: adjust label fractions by the proportion of entities reporting this label
+ # and include the overall average score per annotation name.
+ final_stmt = (
+ select(
+ label_entity_metrics.c.name,
+ label_entity_metrics.c.label,
+ # Adjust label fraction, guarding against division by zero in entity_count
+ case(
+ (
+ entity_count_subquery.c.entity_count > 0,
+ label_entity_metrics.c.avg_label_fraction_present
+ * label_entity_metrics.c.entities_with_label
+ / entity_count_subquery.c.entity_count,
+ ),
+ else_=None,
+ ).label("avg_label_fraction"),
+ overall_score_aggregates.c.overall_avg_score.label("avg_score"), # same for all labels
+ label_entity_metrics.c.total_label_count.label("label_count"),
+ label_entity_metrics.c.total_score_count.label("score_count"),
+ label_entity_metrics.c.total_score_sum.label("score_sum"),
+ label_entity_metrics.c.entities_with_label.label("record_count"),
+ )
+ .join(entity_count_subquery, label_entity_metrics.c.name == entity_count_subquery.c.name)
+ .join(
+ overall_score_aggregates,
+ label_entity_metrics.c.name == overall_score_aggregates.c.name,
+ )
+ .order_by(label_entity_metrics.c.name, label_entity_metrics.c.label)
+ )
+
+ return final_stmt
diff --git a/src/phoenix/server/api/dataloaders/project_ids_by_trace_retention_policy_id.py b/src/phoenix/server/api/dataloaders/project_ids_by_trace_retention_policy_id.py
new file mode 100644
index 0000000000..0d6de03b73
--- /dev/null
+++ b/src/phoenix/server/api/dataloaders/project_ids_by_trace_retention_policy_id.py
@@ -0,0 +1,42 @@
+from collections import defaultdict
+
+from sqlalchemy import or_, select
+from strawberry.dataloader import DataLoader
+from typing_extensions import TypeAlias
+
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+from phoenix.db.models import Project
+from phoenix.server.types import DbSessionFactory
+
+PolicyRowId: TypeAlias = int
+ProjectRowId: TypeAlias = int
+
+Key: TypeAlias = PolicyRowId
+Result: TypeAlias = list[ProjectRowId]
+
+
+class ProjectIdsByTraceRetentionPolicyIdDataLoader(DataLoader[Key, Result]):
+ def __init__(self, db: DbSessionFactory) -> None:
+ super().__init__(load_fn=self._load_fn)
+ self._db = db
+
+ async def _load_fn(self, keys: list[Key]) -> list[Result]:
+ ids = set(keys)
+ stmt = select(Project.trace_retention_policy_id, Project.id)
+ if DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID in ids:
+ stmt = stmt.where(
+ or_(
+ Project.trace_retention_policy_id.in_(ids),
+ Project.trace_retention_policy_id.is_(None),
+ )
+ )
+ else:
+ stmt = stmt.where(Project.trace_retention_policy_id.in_(ids))
+ projects: defaultdict[Key, Result] = defaultdict(list)
+ async with self._db() as session:
+ data = await session.stream(stmt)
+ async for policy_rowid, project_rowid in data:
+ projects[policy_rowid or DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID].append(
+ project_rowid
+ )
+ return [projects.get(policy_rowid, []).copy() for policy_rowid in keys]
diff --git a/src/phoenix/server/api/dataloaders/trace_retention_policy_id_by_project_id.py b/src/phoenix/server/api/dataloaders/trace_retention_policy_id_by_project_id.py
new file mode 100644
index 0000000000..312315278e
--- /dev/null
+++ b/src/phoenix/server/api/dataloaders/trace_retention_policy_id_by_project_id.py
@@ -0,0 +1,34 @@
+from sqlalchemy import select
+from strawberry.dataloader import DataLoader
+from typing_extensions import TypeAlias
+
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+from phoenix.db.models import Project
+from phoenix.server.types import DbSessionFactory
+
+PolicyRowId: TypeAlias = int
+ProjectRowId: TypeAlias = int
+
+Key: TypeAlias = ProjectRowId
+Result: TypeAlias = PolicyRowId
+
+
+class TraceRetentionPolicyIdByProjectIdDataLoader(DataLoader[Key, Result]):
+ def __init__(self, db: DbSessionFactory) -> None:
+ super().__init__(load_fn=self._load_fn)
+ self._db = db
+
+ async def _load_fn(self, keys: list[Key]) -> list[Result]:
+ ids = set(keys)
+ stmt = (
+ select(Project.id, Project.trace_retention_policy_id)
+ .where(Project.trace_retention_policy_id.isnot(None))
+ .where(Project.id.in_(ids))
+ )
+ async with self._db() as session:
+ data = await session.execute(stmt)
+ result = {project_rowid: policy_id for project_rowid, policy_id in data.all()}
+ return [
+ result.get(project_rowid, DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID)
+ for project_rowid in keys
+ ]
diff --git a/src/phoenix/server/api/helpers/prompts/models.py b/src/phoenix/server/api/helpers/prompts/models.py
index 622867ae44..0842231751 100644
--- a/src/phoenix/server/api/helpers/prompts/models.py
+++ b/src/phoenix/server/api/helpers/prompts/models.py
@@ -3,9 +3,10 @@
from enum import Enum
from typing import Any, Literal, Mapping, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, model_validator
+from pydantic import Field, RootModel, model_validator
from typing_extensions import Annotated, Self, TypeAlias, TypeGuard, assert_never
+from phoenix.db.types.db_models import UNDEFINED, DBBaseModel
from phoenix.db.types.model_provider import ModelProvider
from phoenix.server.api.helpers.prompts.conversions.anthropic import AnthropicToolChoiceConversion
from phoenix.server.api.helpers.prompts.conversions.openai import OpenAIToolChoiceConversion
@@ -13,25 +14,6 @@
JSONSerializable = Union[None, bool, int, float, str, dict[str, Any], list[Any]]
-class Undefined:
- """
- A singleton class that represents an unset or undefined value. Needed since Pydantic
- can't natively distinguish between an undefined value and a value that is set to
- None.
- """
-
- def __new__(cls) -> Any:
- if not hasattr(cls, "_instance"):
- cls._instance = super().__new__(cls)
- return cls._instance
-
- def __bool__(self) -> bool:
- return False
-
-
-UNDEFINED: Any = Undefined()
-
-
class PromptTemplateType(str, Enum):
STRING = "STR"
CHAT = "CHAT"
@@ -50,33 +32,18 @@ class PromptTemplateFormat(str, Enum):
NONE = "NONE"
-class PromptModel(BaseModel):
- model_config = ConfigDict(
- extra="forbid", # disallow extra attributes
- use_enum_values=True,
- validate_assignment=True,
- )
-
- def __init__(self, *args: Any, **kwargs: Any) -> None:
- kwargs = {k: v for k, v in kwargs.items() if v is not UNDEFINED}
- super().__init__(*args, **kwargs)
-
- def model_dump(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
- return super().model_dump(*args, exclude_unset=True, by_alias=True, **kwargs)
-
-
-class TextContentPart(PromptModel):
+class TextContentPart(DBBaseModel):
type: Literal["text"]
text: str
-class ToolCallFunction(PromptModel):
+class ToolCallFunction(DBBaseModel):
type: Literal["function"]
name: str
arguments: str
-class ToolCallContentPart(PromptModel):
+class ToolCallContentPart(DBBaseModel):
type: Literal["tool_call"]
tool_call_id: str
tool_call: Annotated[
@@ -85,7 +52,7 @@ class ToolCallContentPart(PromptModel):
]
-class ToolResultContentPart(PromptModel):
+class ToolResultContentPart(DBBaseModel):
type: Literal["tool_result"]
tool_call_id: str
tool_result: JSONSerializable
@@ -131,17 +98,17 @@ def to_gql(role: Role) -> PromptMessageRole:
assert_never(role)
-class PromptMessage(PromptModel):
+class PromptMessage(DBBaseModel):
role: Role
content: Union[str, Annotated[list[ContentPart], Field(..., min_length=1)]]
-class PromptChatTemplate(PromptModel):
+class PromptChatTemplate(DBBaseModel):
type: Literal["chat"]
messages: list[PromptMessage]
-class PromptStringTemplate(PromptModel):
+class PromptStringTemplate(DBBaseModel):
type: Literal["string"]
template: str
@@ -159,12 +126,12 @@ class PromptTemplateRootModel(RootModel[PromptTemplate]):
root: PromptTemplate
-class PromptToolFunction(PromptModel):
+class PromptToolFunction(DBBaseModel):
type: Literal["function"]
function: PromptToolFunctionDefinition
-class PromptToolFunctionDefinition(PromptModel):
+class PromptToolFunctionDefinition(DBBaseModel):
name: str
description: str = UNDEFINED
parameters: dict[str, Any] = UNDEFINED
@@ -174,26 +141,26 @@ class PromptToolFunctionDefinition(PromptModel):
PromptTool: TypeAlias = Annotated[Union[PromptToolFunction], Field(..., discriminator="type")]
-class PromptTools(PromptModel):
+class PromptTools(DBBaseModel):
type: Literal["tools"]
tools: Annotated[list[PromptTool], Field(..., min_length=1)]
tool_choice: PromptToolChoice = UNDEFINED
disable_parallel_tool_calls: bool = UNDEFINED
-class PromptToolChoiceNone(PromptModel):
+class PromptToolChoiceNone(DBBaseModel):
type: Literal["none"]
-class PromptToolChoiceZeroOrMore(PromptModel):
+class PromptToolChoiceZeroOrMore(DBBaseModel):
type: Literal["zero_or_more"]
-class PromptToolChoiceOneOrMore(PromptModel):
+class PromptToolChoiceOneOrMore(DBBaseModel):
type: Literal["one_or_more"]
-class PromptToolChoiceSpecificFunctionTool(PromptModel):
+class PromptToolChoiceSpecificFunctionTool(DBBaseModel):
type: Literal["specific_function"]
function_name: str
@@ -209,7 +176,7 @@ class PromptToolChoiceSpecificFunctionTool(PromptModel):
]
-class PromptOpenAIJSONSchema(PromptModel):
+class PromptOpenAIJSONSchema(DBBaseModel):
"""
Based on https://github.com/openai/openai-python/blob/d16e6edde5a155626910b5758a0b939bfedb9ced/src/openai/types/shared/response_format_json_schema.py#L13
"""
@@ -223,7 +190,7 @@ class PromptOpenAIJSONSchema(PromptModel):
strict: Optional[bool] = UNDEFINED
-class PromptOpenAIResponseFormatJSONSchema(PromptModel):
+class PromptOpenAIResponseFormatJSONSchema(DBBaseModel):
"""
Based on https://github.com/openai/openai-python/blob/d16e6edde5a155626910b5758a0b939bfedb9ced/src/openai/types/shared/response_format_json_schema.py#L40
"""
@@ -232,12 +199,12 @@ class PromptOpenAIResponseFormatJSONSchema(PromptModel):
type: Literal["json_schema"]
-class PromptResponseFormatJSONSchema(PromptModel):
+class PromptResponseFormatJSONSchema(DBBaseModel):
type: Literal["json_schema"]
json_schema: PromptResponseFormatJSONSchemaDefinition
-class PromptResponseFormatJSONSchemaDefinition(PromptModel):
+class PromptResponseFormatJSONSchemaDefinition(DBBaseModel):
name: str
description: str = UNDEFINED
schema_: dict[str, Any] = Field(UNDEFINED, alias="schema")
@@ -305,7 +272,7 @@ def denormalize_response_format(
# OpenAI tool definitions
-class OpenAIFunctionDefinition(PromptModel):
+class OpenAIFunctionDefinition(DBBaseModel):
"""
Based on https://github.com/openai/openai-python/blob/1e07c9d839e7e96f02d0a4b745f379a43086334c/src/openai/types/shared_params/function_definition.py#L13
"""
@@ -316,7 +283,7 @@ class OpenAIFunctionDefinition(PromptModel):
strict: Optional[bool] = UNDEFINED
-class OpenAIToolDefinition(PromptModel):
+class OpenAIToolDefinition(DBBaseModel):
"""
Based on https://github.com/openai/openai-python/blob/1e07c9d839e7e96f02d0a4b745f379a43086334c/src/openai/types/chat/chat_completion_tool_param.py#L12
"""
@@ -326,7 +293,7 @@ class OpenAIToolDefinition(PromptModel):
# Anthropic tool definitions
-class AnthropicCacheControlParam(PromptModel):
+class AnthropicCacheControlParam(DBBaseModel):
"""
Based on https://github.com/anthropics/anthropic-sdk-python/blob/93cbbbde964e244f02bf1bd2b579c5fabce4e267/src/anthropic/types/cache_control_ephemeral_param.py#L10
"""
@@ -334,7 +301,7 @@ class AnthropicCacheControlParam(PromptModel):
type: Literal["ephemeral"]
-class AnthropicToolDefinition(PromptModel):
+class AnthropicToolDefinition(DBBaseModel):
"""
Based on https://github.com/anthropics/anthropic-sdk-python/blob/93cbbbde964e244f02bf1bd2b579c5fabce4e267/src/anthropic/types/tool_param.py#L22
"""
@@ -345,7 +312,7 @@ class AnthropicToolDefinition(PromptModel):
description: str = UNDEFINED
-class PromptOpenAIInvocationParametersContent(PromptModel):
+class PromptOpenAIInvocationParametersContent(DBBaseModel):
temperature: float = UNDEFINED
max_tokens: int = UNDEFINED
max_completion_tokens: int = UNDEFINED
@@ -356,7 +323,7 @@ class PromptOpenAIInvocationParametersContent(PromptModel):
reasoning_effort: Literal["low", "medium", "high"] = UNDEFINED
-class PromptOpenAIInvocationParameters(PromptModel):
+class PromptOpenAIInvocationParameters(DBBaseModel):
type: Literal["openai"]
openai: PromptOpenAIInvocationParametersContent
@@ -365,21 +332,21 @@ class PromptAzureOpenAIInvocationParametersContent(PromptOpenAIInvocationParamet
pass
-class PromptAzureOpenAIInvocationParameters(PromptModel):
+class PromptAzureOpenAIInvocationParameters(DBBaseModel):
type: Literal["azure_openai"]
azure_openai: PromptAzureOpenAIInvocationParametersContent
-class PromptAnthropicThinkingConfigDisabled(PromptModel):
+class PromptAnthropicThinkingConfigDisabled(DBBaseModel):
type: Literal["disabled"]
-class PromptAnthropicThinkingConfigEnabled(PromptModel):
+class PromptAnthropicThinkingConfigEnabled(DBBaseModel):
type: Literal["enabled"]
budget_tokens: int = Field(..., ge=1024)
-class PromptAnthropicInvocationParametersContent(PromptModel):
+class PromptAnthropicInvocationParametersContent(DBBaseModel):
max_tokens: int
temperature: float = UNDEFINED
top_p: float = UNDEFINED
@@ -398,12 +365,12 @@ def check_thinking_budget_tokens_lt_max_tokens(self) -> Self:
return self
-class PromptAnthropicInvocationParameters(PromptModel):
+class PromptAnthropicInvocationParameters(DBBaseModel):
type: Literal["anthropic"]
anthropic: PromptAnthropicInvocationParametersContent
-class PromptGoogleInvocationParametersContent(PromptModel):
+class PromptGoogleInvocationParametersContent(DBBaseModel):
temperature: float = UNDEFINED
max_output_tokens: int = UNDEFINED
stop_sequences: list[str] = UNDEFINED
@@ -413,7 +380,7 @@ class PromptGoogleInvocationParametersContent(PromptModel):
top_k: int = UNDEFINED
-class PromptGoogleInvocationParameters(PromptModel):
+class PromptGoogleInvocationParameters(DBBaseModel):
type: Literal["google"]
google: PromptGoogleInvocationParametersContent
@@ -524,7 +491,7 @@ def denormalize_tools(
tools: PromptTools, model_provider: ModelProvider
) -> tuple[list[dict[str, Any]], Optional[Any]]:
assert tools.type == "tools"
- denormalized_tools: list[PromptModel]
+ denormalized_tools: list[DBBaseModel]
tool_choice: Optional[Any] = None
if model_provider is ModelProvider.OPENAI or model_provider is ModelProvider.AZURE_OPENAI:
denormalized_tools = [_prompt_to_openai_tool(tool) for tool in tools.tools]
diff --git a/src/phoenix/server/api/input_types/CreateSpanAnnotationInput.py b/src/phoenix/server/api/input_types/CreateSpanAnnotationInput.py
index 53ce0f65a3..c72c841c18 100644
--- a/src/phoenix/server/api/input_types/CreateSpanAnnotationInput.py
+++ b/src/phoenix/server/api/input_types/CreateSpanAnnotationInput.py
@@ -4,6 +4,8 @@
from strawberry.relay import GlobalID
from strawberry.scalars import JSON
+from phoenix.server.api.exceptions import BadRequest
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
@@ -16,3 +18,15 @@ class CreateSpanAnnotationInput:
score: Optional[float] = None
explanation: Optional[str] = None
metadata: JSON = strawberry.field(default_factory=dict)
+ identifier: Optional[str] = None
+ source: AnnotationSource
+
+ def __post_init__(self) -> None:
+ if self.identifier == "":
+ raise BadRequest("Identifier must be a non-empty string or null")
+
+
+@strawberry.input
+class CreateSpanNoteInput:
+ span_id: GlobalID
+ note: str
diff --git a/src/phoenix/server/api/input_types/CreateTraceAnnotationInput.py b/src/phoenix/server/api/input_types/CreateTraceAnnotationInput.py
index a6f8c6b485..5a992226cd 100644
--- a/src/phoenix/server/api/input_types/CreateTraceAnnotationInput.py
+++ b/src/phoenix/server/api/input_types/CreateTraceAnnotationInput.py
@@ -4,6 +4,8 @@
from strawberry.relay import GlobalID
from strawberry.scalars import JSON
+from phoenix.server.api.exceptions import BadRequest
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
@@ -16,3 +18,9 @@ class CreateTraceAnnotationInput:
score: Optional[float] = None
explanation: Optional[str] = None
metadata: JSON = strawberry.field(default_factory=dict)
+ identifier: Optional[str] = None
+ source: AnnotationSource
+
+ def __post_init__(self) -> None:
+ if self.identifier == "":
+ raise BadRequest("Identifier must be a non-empty string or null")
diff --git a/src/phoenix/server/api/input_types/PatchAnnotationInput.py b/src/phoenix/server/api/input_types/PatchAnnotationInput.py
index 8dc0d17b93..d3d8a50c08 100644
--- a/src/phoenix/server/api/input_types/PatchAnnotationInput.py
+++ b/src/phoenix/server/api/input_types/PatchAnnotationInput.py
@@ -5,6 +5,7 @@
from strawberry.relay import GlobalID
from strawberry.scalars import JSON
+from phoenix.server.api.exceptions import BadRequest
from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
@@ -17,3 +18,8 @@ class PatchAnnotationInput:
score: Optional[float] = UNSET
explanation: Optional[str] = UNSET
metadata: Optional[JSON] = UNSET
+ identifier: Optional[str] = UNSET
+
+ def __post_init__(self) -> None:
+ if self.identifier == "":
+ raise BadRequest("Identifier must be a non-empty string or null")
diff --git a/src/phoenix/server/api/input_types/SpanAnnotationFilter.py b/src/phoenix/server/api/input_types/SpanAnnotationFilter.py
new file mode 100644
index 0000000000..1bbe913326
--- /dev/null
+++ b/src/phoenix/server/api/input_types/SpanAnnotationFilter.py
@@ -0,0 +1,67 @@
+from typing import Optional
+
+import strawberry
+from strawberry import UNSET
+from strawberry.relay import GlobalID
+
+from phoenix.db import models
+from phoenix.server.api.exceptions import BadRequest
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
+from phoenix.server.api.types.node import from_global_id_with_expected_type
+
+
+@strawberry.input
+class SpanAnnotationFilterCondition:
+ names: Optional[list[str]] = UNSET
+ sources: Optional[list[AnnotationSource]] = UNSET
+ user_ids: Optional[list[Optional[GlobalID]]] = UNSET
+
+ def __post_init__(self) -> None:
+ if isinstance(self.names, list) and not self.names:
+ raise BadRequest("names must be a non-empty list")
+ if isinstance(self.sources, list) and not self.sources:
+ raise BadRequest("sources must be a non-empty list")
+ if isinstance(self.user_ids, list) and not self.user_ids:
+ raise BadRequest("user ids must be a non-empty list")
+
+
+@strawberry.input
+class SpanAnnotationFilter:
+ include: Optional[SpanAnnotationFilterCondition] = UNSET
+ exclude: Optional[SpanAnnotationFilterCondition] = UNSET
+
+ def __post_init__(self) -> None:
+ if self.include is UNSET and self.exclude is UNSET:
+ raise BadRequest("include and exclude cannot both be unset")
+
+
+def satisfies_filter(span_annotation: models.SpanAnnotation, filter: SpanAnnotationFilter) -> bool:
+ """
+ Returns true if the span annotation satisfies the filter and false otherwise.
+ """
+ span_annotation_source = AnnotationSource(span_annotation.source)
+ if include := filter.include:
+ if include.names and span_annotation.name not in include.names:
+ return False
+ if include.sources and span_annotation_source not in include.sources:
+ return False
+ if include.user_ids:
+ user_rowids = [
+ from_global_id_with_expected_type(user_id, "User") if user_id is not None else None
+ for user_id in include.user_ids
+ ]
+ if span_annotation.user_id not in user_rowids:
+ return False
+ if exclude := filter.exclude:
+ if exclude.names and span_annotation.name in exclude.names:
+ return False
+ if exclude.sources and span_annotation_source in exclude.sources:
+ return False
+ if exclude.user_ids:
+ user_rowids = [
+ from_global_id_with_expected_type(user_id, "User") if user_id is not None else None
+ for user_id in exclude.user_ids
+ ]
+ if span_annotation.user_id in user_rowids:
+ return False
+ return True
diff --git a/src/phoenix/server/api/mutations/__init__.py b/src/phoenix/server/api/mutations/__init__.py
index 010f9b28e4..5577a78587 100644
--- a/src/phoenix/server/api/mutations/__init__.py
+++ b/src/phoenix/server/api/mutations/__init__.py
@@ -1,5 +1,6 @@
import strawberry
+from phoenix.server.api.mutations.annotation_config_mutations import AnnotationConfigMutationMixin
from phoenix.server.api.mutations.api_key_mutations import ApiKeyMutationMixin
from phoenix.server.api.mutations.chat_mutations import (
ChatCompletionMutationMixin,
@@ -8,6 +9,9 @@
from phoenix.server.api.mutations.experiment_mutations import ExperimentMutationMixin
from phoenix.server.api.mutations.export_events_mutations import ExportEventsMutationMixin
from phoenix.server.api.mutations.project_mutations import ProjectMutationMixin
+from phoenix.server.api.mutations.project_trace_retention_policy_mutations import (
+ ProjectTraceRetentionPolicyMutationMixin,
+)
from phoenix.server.api.mutations.prompt_label_mutations import PromptLabelMutationMixin
from phoenix.server.api.mutations.prompt_mutations import PromptMutationMixin
from phoenix.server.api.mutations.prompt_version_tag_mutations import PromptVersionTagMutationMixin
@@ -19,12 +23,14 @@
@strawberry.type
class Mutation(
+ AnnotationConfigMutationMixin,
ApiKeyMutationMixin,
ChatCompletionMutationMixin,
DatasetMutationMixin,
ExperimentMutationMixin,
ExportEventsMutationMixin,
ProjectMutationMixin,
+ ProjectTraceRetentionPolicyMutationMixin,
PromptMutationMixin,
PromptVersionTagMutationMixin,
PromptLabelMutationMixin,
diff --git a/src/phoenix/server/api/mutations/annotation_config_mutations.py b/src/phoenix/server/api/mutations/annotation_config_mutations.py
new file mode 100644
index 0000000000..3d6dbea66d
--- /dev/null
+++ b/src/phoenix/server/api/mutations/annotation_config_mutations.py
@@ -0,0 +1,413 @@
+from typing import Optional
+
+import strawberry
+from sqlalchemy import delete, select, tuple_
+from sqlalchemy.exc import IntegrityError as PostgreSQLIntegrityError
+from sqlean.dbapi2 import IntegrityError as SQLiteIntegrityError # type: ignore[import-untyped]
+from strawberry.relay.types import GlobalID
+from strawberry.types import Info
+
+from phoenix.db import models
+from phoenix.db.types.annotation_configs import (
+ AnnotationConfigType,
+ AnnotationType,
+ CategoricalAnnotationValue,
+ OptimizationDirection,
+)
+from phoenix.db.types.annotation_configs import (
+ CategoricalAnnotationConfig as CategoricalAnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ ContinuousAnnotationConfig as ContinuousAnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ FreeformAnnotationConfig as FreeformAnnotationConfigModel,
+)
+from phoenix.server.api.auth import IsNotReadOnly
+from phoenix.server.api.context import Context
+from phoenix.server.api.exceptions import BadRequest, Conflict, NotFound
+from phoenix.server.api.queries import Query
+from phoenix.server.api.types.AnnotationConfig import (
+ AnnotationConfig,
+ CategoricalAnnotationConfig,
+ ContinuousAnnotationConfig,
+ FreeformAnnotationConfig,
+ to_gql_annotation_config,
+)
+from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.api.types.Project import Project
+
+ANNOTATION_TYPE_NAMES = (
+ CategoricalAnnotationConfig.__name__,
+ ContinuousAnnotationConfig.__name__,
+ FreeformAnnotationConfig.__name__,
+)
+
+
+@strawberry.input
+class CategoricalAnnotationConfigValueInput:
+ label: str
+ score: Optional[float] = None
+
+
+@strawberry.input
+class CategoricalAnnotationConfigInput:
+ name: str
+ description: Optional[str] = None
+ optimization_direction: OptimizationDirection
+ values: list[CategoricalAnnotationConfigValueInput]
+
+
+@strawberry.input
+class ContinuousAnnotationConfigInput:
+ name: str
+ description: Optional[str] = None
+ optimization_direction: OptimizationDirection
+ lower_bound: Optional[float] = None
+ upper_bound: Optional[float] = None
+
+
+@strawberry.input
+class FreeformAnnotationConfigInput:
+ name: str
+ description: Optional[str] = None
+
+
+@strawberry.input(one_of=True)
+class AnnotationConfigInput:
+ categorical: Optional[CategoricalAnnotationConfigInput] = strawberry.UNSET
+ continuous: Optional[ContinuousAnnotationConfigInput] = strawberry.UNSET
+ freeform: Optional[FreeformAnnotationConfigInput] = strawberry.UNSET
+
+ def __post_init__(self) -> None:
+ if (
+ sum(
+ [
+ self.categorical is not strawberry.UNSET,
+ self.continuous is not strawberry.UNSET,
+ self.freeform is not strawberry.UNSET,
+ ]
+ )
+ != 1
+ ):
+ raise BadRequest("Exactly one of categorical, continuous, or freeform must be set")
+
+
+@strawberry.input
+class CreateAnnotationConfigInput:
+ annotation_config: AnnotationConfigInput
+
+
+@strawberry.type
+class CreateAnnotationConfigPayload:
+ query: Query
+ annotation_config: AnnotationConfig
+
+
+@strawberry.input
+class UpdateAnnotationConfigInput:
+ id: GlobalID
+ annotation_config: AnnotationConfigInput
+
+
+@strawberry.type
+class UpdateAnnotationConfigPayload:
+ query: Query
+ annotation_config: AnnotationConfig
+
+
+@strawberry.input
+class DeleteAnnotationConfigsInput:
+ ids: list[GlobalID]
+
+
+@strawberry.type
+class DeleteAnnotationConfigsPayload:
+ query: Query
+ annotation_configs: list[AnnotationConfig]
+
+
+@strawberry.input
+class AddAnnotationConfigToProjectInput:
+ project_id: GlobalID
+ annotation_config_id: GlobalID
+
+
+@strawberry.type
+class AddAnnotationConfigToProjectPayload:
+ query: Query
+ project: Project
+
+
+@strawberry.input
+class RemoveAnnotationConfigFromProjectInput:
+ project_id: GlobalID
+ annotation_config_id: GlobalID
+
+
+@strawberry.type
+class RemoveAnnotationConfigFromProjectPayload:
+ query: Query
+ project: Project
+
+
+def _to_pydantic_categorical_annotation_config(
+ input: CategoricalAnnotationConfigInput,
+) -> CategoricalAnnotationConfigModel:
+ try:
+ return CategoricalAnnotationConfigModel(
+ type=AnnotationType.CATEGORICAL.value,
+ description=input.description,
+ optimization_direction=input.optimization_direction,
+ values=[
+ CategoricalAnnotationValue(label=value.label, score=value.score)
+ for value in input.values
+ ],
+ )
+ except ValueError as error:
+ raise BadRequest(str(error))
+
+
+def _to_pydantic_continuous_annotation_config(
+ input: ContinuousAnnotationConfigInput,
+) -> ContinuousAnnotationConfigModel:
+ try:
+ return ContinuousAnnotationConfigModel(
+ type=AnnotationType.CONTINUOUS.value,
+ description=input.description,
+ optimization_direction=input.optimization_direction,
+ lower_bound=input.lower_bound,
+ upper_bound=input.upper_bound,
+ )
+ except ValueError as error:
+ raise BadRequest(str(error))
+
+
+def _to_pydantic_freeform_annotation_config(
+ input: FreeformAnnotationConfigInput,
+) -> FreeformAnnotationConfigModel:
+ try:
+ return FreeformAnnotationConfigModel(
+ type=AnnotationType.FREEFORM.value,
+ description=input.description,
+ )
+ except ValueError as error:
+ raise BadRequest(str(error))
+
+
+@strawberry.type
+class AnnotationConfigMutationMixin:
+ @strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore[misc]
+ async def create_annotation_config(
+ self,
+ info: Info[Context, None],
+ input: CreateAnnotationConfigInput,
+ ) -> CreateAnnotationConfigPayload:
+ input_annotation_config = input.annotation_config
+ config: AnnotationConfigType
+ name: str
+ if categorical_input := input_annotation_config.categorical:
+ name = categorical_input.name
+ config = _to_pydantic_categorical_annotation_config(categorical_input)
+ elif continuous_input := input_annotation_config.continuous:
+ name = continuous_input.name
+ config = _to_pydantic_continuous_annotation_config(continuous_input)
+ elif freeform_input := input_annotation_config.freeform:
+ name = freeform_input.name
+ config = _to_pydantic_freeform_annotation_config(freeform_input)
+ else:
+ raise BadRequest("No annotation config provided")
+
+ if name == "note":
+ raise BadRequest("The name 'note' is reserved for span notes")
+
+ async with info.context.db() as session:
+ annotation_config = models.AnnotationConfig(
+ name=name,
+ config=config,
+ )
+ session.add(annotation_config)
+ try:
+ await session.commit()
+ except (PostgreSQLIntegrityError, SQLiteIntegrityError):
+ raise Conflict(f"Annotation configuration with name '{name}' already exists")
+ return CreateAnnotationConfigPayload(
+ query=Query(),
+ annotation_config=to_gql_annotation_config(annotation_config),
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore[misc]
+ async def update_annotation_config(
+ self,
+ info: Info[Context, None],
+ input: UpdateAnnotationConfigInput,
+ ) -> UpdateAnnotationConfigPayload:
+ try:
+ config_id = int(input.id.node_id)
+ except ValueError:
+ raise BadRequest("Invalid annotation config ID")
+
+ if input.id.type_name not in ANNOTATION_TYPE_NAMES:
+ raise BadRequest("Invalid annotation config ID")
+
+ input_annotation_config = input.annotation_config
+ config: AnnotationConfigType
+ name: str
+ if categorical_input := input_annotation_config.categorical:
+ name = categorical_input.name
+ config = _to_pydantic_categorical_annotation_config(categorical_input)
+ elif continuous_input := input_annotation_config.continuous:
+ name = continuous_input.name
+ config = _to_pydantic_continuous_annotation_config(continuous_input)
+ elif freeform_input := input_annotation_config.freeform:
+ name = freeform_input.name
+ config = _to_pydantic_freeform_annotation_config(freeform_input)
+ else:
+ raise BadRequest("No annotation config provided")
+
+ if name == "note":
+ raise BadRequest("The name 'note' is reserved for span notes")
+
+ async with info.context.db() as session:
+ annotation_config = await session.get(models.AnnotationConfig, config_id)
+ if not annotation_config:
+ raise NotFound("Annotation config not found")
+
+ annotation_config.name = name
+ annotation_config.config = config
+ try:
+ await session.commit()
+ except (PostgreSQLIntegrityError, SQLiteIntegrityError):
+ raise Conflict(f"Annotation configuration with name '{name}' already exists")
+
+ return UpdateAnnotationConfigPayload(
+ query=Query(),
+ annotation_config=to_gql_annotation_config(annotation_config),
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore[misc]
+ async def delete_annotation_configs(
+ self,
+ info: Info[Context, None],
+ input: DeleteAnnotationConfigsInput,
+ ) -> DeleteAnnotationConfigsPayload:
+ config_ids = set()
+ for config_gid in input.ids:
+ if (type_name := config_gid.type_name) not in ANNOTATION_TYPE_NAMES:
+ raise BadRequest(f"Unexpected type name in Relay ID: {type_name}")
+ config_ids.add(int(config_gid.node_id))
+
+ async with info.context.db() as session:
+ result = await session.scalars(
+ delete(models.AnnotationConfig)
+ .where(models.AnnotationConfig.id.in_(config_ids))
+ .returning(models.AnnotationConfig)
+ )
+ deleted_annotation_configs = result.all()
+ if len(deleted_annotation_configs) < len(config_ids):
+ await session.rollback()
+ raise NotFound(
+ "Could not find one or more annotation configs to delete, deletion aborted."
+ )
+ return DeleteAnnotationConfigsPayload(
+ query=Query(),
+ annotation_configs=[
+ to_gql_annotation_config(annotation_config)
+ for annotation_config in deleted_annotation_configs
+ ],
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore[misc]
+ async def add_annotation_config_to_project(
+ self,
+ info: Info[Context, None],
+ input: list[AddAnnotationConfigToProjectInput],
+ ) -> AddAnnotationConfigToProjectPayload:
+ if not input:
+ raise BadRequest("No project annotation config associations provided")
+ project_annotation_config_ids: set[tuple[int, int]] = set()
+ for item in input:
+ project_id = from_global_id_with_expected_type(
+ global_id=item.project_id, expected_type_name="Project"
+ )
+ if (item.annotation_config_id.type_name) not in ANNOTATION_TYPE_NAMES:
+ raise BadRequest(
+ f"Invalid ID for annotation config: {str(item.annotation_config_id)}"
+ )
+ annotation_config_id = int(item.annotation_config_id.node_id)
+ project_annotation_config_ids.add((project_id, annotation_config_id))
+ project_ids = [project_id for project_id, _ in project_annotation_config_ids]
+ annotation_config_ids = [
+ annotation_config_id for _, annotation_config_id in project_annotation_config_ids
+ ]
+
+ async with info.context.db() as session:
+ result = await session.scalars(
+ select(models.Project.id).where(models.Project.id.in_(project_ids))
+ )
+ resolved_project_ids = result.all()
+ if set(project_ids) - set(resolved_project_ids):
+ raise NotFound("One or more projects were not found")
+
+ result = await session.scalars(
+ select(models.AnnotationConfig.id).where(
+ models.AnnotationConfig.id.in_(annotation_config_ids)
+ )
+ )
+ resolved_annotation_config_ids = result.all()
+ if set(annotation_config_ids) - set(resolved_annotation_config_ids):
+ raise NotFound("One or more annotation configs were not found")
+
+ for project_id, annotation_config_id in project_annotation_config_ids:
+ project_annotation_config = models.ProjectAnnotationConfig(
+ project_id=project_id,
+ annotation_config_id=annotation_config_id,
+ )
+ session.add(project_annotation_config)
+
+ try:
+ await session.commit()
+ except (PostgreSQLIntegrityError, SQLiteIntegrityError):
+ await session.rollback()
+ raise Conflict(
+ "One or more annotation configs have already been added to the project"
+ )
+ return AddAnnotationConfigToProjectPayload(
+ query=Query(),
+ project=Project(project_rowid=project_id),
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore[misc]
+ async def remove_annotation_config_from_project(
+ self,
+ info: Info[Context, None],
+ input: list[RemoveAnnotationConfigFromProjectInput],
+ ) -> RemoveAnnotationConfigFromProjectPayload:
+ project_annotation_config_associations = set()
+ for item in input:
+ project_id = from_global_id_with_expected_type(
+ global_id=item.project_id, expected_type_name="Project"
+ )
+ if (type_name := item.annotation_config_id.type_name) not in ANNOTATION_TYPE_NAMES:
+ raise BadRequest(f"Unexpected type name in Relay ID: {type_name}")
+ annotation_config_id = int(item.annotation_config_id.node_id)
+ project_annotation_config_associations.add((project_id, annotation_config_id))
+ async with info.context.db() as session:
+ result = await session.scalars(
+ delete(models.ProjectAnnotationConfig)
+ .where(
+ tuple_(
+ models.ProjectAnnotationConfig.project_id,
+ models.ProjectAnnotationConfig.annotation_config_id,
+ ).in_(project_annotation_config_associations)
+ )
+ .returning(models.ProjectAnnotationConfig)
+ )
+ annotation_configs = result.all()
+ if len(annotation_configs) < len(project_annotation_config_associations):
+ await session.rollback()
+ raise NotFound("Could not find one or more input project annotation configs")
+ return RemoveAnnotationConfigFromProjectPayload(
+ query=Query(),
+ project=Project(project_rowid=project_id),
+ )
diff --git a/src/phoenix/server/api/mutations/dataset_mutations.py b/src/phoenix/server/api/mutations/dataset_mutations.py
index 0ed334ec48..02338f486a 100644
--- a/src/phoenix/server/api/mutations/dataset_mutations.py
+++ b/src/phoenix/server/api/mutations/dataset_mutations.py
@@ -11,7 +11,9 @@
ToolCallAttributes,
)
from sqlalchemy import and_, delete, distinct, func, insert, select, update
+from sqlalchemy.orm import contains_eager
from strawberry import UNSET
+from strawberry.relay.types import GlobalID
from strawberry.types import Info
from phoenix.db import models
@@ -130,43 +132,40 @@ async def add_spans_to_dataset(
raise ValueError(
f"Unknown dataset: {dataset_id}"
) # todo: implement error types https://github.com/Arize-ai/phoenix/issues/3221
- dataset_version_rowid = await session.scalar(
- insert(models.DatasetVersion)
- .values(
- dataset_id=dataset_rowid,
- description=dataset_version_description,
- metadata_=dataset_version_metadata,
- )
- .returning(models.DatasetVersion.id)
+ dataset_version = models.DatasetVersion(
+ dataset_id=dataset_rowid,
+ description=dataset_version_description,
+ metadata_=dataset_version_metadata or {},
)
+ session.add(dataset_version)
+ await session.flush()
spans = (
- await session.scalars(select(models.Span).where(models.Span.id.in_(span_rowids)))
- ).all()
- if missing_span_rowids := span_rowids - {span.id for span in spans}:
- raise ValueError(
- f"Could not find spans with rowids: {', '.join(map(str, missing_span_rowids))}"
- ) # todo: implement error handling types https://github.com/Arize-ai/phoenix/issues/3221
-
- span_annotations = (
- await session.scalars(
- select(models.SpanAnnotation).where(
- models.SpanAnnotation.span_rowid.in_(span_rowids)
+ (
+ await session.scalars(
+ select(models.Span)
+ .outerjoin(
+ models.SpanAnnotation,
+ models.Span.id == models.SpanAnnotation.span_rowid,
+ )
+ .outerjoin(models.User, models.SpanAnnotation.user_id == models.User.id)
+ .order_by(
+ models.Span.id,
+ models.SpanAnnotation.name,
+ models.User.username,
+ )
+ .where(models.Span.id.in_(span_rowids))
+ .options(
+ contains_eager(models.Span.span_annotations).contains_eager(
+ models.SpanAnnotation.user
+ )
+ )
)
)
- ).all()
-
- span_annotations_by_span: dict[int, dict[Any, Any]] = {span.id: {} for span in spans}
- for annotation in span_annotations:
- span_id = annotation.span_rowid
- if span_id not in span_annotations_by_span:
- span_annotations_by_span[span_id] = dict()
- span_annotations_by_span[span_id][annotation.name] = {
- "label": annotation.label,
- "score": annotation.score,
- "explanation": annotation.explanation,
- "metadata": annotation.metadata_,
- "annotator_kind": annotation.annotator_kind,
- }
+ .unique()
+ .all()
+ )
+ if span_rowids - {span.id for span in spans}:
+ raise NotFound("Some spans could not be found")
DatasetExample = models.DatasetExample
dataset_example_rowids = (
@@ -201,7 +200,7 @@ async def add_spans_to_dataset(
[
{
DatasetExampleRevision.dataset_example_id.key: dataset_example_rowid,
- DatasetExampleRevision.dataset_version_id.key: dataset_version_rowid,
+ DatasetExampleRevision.dataset_version_id.key: dataset_version.id,
DatasetExampleRevision.input.key: get_dataset_example_input(span),
DatasetExampleRevision.output.key: get_dataset_example_output(span),
DatasetExampleRevision.metadata_.key: {
@@ -212,11 +211,7 @@ async def add_spans_to_dataset(
if k in nonprivate_span_attributes
},
"span_kind": span.span_kind,
- **(
- {"annotations": annotations}
- if (annotations := span_annotations_by_span[span.id])
- else {}
- ),
+ "annotations": _gather_span_annotations_by_name(span.span_annotations),
},
DatasetExampleRevision.revision_kind.key: "CREATE",
}
@@ -602,6 +597,34 @@ def _to_orm_revision(
}
+def _gather_span_annotations_by_name(
+ span_annotations: list[models.SpanAnnotation],
+) -> dict[str, list[dict[str, Any]]]:
+ span_annotations_by_name: dict[str, list[dict[str, Any]]] = {}
+ for span_annotation in span_annotations:
+ if span_annotation.name not in span_annotations_by_name:
+ span_annotations_by_name[span_annotation.name] = []
+ span_annotations_by_name[span_annotation.name].append(
+ _to_span_annotation_dict(span_annotation)
+ )
+ return span_annotations_by_name
+
+
+def _to_span_annotation_dict(span_annotation: models.SpanAnnotation) -> dict[str, Any]:
+ return {
+ "label": span_annotation.label,
+ "score": span_annotation.score,
+ "explanation": span_annotation.explanation,
+ "metadata": span_annotation.metadata_,
+ "annotator_kind": span_annotation.annotator_kind,
+ "user_id": str(GlobalID(models.User.__name__, str(user_id)))
+ if (user_id := span_annotation.user_id) is not None
+ else None,
+ "username": user.username if (user := span_annotation.user) is not None else None,
+ "email": user.email if user is not None else None,
+ }
+
+
INPUT_MIME_TYPE = SpanAttributes.INPUT_MIME_TYPE
INPUT_VALUE = SpanAttributes.INPUT_VALUE
OUTPUT_MIME_TYPE = SpanAttributes.OUTPUT_MIME_TYPE
diff --git a/src/phoenix/server/api/mutations/project_trace_retention_policy_mutations.py b/src/phoenix/server/api/mutations/project_trace_retention_policy_mutations.py
new file mode 100644
index 0000000000..ddbcfe911a
--- /dev/null
+++ b/src/phoenix/server/api/mutations/project_trace_retention_policy_mutations.py
@@ -0,0 +1,245 @@
+from __future__ import annotations
+
+from typing import Optional
+
+import sqlalchemy as sa
+import strawberry
+from strawberry import UNSET, Info
+from strawberry.relay import GlobalID
+
+from phoenix.db import models
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+from phoenix.db.types.trace_retention import (
+ MaxCountRule,
+ MaxDaysOrCountRule,
+ MaxDaysRule,
+ TraceRetentionCronExpression,
+ TraceRetentionRule,
+)
+from phoenix.server.api.auth import IsAdminIfAuthEnabled, IsLocked, IsNotReadOnly
+from phoenix.server.api.context import Context
+from phoenix.server.api.exceptions import BadRequest, NotFound
+from phoenix.server.api.queries import Query
+from phoenix.server.api.types.CronExpression import CronExpression
+from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.api.types.Project import Project
+from phoenix.server.api.types.ProjectTraceRetentionPolicy import (
+ ProjectTraceRetentionPolicy,
+)
+
+
+@strawberry.input
+class ProjectTraceRetentionRuleMaxDaysInput:
+ max_days: float
+
+
+@strawberry.input
+class ProjectTraceRetentionRuleMaxCountInput:
+ max_count: int
+
+
+@strawberry.input
+class ProjectTraceRetentionRuleMaxDaysOrCountInput(
+ ProjectTraceRetentionRuleMaxDaysInput,
+ ProjectTraceRetentionRuleMaxCountInput,
+): ...
+
+
+@strawberry.input(one_of=True)
+class ProjectTraceRetentionRuleInput:
+ max_days: Optional[ProjectTraceRetentionRuleMaxDaysInput] = UNSET
+ max_count: Optional[ProjectTraceRetentionRuleMaxCountInput] = UNSET
+ max_days_or_count: Optional[ProjectTraceRetentionRuleMaxDaysOrCountInput] = UNSET
+
+ def __post_init__(self) -> None:
+ if (
+ sum(
+ (
+ isinstance(self.max_days, ProjectTraceRetentionRuleMaxDaysInput),
+ isinstance(self.max_count, ProjectTraceRetentionRuleMaxCountInput),
+ isinstance(
+ self.max_days_or_count, ProjectTraceRetentionRuleMaxDaysOrCountInput
+ ),
+ )
+ )
+ != 1
+ ):
+ raise BadRequest("Exactly one rule must be provided")
+
+
+@strawberry.input
+class CreateProjectTraceRetentionPolicyInput:
+ name: str
+ cron_expression: CronExpression
+ rule: ProjectTraceRetentionRuleInput
+ add_projects: Optional[list[GlobalID]] = UNSET
+
+ def __post_init__(self) -> None:
+ if not self.name.strip():
+ raise BadRequest("Name cannot be empty")
+ if not self.cron_expression.strip():
+ raise BadRequest("Cron expression cannot be empty")
+
+
+@strawberry.input
+class PatchProjectTraceRetentionPolicyInput:
+ id: GlobalID
+ name: Optional[str] = UNSET
+ cron_expression: Optional[CronExpression] = UNSET
+ rule: Optional[ProjectTraceRetentionRuleInput] = UNSET
+ add_projects: Optional[list[GlobalID]] = UNSET
+ remove_projects: Optional[list[GlobalID]] = UNSET
+
+ def __post_init__(self) -> None:
+ if isinstance(self.name, str) and not self.name.strip():
+ raise BadRequest("Name cannot be empty")
+ if isinstance(self.cron_expression, str) and not self.cron_expression.strip():
+ raise BadRequest("Cron expression cannot be empty")
+ if isinstance(self.add_projects, list) and isinstance(self.remove_projects, list):
+ if set(self.add_projects) & set(self.remove_projects):
+ raise BadRequest("A project cannot be in both add and remove lists")
+
+
+@strawberry.input
+class DeleteProjectTraceRetentionPolicyInput:
+ id: GlobalID
+
+
+@strawberry.type
+class ProjectTraceRetentionPolicyMutationPayload:
+ query: Query = strawberry.field(default_factory=Query)
+ node: ProjectTraceRetentionPolicy
+
+
+@strawberry.type
+class ProjectTraceRetentionPolicyMutationMixin:
+ @strawberry.mutation(permission_classes=[IsNotReadOnly, IsAdminIfAuthEnabled, IsLocked]) # type: ignore
+ async def create_project_trace_retention_policy(
+ self,
+ info: Info[Context, None],
+ input: CreateProjectTraceRetentionPolicyInput,
+ ) -> ProjectTraceRetentionPolicyMutationPayload:
+ policy = models.ProjectTraceRetentionPolicy(
+ name=input.name,
+ cron_expression=TraceRetentionCronExpression.model_validate(input.cron_expression),
+ rule=_gql_to_db_rule(input.rule),
+ )
+ add_project_ids = (
+ []
+ if not isinstance(input.add_projects, list)
+ else [
+ from_global_id_with_expected_type(project_id, Project.__name__)
+ for project_id in input.add_projects
+ ]
+ )
+ async with info.context.db() as session:
+ session.add(policy)
+ await session.flush()
+ if add_project_ids:
+ stmt = (
+ sa.update(models.Project)
+ .where(models.Project.id.in_(set(add_project_ids)))
+ .values(trace_retention_policy_id=policy.id)
+ )
+ await session.execute(stmt)
+ return ProjectTraceRetentionPolicyMutationPayload(
+ node=ProjectTraceRetentionPolicy(id=policy.id, db_policy=policy),
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly, IsAdminIfAuthEnabled, IsLocked]) # type: ignore
+ async def patch_project_trace_retention_policy(
+ self,
+ info: Info[Context, None],
+ input: PatchProjectTraceRetentionPolicyInput,
+ ) -> ProjectTraceRetentionPolicyMutationPayload:
+ id_ = from_global_id_with_expected_type(input.id, ProjectTraceRetentionPolicy.__name__)
+ add_project_ids = (
+ []
+ if not isinstance(input.add_projects, list)
+ else [
+ from_global_id_with_expected_type(project_id, Project.__name__)
+ for project_id in input.add_projects
+ ]
+ )
+ remove_project_ids = (
+ []
+ if not isinstance(input.remove_projects, list)
+ else [
+ from_global_id_with_expected_type(project_id, Project.__name__)
+ for project_id in input.remove_projects
+ ]
+ )
+ async with info.context.db() as session:
+ policy = await session.get(models.ProjectTraceRetentionPolicy, id_)
+ if not policy:
+ raise NotFound(f"ProjectTraceRetentionPolicy with ID={input.id} not found")
+ if isinstance(input.name, str) and input.name != policy.name:
+ if id_ == DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID:
+ raise BadRequest(
+ "Cannot change the name of the default project trace retention policy"
+ )
+ policy.name = input.name
+ if isinstance(input.cron_expression, str):
+ policy.cron_expression = TraceRetentionCronExpression(root=input.cron_expression)
+ if isinstance(input.rule, ProjectTraceRetentionRuleInput):
+ policy.rule = _gql_to_db_rule(input.rule)
+ if policy in session.dirty:
+ await session.flush()
+ if add_project_ids:
+ stmt = (
+ sa.update(models.Project)
+ .where(models.Project.id.in_(set(add_project_ids)))
+ .values(trace_retention_policy_id=policy.id)
+ )
+ await session.execute(stmt)
+ if remove_project_ids:
+ stmt = (
+ sa.update(models.Project)
+ .where(models.Project.trace_retention_policy_id == policy.id)
+ .where(models.Project.id.in_(set(remove_project_ids)))
+ .values(trace_retention_policy_id=None)
+ )
+ await session.execute(stmt)
+ return ProjectTraceRetentionPolicyMutationPayload(
+ node=ProjectTraceRetentionPolicy(id=policy.id, db_policy=policy),
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly, IsAdminIfAuthEnabled]) # type: ignore
+ async def delete_project_trace_retention_policy(
+ self,
+ info: Info[Context, None],
+ input: DeleteProjectTraceRetentionPolicyInput,
+ ) -> ProjectTraceRetentionPolicyMutationPayload:
+ id_ = from_global_id_with_expected_type(input.id, ProjectTraceRetentionPolicy.__name__)
+ if id_ == DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID:
+ raise BadRequest("Cannot delete the default project trace retention policy.")
+ stmt = (
+ sa.delete(models.ProjectTraceRetentionPolicy)
+ .where(models.ProjectTraceRetentionPolicy.id == id_)
+ .returning(models.ProjectTraceRetentionPolicy)
+ )
+ async with info.context.db() as session:
+ policy = await session.scalar(stmt)
+ if not policy:
+ raise NotFound(f"ProjectTraceRetentionPolicy with ID={input.id} not found")
+ return ProjectTraceRetentionPolicyMutationPayload(
+ node=ProjectTraceRetentionPolicy(id=policy.id, db_policy=policy),
+ )
+
+
+def _gql_to_db_rule(
+ rule: ProjectTraceRetentionRuleInput,
+) -> TraceRetentionRule:
+ if isinstance(rule.max_days, ProjectTraceRetentionRuleMaxDaysInput):
+ return TraceRetentionRule(root=MaxDaysRule(max_days=rule.max_days.max_days))
+ elif isinstance(rule.max_count, ProjectTraceRetentionRuleMaxCountInput):
+ return TraceRetentionRule(root=MaxCountRule(max_count=rule.max_count.max_count))
+ elif isinstance(rule.max_days_or_count, ProjectTraceRetentionRuleMaxDaysOrCountInput):
+ return TraceRetentionRule(
+ root=MaxDaysOrCountRule(
+ max_days=rule.max_days_or_count.max_days,
+ max_count=rule.max_days_or_count.max_count,
+ )
+ )
+ else:
+ raise ValueError("Invalid rule input")
diff --git a/src/phoenix/server/api/mutations/span_annotations_mutations.py b/src/phoenix/server/api/mutations/span_annotations_mutations.py
index ee5c0760a7..318154c90c 100644
--- a/src/phoenix/server/api/mutations/span_annotations_mutations.py
+++ b/src/phoenix/server/api/mutations/span_annotations_mutations.py
@@ -1,19 +1,28 @@
-from collections.abc import Sequence
+from datetime import datetime
+from typing import Optional
import strawberry
-from sqlalchemy import delete, insert, update
-from strawberry import UNSET
-from strawberry.types import Info
+from sqlalchemy import delete, insert, select
+from starlette.requests import Request
+from strawberry import UNSET, Info
+from strawberry.relay import GlobalID
from phoenix.db import models
from phoenix.server.api.auth import IsLocked, IsNotReadOnly
from phoenix.server.api.context import Context
-from phoenix.server.api.input_types.CreateSpanAnnotationInput import CreateSpanAnnotationInput
+from phoenix.server.api.exceptions import BadRequest, NotFound, Unauthorized
+from phoenix.server.api.input_types.CreateSpanAnnotationInput import (
+ CreateSpanAnnotationInput,
+ CreateSpanNoteInput,
+)
from phoenix.server.api.input_types.DeleteAnnotationsInput import DeleteAnnotationsInput
from phoenix.server.api.input_types.PatchAnnotationInput import PatchAnnotationInput
from phoenix.server.api.queries import Query
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
+from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
from phoenix.server.api.types.node import from_global_id_with_expected_type
from phoenix.server.api.types.SpanAnnotation import SpanAnnotation, to_gql_span_annotation
+from phoenix.server.bearer_auth import PhoenixUser
from phoenix.server.dml_event import SpanAnnotationDeleteEvent, SpanAnnotationInsertEvent
@@ -29,33 +38,158 @@ class SpanAnnotationMutationMixin:
async def create_span_annotations(
self, info: Info[Context, None], input: list[CreateSpanAnnotationInput]
) -> SpanAnnotationMutationPayload:
- inserted_annotations: Sequence[models.SpanAnnotation] = []
+ if not input:
+ raise BadRequest("No span annotations provided.")
+
+ if any(d.name == "note" for d in input):
+ raise BadRequest("Span notes are not supported in this endpoint.")
+
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+
+ processed_annotations_map: dict[int, models.SpanAnnotation] = {}
+
+ span_rowids = []
+ for idx, annotation_input in enumerate(input):
+ try:
+ span_rowid = from_global_id_with_expected_type(annotation_input.span_id, "Span")
+ except ValueError:
+ raise BadRequest(
+ f"Invalid span ID for annotation at index {idx}: {annotation_input.span_id}"
+ )
+ span_rowids.append(span_rowid)
+
async with info.context.db() as session:
- values_list = [
- dict(
- span_rowid=from_global_id_with_expected_type(annotation.span_id, "Span"),
- name=annotation.name,
- label=annotation.label,
- score=annotation.score,
- explanation=annotation.explanation,
- annotator_kind=annotation.annotator_kind.value,
- metadata_=annotation.metadata,
+ for idx, (span_rowid, annotation_input) in enumerate(zip(span_rowids, input)):
+ resolved_identifier = ""
+ if annotation_input.identifier:
+ resolved_identifier = annotation_input.identifier
+ elif annotation_input.source == AnnotationSource.APP and user_id is not None:
+ # Ensure that the annotation has a per-user identifier if submitted via the UI
+ user_gid = str(GlobalID(type_name="User", node_id=str(user_id)))
+ resolved_identifier = f"px-app:{user_gid}"
+ values = {
+ "span_rowid": span_rowid,
+ "name": annotation_input.name,
+ "label": annotation_input.label,
+ "score": annotation_input.score,
+ "explanation": annotation_input.explanation,
+ "annotator_kind": annotation_input.annotator_kind.value,
+ "metadata_": annotation_input.metadata,
+ "identifier": resolved_identifier,
+ "source": annotation_input.source.value,
+ "user_id": user_id,
+ }
+
+ processed_annotation: Optional[models.SpanAnnotation] = None
+
+ q = select(models.SpanAnnotation).where(
+ models.SpanAnnotation.span_rowid == span_rowid,
+ models.SpanAnnotation.name == annotation_input.name,
+ models.SpanAnnotation.identifier == resolved_identifier,
+ )
+ existing_annotation = await session.scalar(q)
+
+ if existing_annotation:
+ existing_annotation.name = values["name"]
+ existing_annotation.label = values["label"]
+ existing_annotation.score = values["score"]
+ existing_annotation.explanation = values["explanation"]
+ existing_annotation.metadata_ = values["metadata_"]
+ existing_annotation.annotator_kind = values["annotator_kind"]
+ existing_annotation.source = values["source"]
+ existing_annotation.user_id = values["user_id"]
+ session.add(existing_annotation)
+ processed_annotation = existing_annotation
+
+ if processed_annotation is None:
+ stmt = insert(models.SpanAnnotation).values(**values)
+ stmt = stmt.returning(models.SpanAnnotation)
+ result = await session.scalars(stmt)
+ processed_annotation = result.one()
+
+ processed_annotations_map[idx] = processed_annotation
+
+ # Collect the objects that were inserted or updated
+ processed_annotation_objects = list(processed_annotations_map.values())
+ processed_annotation_ids = [anno.id for anno in processed_annotation_objects]
+
+ # Flush pending changes so DB-generated values (IDs, defaults) are populated
+ await session.flush()
+
+ # Re-fetch the annotations in a batch to get the final state including DB defaults
+ final_annotations_result = await session.scalars(
+ select(models.SpanAnnotation).where(
+ models.SpanAnnotation.id.in_(processed_annotation_ids)
)
- for annotation in input
- ]
- stmt = (
- insert(models.SpanAnnotation).values(values_list).returning(models.SpanAnnotation)
)
+ final_annotations_by_id = {anno.id: anno for anno in final_annotations_result.all()}
+
+ # Order the final annotations according to the input order
+ ordered_final_annotations = [
+ final_annotations_by_id[id] for id in processed_annotation_ids
+ ]
+
+ # Queue the insert event (note: this runs before the commit below)
+ if ordered_final_annotations:
+ info.context.event_queue.put(
+ SpanAnnotationInsertEvent(tuple(processed_annotation_ids))
+ )
+
+ # Convert the fully loaded annotations to GQL types
+ returned_annotations = [
+ to_gql_span_annotation(anno) for anno in ordered_final_annotations
+ ]
+
+ await session.commit()
+
+ return SpanAnnotationMutationPayload(
+ span_annotations=returned_annotations,
+ query=Query(),
+ )
+
+ @strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
+ async def create_span_note(
+ self, info: Info[Context, None], annotation_input: CreateSpanNoteInput
+ ) -> SpanAnnotationMutationPayload:
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+
+ try:
+ span_rowid = from_global_id_with_expected_type(annotation_input.span_id, "Span")
+ except ValueError:
+ raise BadRequest(f"Invalid span ID: {annotation_input.span_id}")
+
+ async with info.context.db() as session:
+ timestamp = datetime.now().isoformat()
+ note_identifier = f"px-span-note:{timestamp}"
+ values = {
+ "span_rowid": span_rowid,
+ "name": "note",
+ "label": None,
+ "score": None,
+ "explanation": annotation_input.note,
+ "annotator_kind": AnnotatorKind.HUMAN.value,
+ "metadata_": dict(),
+ "identifier": note_identifier,
+ "source": AnnotationSource.APP.value,
+ "user_id": user_id,
+ }
+
+ stmt = insert(models.SpanAnnotation).values(**values)
+ stmt = stmt.returning(models.SpanAnnotation)
result = await session.scalars(stmt)
- inserted_annotations = result.all()
- if inserted_annotations:
- info.context.event_queue.put(
- SpanAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
- )
+ processed_annotation = result.one()
+
+ info.context.event_queue.put(SpanAnnotationInsertEvent((processed_annotation.id,)))
+ returned_annotation = to_gql_span_annotation(processed_annotation)
+ await session.commit()
return SpanAnnotationMutationPayload(
- span_annotations=[
- to_gql_span_annotation(annotation) for annotation in inserted_annotations
- ],
+ span_annotations=[returned_annotation],
query=Query(),
)
@@ -63,66 +197,134 @@ async def create_span_annotations(
async def patch_span_annotations(
self, info: Info[Context, None], input: list[PatchAnnotationInput]
) -> SpanAnnotationMutationPayload:
- patched_annotations = []
- async with info.context.db() as session:
- for annotation in input:
+ if not input:
+ raise BadRequest("No span annotations provided.")
+
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+
+ patch_by_id = {}
+ for patch in input:
+ try:
span_annotation_id = from_global_id_with_expected_type(
- annotation.annotation_id, "SpanAnnotation"
+ patch.annotation_id, SpanAnnotation.__name__
)
- patch = {
- column.key: patch_value
- for column, patch_value, column_is_nullable in (
- (models.SpanAnnotation.name, annotation.name, False),
- (
- models.SpanAnnotation.annotator_kind,
- annotation.annotator_kind.value
- if annotation.annotator_kind is not None
- and annotation.annotator_kind is not UNSET
- else None,
- False,
- ),
- (models.SpanAnnotation.label, annotation.label, True),
- (models.SpanAnnotation.score, annotation.score, True),
- (models.SpanAnnotation.explanation, annotation.explanation, True),
- (models.SpanAnnotation.metadata_, annotation.metadata, False),
+ except ValueError:
+ raise BadRequest(f"Invalid span annotation ID: {patch.annotation_id}")
+ if span_annotation_id in patch_by_id:
+ raise BadRequest(f"Duplicate patch for span annotation ID: {span_annotation_id}")
+ patch_by_id[span_annotation_id] = patch
+
+ async with info.context.db() as session:
+ span_annotations_by_id = {}
+ for span_annotation in await session.scalars(
+ select(models.SpanAnnotation).where(
+ models.SpanAnnotation.id.in_(patch_by_id.keys())
+ )
+ ):
+ if span_annotation.user_id != user_id:
+ raise Unauthorized(
+ "At least one span annotation is not associated with the current user."
)
- if patch_value is not UNSET and (patch_value is not None or column_is_nullable)
- }
- span_annotation = await session.scalar(
- update(models.SpanAnnotation)
- .where(models.SpanAnnotation.id == span_annotation_id)
- .values(**patch)
- .returning(models.SpanAnnotation)
+ span_annotations_by_id[span_annotation.id] = span_annotation
+ missing_span_annotation_ids = set(patch_by_id.keys()) - set(
+ span_annotations_by_id.keys()
+ )
+ if missing_span_annotation_ids:
+ raise NotFound(
+ f"Could not find span annotations with IDs: {missing_span_annotation_ids}"
)
- if span_annotation is not None:
- patched_annotations.append(to_gql_span_annotation(span_annotation))
- info.context.event_queue.put(SpanAnnotationInsertEvent((span_annotation.id,)))
- return SpanAnnotationMutationPayload(span_annotations=patched_annotations, query=Query())
+ for span_annotation_id, patch in patch_by_id.items():
+ span_annotation = span_annotations_by_id[span_annotation_id]
+ if patch.name:
+ span_annotation.name = patch.name
+ if patch.annotator_kind:
+ span_annotation.annotator_kind = patch.annotator_kind.value
+ if patch.label is not UNSET:
+ span_annotation.label = patch.label
+ if patch.score is not UNSET:
+ span_annotation.score = patch.score
+ if patch.explanation is not UNSET:
+ span_annotation.explanation = patch.explanation
+ if patch.metadata is not UNSET:
+ assert isinstance(patch.metadata, dict)
+ span_annotation.metadata_ = patch.metadata
+ if patch.identifier is not UNSET:
+ span_annotation.identifier = patch.identifier or ""
+ session.add(span_annotation)
+
+ patched_annotations = [
+ to_gql_span_annotation(span_annotation)
+ for span_annotation in span_annotations_by_id.values()
+ ]
+
+ info.context.event_queue.put(
+ SpanAnnotationInsertEvent(tuple(span_annotations_by_id.keys()))
+ )
+ return SpanAnnotationMutationPayload(
+ span_annotations=patched_annotations,
+ query=Query(),
+ )
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
async def delete_span_annotations(
self, info: Info[Context, None], input: DeleteAnnotationsInput
) -> SpanAnnotationMutationPayload:
- span_annotation_ids = [
- from_global_id_with_expected_type(global_id, "SpanAnnotation")
- for global_id in input.annotation_ids
- ]
+ if not input.annotation_ids:
+ raise BadRequest("No span annotation IDs provided.")
+
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ user_is_admin = False
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+ user_is_admin = user.is_admin
+
+ span_annotation_ids: dict[int, None] = {} # use a dict to preserve ordering
+ for annotation_gid in input.annotation_ids:
+ try:
+ span_annotation_id = from_global_id_with_expected_type(
+ annotation_gid, SpanAnnotation.__name__
+ )
+ except ValueError:
+ raise BadRequest(f"Invalid span annotation ID: {annotation_gid}")
+ if span_annotation_id in span_annotation_ids:
+ raise BadRequest(f"Duplicate span annotation ID: {span_annotation_id}")
+ span_annotation_ids[span_annotation_id] = None
+
async with info.context.db() as session:
stmt = (
delete(models.SpanAnnotation)
- .where(models.SpanAnnotation.id.in_(span_annotation_ids))
+ .where(models.SpanAnnotation.id.in_(span_annotation_ids.keys()))
.returning(models.SpanAnnotation)
)
result = await session.scalars(stmt)
- deleted_annotations = result.all()
+ deleted_annotations_by_id = {annotation.id: annotation for annotation in result.all()}
- deleted_annotations_gql = [
- to_gql_span_annotation(annotation) for annotation in deleted_annotations
- ]
- if deleted_annotations:
- info.context.event_queue.put(
- SpanAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+ if not user_is_admin and any(
+ annotation.user_id != user_id for annotation in deleted_annotations_by_id.values()
+ ):
+ await session.rollback()
+ raise Unauthorized(
+ "At least one span annotation is not associated with the current user."
+ )
+
+ missing_span_annotation_ids = set(span_annotation_ids.keys()) - set(
+ deleted_annotations_by_id.keys()
)
+ if missing_span_annotation_ids:
+ raise NotFound(
+ f"Could not find span annotations with IDs: {missing_span_annotation_ids}"
+ )
+
+ deleted_annotations_gql = [
+ to_gql_span_annotation(deleted_annotations_by_id[id]) for id in span_annotation_ids
+ ]
+ info.context.event_queue.put(
+ SpanAnnotationDeleteEvent(tuple(deleted_annotations_by_id.keys()))
+ )
return SpanAnnotationMutationPayload(
span_annotations=deleted_annotations_gql, query=Query()
)
diff --git a/src/phoenix/server/api/mutations/trace_annotations_mutations.py b/src/phoenix/server/api/mutations/trace_annotations_mutations.py
index 017d8cb836..633bcab467 100644
--- a/src/phoenix/server/api/mutations/trace_annotations_mutations.py
+++ b/src/phoenix/server/api/mutations/trace_annotations_mutations.py
@@ -1,19 +1,23 @@
-from collections.abc import Sequence
+from typing import Optional
import strawberry
-from sqlalchemy import delete, insert, update
-from strawberry import UNSET
-from strawberry.types import Info
+from sqlalchemy import delete, insert, select
+from starlette.requests import Request
+from strawberry import UNSET, Info
+from strawberry.relay.types import GlobalID
from phoenix.db import models
from phoenix.server.api.auth import IsLocked, IsNotReadOnly
from phoenix.server.api.context import Context
+from phoenix.server.api.exceptions import BadRequest, NotFound, Unauthorized
from phoenix.server.api.input_types.CreateTraceAnnotationInput import CreateTraceAnnotationInput
from phoenix.server.api.input_types.DeleteAnnotationsInput import DeleteAnnotationsInput
from phoenix.server.api.input_types.PatchAnnotationInput import PatchAnnotationInput
from phoenix.server.api.queries import Query
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
from phoenix.server.api.types.node import from_global_id_with_expected_type
from phoenix.server.api.types.TraceAnnotation import TraceAnnotation, to_gql_trace_annotation
+from phoenix.server.bearer_auth import PhoenixUser
from phoenix.server.dml_event import TraceAnnotationDeleteEvent, TraceAnnotationInsertEvent
@@ -29,33 +33,93 @@ class TraceAnnotationMutationMixin:
async def create_trace_annotations(
self, info: Info[Context, None], input: list[CreateTraceAnnotationInput]
) -> TraceAnnotationMutationPayload:
- inserted_annotations: Sequence[models.TraceAnnotation] = []
+ if not input:
+ raise BadRequest("No trace annotations provided.")
+
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+
+ processed_annotations_map: dict[int, models.TraceAnnotation] = {}
+
+ trace_rowids = []
+ for idx, annotation_input in enumerate(input):
+ try:
+ trace_rowid = from_global_id_with_expected_type(annotation_input.trace_id, "Trace")
+ except ValueError:
+ raise BadRequest(
+ f"Invalid trace ID for annotation at index {idx}: "
+ f"{annotation_input.trace_id}"
+ )
+ trace_rowids.append(trace_rowid)
+
async with info.context.db() as session:
- values_list = [
- dict(
- trace_rowid=from_global_id_with_expected_type(annotation.trace_id, "Trace"),
- name=annotation.name,
- label=annotation.label,
- score=annotation.score,
- explanation=annotation.explanation,
- annotator_kind=annotation.annotator_kind.value,
- metadata_=annotation.metadata,
+ for idx, (trace_rowid, annotation_input) in enumerate(zip(trace_rowids, input)):
+ resolved_identifier = ""
+ if annotation_input.identifier:
+ resolved_identifier = annotation_input.identifier
+ elif annotation_input.source == AnnotationSource.APP and user_id is not None:
+ # Ensure that the annotation has a per-user identifier if submitted via the UI
+ user_gid = str(GlobalID(type_name="User", node_id=str(user_id)))
+ resolved_identifier = f"px-app:{user_gid}"
+ values = {
+ "trace_rowid": trace_rowid,
+ "name": annotation_input.name,
+ "label": annotation_input.label,
+ "score": annotation_input.score,
+ "explanation": annotation_input.explanation,
+ "annotator_kind": annotation_input.annotator_kind.value,
+ "metadata_": annotation_input.metadata,
+ "identifier": resolved_identifier,
+ "source": annotation_input.source.value,
+ "user_id": user_id,
+ }
+
+ processed_annotation: Optional[models.TraceAnnotation] = None
+
+ # Check if an annotation with this trace_rowid, name, and identifier already exists
+ q = select(models.TraceAnnotation).where(
+ models.TraceAnnotation.trace_rowid == trace_rowid,
+ models.TraceAnnotation.name == annotation_input.name,
+ models.TraceAnnotation.identifier == resolved_identifier,
)
- for annotation in input
- ]
- stmt = (
- insert(models.TraceAnnotation).values(values_list).returning(models.TraceAnnotation)
- )
- result = await session.scalars(stmt)
- inserted_annotations = result.all()
- if inserted_annotations:
- info.context.event_queue.put(
- TraceAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
- )
+ existing_annotation = await session.scalar(q)
+
+ if existing_annotation:
+ # Update existing annotation
+ existing_annotation.name = values["name"]
+ existing_annotation.label = values["label"]
+ existing_annotation.score = values["score"]
+ existing_annotation.explanation = values["explanation"]
+ existing_annotation.metadata_ = values["metadata_"]
+ existing_annotation.annotator_kind = values["annotator_kind"]
+ existing_annotation.source = values["source"]
+ existing_annotation.user_id = values["user_id"]
+ session.add(existing_annotation)
+ processed_annotation = existing_annotation
+
+ if processed_annotation is None:
+ stmt = insert(models.TraceAnnotation).values(**values)
+ stmt = stmt.returning(models.TraceAnnotation)
+ result = await session.scalars(stmt)
+ processed_annotation = result.one()
+
+ processed_annotations_map[idx] = processed_annotation
+
+ await session.commit()
+
+ inserted_annotation_ids = tuple(anno.id for anno in processed_annotations_map.values())
+ if inserted_annotation_ids:
+ info.context.event_queue.put(TraceAnnotationInsertEvent(inserted_annotation_ids))
+
+ returned_annotations = [
+ to_gql_trace_annotation(processed_annotations_map[i])
+ for i in sorted(processed_annotations_map.keys())
+ ]
+
return TraceAnnotationMutationPayload(
- trace_annotations=[
- to_gql_trace_annotation(annotation) for annotation in inserted_annotations
- ],
+ trace_annotations=returned_annotations,
query=Query(),
)
@@ -63,65 +127,132 @@ async def create_trace_annotations(
async def patch_trace_annotations(
self, info: Info[Context, None], input: list[PatchAnnotationInput]
) -> TraceAnnotationMutationPayload:
- patched_annotations = []
- async with info.context.db() as session:
- for annotation in input:
+ if not input:
+ raise BadRequest("No trace annotations provided.")
+
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+
+ patch_by_id = {}
+ for patch in input:
+ try:
trace_annotation_id = from_global_id_with_expected_type(
- annotation.annotation_id, "TraceAnnotation"
+ patch.annotation_id, "TraceAnnotation"
+ )
+ except ValueError:
+ raise BadRequest(f"Invalid trace annotation ID: {patch.annotation_id}")
+ if trace_annotation_id in patch_by_id:
+ raise BadRequest(f"Duplicate patch for trace annotation ID: {trace_annotation_id}")
+ patch_by_id[trace_annotation_id] = patch
+
+ async with info.context.db() as session:
+ trace_annotations_by_id = {}
+ for trace_annotation in await session.scalars(
+ select(models.TraceAnnotation).where(
+ models.TraceAnnotation.id.in_(patch_by_id.keys())
)
- patch = {
- column.key: patch_value
- for column, patch_value, column_is_nullable in (
- (models.TraceAnnotation.name, annotation.name, False),
- (
- models.TraceAnnotation.annotator_kind,
- annotation.annotator_kind.value
- if annotation.annotator_kind is not None
- else None,
- False,
- ),
- (models.TraceAnnotation.label, annotation.label, True),
- (models.TraceAnnotation.score, annotation.score, True),
- (models.TraceAnnotation.explanation, annotation.explanation, True),
- (models.TraceAnnotation.metadata_, annotation.metadata, False),
+ ):
+ if trace_annotation.user_id != user_id:
+ raise Unauthorized(
+ "At least one trace annotation is not associated with the current user."
)
- if patch_value is not UNSET and (patch_value is not None or column_is_nullable)
- }
- trace_annotation = await session.scalar(
- update(models.TraceAnnotation)
- .where(models.TraceAnnotation.id == trace_annotation_id)
- .values(**patch)
- .returning(models.TraceAnnotation)
+ trace_annotations_by_id[trace_annotation.id] = trace_annotation
+
+ missing_trace_annotation_ids = set(patch_by_id.keys()) - set(
+ trace_annotations_by_id.keys()
+ )
+ if missing_trace_annotation_ids:
+ raise NotFound(
+ f"Could not find trace annotations with IDs: {missing_trace_annotation_ids}"
)
- if trace_annotation:
- patched_annotations.append(to_gql_trace_annotation(trace_annotation))
- info.context.event_queue.put(TraceAnnotationInsertEvent((trace_annotation.id,)))
- return TraceAnnotationMutationPayload(trace_annotations=patched_annotations, query=Query())
+
+ for trace_annotation_id, patch in patch_by_id.items():
+ trace_annotation = trace_annotations_by_id[trace_annotation_id]
+ if patch.name:
+ trace_annotation.name = patch.name
+ if patch.annotator_kind:
+ trace_annotation.annotator_kind = patch.annotator_kind.value
+ if patch.label is not UNSET:
+ trace_annotation.label = patch.label
+ if patch.score is not UNSET:
+ trace_annotation.score = patch.score
+ if patch.explanation is not UNSET:
+ trace_annotation.explanation = patch.explanation
+ if patch.metadata is not UNSET:
+ assert isinstance(patch.metadata, dict)
+ trace_annotation.metadata_ = patch.metadata
+ if patch.identifier is not UNSET:
+ trace_annotation.identifier = patch.identifier or ""
+ session.add(trace_annotation)
+ await session.commit()
+
+ patched_annotations = [
+ to_gql_trace_annotation(trace_annotation)
+ for trace_annotation in trace_annotations_by_id.values()
+ ]
+ info.context.event_queue.put(TraceAnnotationInsertEvent(tuple(patch_by_id.keys())))
+ return TraceAnnotationMutationPayload(
+ trace_annotations=patched_annotations,
+ query=Query(),
+ )
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
async def delete_trace_annotations(
self, info: Info[Context, None], input: DeleteAnnotationsInput
) -> TraceAnnotationMutationPayload:
- trace_annotation_ids = [
- from_global_id_with_expected_type(global_id, "TraceAnnotation")
- for global_id in input.annotation_ids
- ]
+ if not input.annotation_ids:
+ raise BadRequest("No trace annotation IDs provided.")
+
+ trace_annotation_ids: dict[int, None] = {} # use dict to preserve order
+ for annotation_gid in input.annotation_ids:
+ try:
+ annotation_id = from_global_id_with_expected_type(annotation_gid, "TraceAnnotation")
+ except ValueError:
+ raise BadRequest(f"Invalid trace annotation ID: {annotation_gid}")
+ if annotation_id in trace_annotation_ids:
+ raise BadRequest(f"Duplicate trace annotation ID: {annotation_id}")
+ trace_annotation_ids[annotation_id] = None
+
+ assert isinstance(request := info.context.request, Request)
+ user_id: Optional[int] = None
+ user_is_admin = False
+ if "user" in request.scope and isinstance((user := info.context.user), PhoenixUser):
+ user_id = int(user.identity)
+ user_is_admin = user.is_admin
+
async with info.context.db() as session:
- stmt = (
+ result = await session.scalars(
delete(models.TraceAnnotation)
- .where(models.TraceAnnotation.id.in_(trace_annotation_ids))
+ .where(models.TraceAnnotation.id.in_(trace_annotation_ids.keys()))
.returning(models.TraceAnnotation)
)
- result = await session.scalars(stmt)
- deleted_annotations = result.all()
-
- deleted_annotations_gql = [
- to_gql_trace_annotation(annotation) for annotation in deleted_annotations
- ]
- if deleted_annotations:
- info.context.event_queue.put(
- TraceAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+ deleted_annotations_by_id = {annotation.id: annotation for annotation in result.all()}
+
+ if not user_is_admin and any(
+ annotation.user_id != user_id for annotation in deleted_annotations_by_id.values()
+ ):
+ await session.rollback()
+ raise Unauthorized(
+ "At least one trace annotation is not associated with the current user "
+ "and the current user is not an admin."
+ )
+
+ missing_trace_annotation_ids = set(trace_annotation_ids.keys()) - set(
+ deleted_annotations_by_id.keys()
)
+ if missing_trace_annotation_ids:
+ raise NotFound(
+ f"Could not find trace annotations with IDs: {missing_trace_annotation_ids}"
+ )
+
+ deleted_gql_annotations = [
+ to_gql_trace_annotation(deleted_annotations_by_id[id]) for id in trace_annotation_ids
+ ]
+ info.context.event_queue.put(
+ TraceAnnotationDeleteEvent(tuple(deleted_annotations_by_id.keys()))
+ )
return TraceAnnotationMutationPayload(
- trace_annotations=deleted_annotations_gql, query=Query()
+ trace_annotations=deleted_gql_annotations, query=Query()
)
diff --git a/src/phoenix/server/api/queries.py b/src/phoenix/server/api/queries.py
index f703757e43..7ff33aca48 100644
--- a/src/phoenix/server/api/queries.py
+++ b/src/phoenix/server/api/queries.py
@@ -19,6 +19,7 @@
getenv,
)
from phoenix.db import enums, models
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
from phoenix.db.helpers import SupportedSQLDialect, exclude_experiment_projects
from phoenix.db.models import DatasetExample as OrmExample
from phoenix.db.models import DatasetExampleRevision as OrmRevision
@@ -42,6 +43,7 @@
from phoenix.server.api.input_types.Coordinates import InputCoordinate2D, InputCoordinate3D
from phoenix.server.api.input_types.DatasetSort import DatasetSort
from phoenix.server.api.input_types.InvocationParameters import InvocationParameter
+from phoenix.server.api.types.AnnotationConfig import AnnotationConfig, to_gql_annotation_config
from phoenix.server.api.types.Cluster import Cluster, to_gql_clusters
from phoenix.server.api.types.Dataset import Dataset, to_gql_dataset
from phoenix.server.api.types.DatasetExample import DatasetExample
@@ -65,14 +67,17 @@
from phoenix.server.api.types.pagination import ConnectionArgs, CursorString, connection_from_list
from phoenix.server.api.types.Project import Project
from phoenix.server.api.types.ProjectSession import ProjectSession, to_gql_project_session
+from phoenix.server.api.types.ProjectTraceRetentionPolicy import ProjectTraceRetentionPolicy
from phoenix.server.api.types.Prompt import Prompt, to_gql_prompt_from_orm
from phoenix.server.api.types.PromptLabel import PromptLabel, to_gql_prompt_label
from phoenix.server.api.types.PromptVersion import PromptVersion, to_gql_prompt_version
from phoenix.server.api.types.PromptVersionTag import PromptVersionTag, to_gql_prompt_version_tag
from phoenix.server.api.types.SortDir import SortDir
from phoenix.server.api.types.Span import Span
+from phoenix.server.api.types.SpanAnnotation import SpanAnnotation, to_gql_span_annotation
from phoenix.server.api.types.SystemApiKey import SystemApiKey
from phoenix.server.api.types.Trace import Trace
+from phoenix.server.api.types.TraceAnnotation import TraceAnnotation, to_gql_trace_annotation
from phoenix.server.api.types.User import User, to_gql_user
from phoenix.server.api.types.UserApiKey import UserApiKey, to_gql_api_key
from phoenix.server.api.types.UserRole import UserRole
@@ -588,6 +593,26 @@ async def node(self, id: GlobalID, info: Info[Context, None]) -> Node:
if not (prompt_version_tag := await session.get(models.PromptVersionTag, node_id)):
raise NotFound(f"Unknown prompt version tag: {id}")
return to_gql_prompt_version_tag(prompt_version_tag)
+ elif type_name == ProjectTraceRetentionPolicy.__name__:
+ async with info.context.db() as session:
+ db_policy = await session.scalar(
+ select(models.ProjectTraceRetentionPolicy).filter_by(id=node_id)
+ )
+ if not db_policy:
+ raise NotFound(f"Unknown project trace retention policy: {id}")
+ return ProjectTraceRetentionPolicy(id=db_policy.id, db_policy=db_policy)
+ elif type_name == SpanAnnotation.__name__:
+ async with info.context.db() as session:
+ span_annotation = await session.get(models.SpanAnnotation, node_id)
+ if not span_annotation:
+ raise NotFound(f"Unknown span annotation: {id}")
+ return to_gql_span_annotation(span_annotation)
+ elif type_name == TraceAnnotation.__name__:
+ async with info.context.db() as session:
+ trace_annotation = await session.get(models.TraceAnnotation, node_id)
+ if not trace_annotation:
+ raise NotFound(f"Unknown trace annotation: {id}")
+ return to_gql_trace_annotation(trace_annotation)
raise NotFound(f"Unknown node type: {type_name}")
@strawberry.field
@@ -657,6 +682,28 @@ async def prompt_labels(
args=args,
)
+ @strawberry.field
+ async def annotation_configs(
+ self,
+ info: Info[Context, None],
+ first: Optional[int] = 50,
+ last: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ ) -> Connection[AnnotationConfig]:
+ args = ConnectionArgs(
+ first=first,
+ after=after if isinstance(after, CursorString) else None,
+ last=last,
+ before=before if isinstance(before, CursorString) else None,
+ )
+ async with info.context.db() as session:
+ configs = await session.stream_scalars(
+ select(models.AnnotationConfig).order_by(models.AnnotationConfig.name)
+ )
+ data = [to_gql_annotation_config(config) async for config in configs]
+ return connection_from_list(data=data, args=args)
+
@strawberry.field
def clusters(
self,
@@ -785,6 +832,45 @@ def hdbscan_clustering(
clustered_events=clustered_events,
)
+ @strawberry.field
+ async def default_project_trace_retention_policy(
+ self,
+ info: Info[Context, None],
+ ) -> ProjectTraceRetentionPolicy:
+ stmt = select(models.ProjectTraceRetentionPolicy).filter_by(
+ id=DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+ )
+ async with info.context.db() as session:
+ db_policy = await session.scalar(stmt)
+ assert db_policy
+ return ProjectTraceRetentionPolicy(id=db_policy.id, db_policy=db_policy)
+
+ @strawberry.field
+ async def project_trace_retention_policies(
+ self,
+ info: Info[Context, None],
+ first: Optional[int] = 100,
+ last: Optional[int] = UNSET,
+ after: Optional[CursorString] = UNSET,
+ before: Optional[CursorString] = UNSET,
+ ) -> Connection[ProjectTraceRetentionPolicy]:
+ args = ConnectionArgs(
+ first=first,
+ after=after if isinstance(after, CursorString) else None,
+ last=last,
+ before=before if isinstance(before, CursorString) else None,
+ )
+ stmt = select(models.ProjectTraceRetentionPolicy).order_by(
+ models.ProjectTraceRetentionPolicy.id
+ )
+ async with info.context.db() as session:
+ result = await session.stream_scalars(stmt)
+ data = [
+ ProjectTraceRetentionPolicy(id=db_policy.id, db_policy=db_policy)
+ async for db_policy in result
+ ]
+ return connection_from_list(data=data, args=args)
+
@strawberry.field(
description="The allocated storage capacity of the database in bytes. "
"Return None if this information is unavailable.",
diff --git a/src/phoenix/server/api/routers/v1/__init__.py b/src/phoenix/server/api/routers/v1/__init__.py
index 9f0b614c69..9e5fed21c3 100644
--- a/src/phoenix/server/api/routers/v1/__init__.py
+++ b/src/phoenix/server/api/routers/v1/__init__.py
@@ -4,6 +4,8 @@
from phoenix.server.bearer_auth import is_authenticated
+from .annotation_configs import router as annotation_configs_router
+from .annotations import router as annotations_router
from .datasets import router as datasets_router
from .evaluations import router as evaluations_router
from .experiment_evaluations import router as experiment_evaluations_router
@@ -56,6 +58,8 @@ def create_v1_router(authentication_enabled: bool) -> APIRouter:
]
),
)
+ router.include_router(annotation_configs_router)
+ router.include_router(annotations_router)
router.include_router(datasets_router)
router.include_router(experiments_router)
router.include_router(experiment_runs_router)
diff --git a/src/phoenix/server/api/routers/v1/annotation_configs.py b/src/phoenix/server/api/routers/v1/annotation_configs.py
new file mode 100644
index 0000000000..ef13bf15dd
--- /dev/null
+++ b/src/phoenix/server/api/routers/v1/annotation_configs.py
@@ -0,0 +1,449 @@
+import logging
+from typing import Annotated, List, Literal, Optional, Union
+
+from fastapi import APIRouter, HTTPException, Path, Query
+from pydantic import Field, RootModel
+from sqlalchemy import delete, select
+from sqlalchemy.exc import IntegrityError as PostgreSQLIntegrityError
+from sqlean.dbapi2 import IntegrityError as SQLiteIntegrityError # type: ignore[import-untyped]
+from starlette.requests import Request
+from starlette.status import (
+ HTTP_400_BAD_REQUEST,
+ HTTP_404_NOT_FOUND,
+ HTTP_409_CONFLICT,
+)
+from strawberry.relay import GlobalID
+from typing_extensions import TypeAlias, assert_never
+
+from phoenix.db import models
+from phoenix.db.types.annotation_configs import (
+ AnnotationConfigType,
+ AnnotationType,
+ OptimizationDirection,
+)
+from phoenix.db.types.annotation_configs import (
+ CategoricalAnnotationConfig as CategoricalAnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ CategoricalAnnotationValue as CategoricalAnnotationValueModel,
+)
+from phoenix.db.types.annotation_configs import (
+ ContinuousAnnotationConfig as ContinuousAnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ FreeformAnnotationConfig as FreeformAnnotationConfigModel,
+)
+from phoenix.server.api.routers.v1.models import V1RoutesBaseModel
+from phoenix.server.api.routers.v1.utils import PaginatedResponseBody, ResponseBody
+from phoenix.server.api.types.AnnotationConfig import (
+ CategoricalAnnotationConfig as CategoricalAnnotationConfigType,
+)
+from phoenix.server.api.types.AnnotationConfig import (
+ ContinuousAnnotationConfig as ContinuousAnnotationConfigType,
+)
+from phoenix.server.api.types.AnnotationConfig import (
+ FreeformAnnotationConfig as FreeformAnnotationConfigType,
+)
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter(tags=["annotation_configs"])
+
+
+class CategoricalAnnotationValue(V1RoutesBaseModel):
+ label: str
+ score: Optional[float] = None
+
+
+class CategoricalAnnotationConfigData(V1RoutesBaseModel):
+ name: str
+ type: Literal[AnnotationType.CATEGORICAL.value] # type: ignore[name-defined]
+ description: Optional[str] = None
+ optimization_direction: OptimizationDirection
+ values: List[CategoricalAnnotationValue]
+
+
+class ContinuousAnnotationConfigData(V1RoutesBaseModel):
+ name: str
+ type: Literal[AnnotationType.CONTINUOUS.value] # type: ignore[name-defined]
+ description: Optional[str] = None
+ optimization_direction: OptimizationDirection
+ lower_bound: Optional[float] = None
+ upper_bound: Optional[float] = None
+
+
+class FreeformAnnotationConfigData(V1RoutesBaseModel):
+ name: str
+ type: Literal[AnnotationType.FREEFORM.value] # type: ignore[name-defined]
+ description: Optional[str] = None
+
+
+AnnotationConfigData: TypeAlias = Annotated[
+ Union[
+ CategoricalAnnotationConfigData,
+ ContinuousAnnotationConfigData,
+ FreeformAnnotationConfigData,
+ ],
+ Field(..., discriminator="type"),
+]
+
+
+class CategoricalAnnotationConfig(CategoricalAnnotationConfigData):
+ id: str
+
+
+class ContinuousAnnotationConfig(ContinuousAnnotationConfigData):
+ id: str
+
+
+class FreeformAnnotationConfig(FreeformAnnotationConfigData):
+ id: str
+
+
+AnnotationConfig: TypeAlias = Annotated[
+ Union[
+ CategoricalAnnotationConfig,
+ ContinuousAnnotationConfig,
+ FreeformAnnotationConfig,
+ ],
+ Field(..., discriminator="type"),
+]
+
+
+def db_to_api_annotation_config(
+ annotation_config: models.AnnotationConfig,
+) -> AnnotationConfig:
+ config = annotation_config.config
+ name = annotation_config.name
+ type_ = config.type
+ description = config.description
+ if isinstance(config, ContinuousAnnotationConfigModel):
+ return ContinuousAnnotationConfig(
+ id=str(GlobalID(ContinuousAnnotationConfigType.__name__, str(annotation_config.id))),
+ name=name,
+ type=type_,
+ description=description,
+ optimization_direction=config.optimization_direction,
+ lower_bound=config.lower_bound,
+ upper_bound=config.upper_bound,
+ )
+ if isinstance(config, CategoricalAnnotationConfigModel):
+ return CategoricalAnnotationConfig(
+ id=str(GlobalID(CategoricalAnnotationConfigType.__name__, str(annotation_config.id))),
+ name=name,
+ type=type_,
+ description=description,
+ optimization_direction=config.optimization_direction,
+ values=[
+ CategoricalAnnotationValue(label=val.label, score=val.score)
+ for val in config.values
+ ],
+ )
+ if isinstance(config, FreeformAnnotationConfigModel):
+ return FreeformAnnotationConfig(
+ id=str(GlobalID(FreeformAnnotationConfigType.__name__, str(annotation_config.id))),
+ name=name,
+ type=type_,
+ description=description,
+ )
+ assert_never(config)
+
+
+def _get_annotation_global_id(annotation_config: models.AnnotationConfig) -> GlobalID:
+ config = annotation_config.config
+ if isinstance(config, ContinuousAnnotationConfigModel):
+ return GlobalID(ContinuousAnnotationConfigType.__name__, str(annotation_config.id))
+ if isinstance(config, CategoricalAnnotationConfigModel):
+ return GlobalID(CategoricalAnnotationConfigType.__name__, str(annotation_config.id))
+ if isinstance(config, FreeformAnnotationConfigModel):
+ return GlobalID(FreeformAnnotationConfigType.__name__, str(annotation_config.id))
+ assert_never(config)
+
+
+class CreateAnnotationConfigData(RootModel[AnnotationConfigData]):
+ root: AnnotationConfigData
+
+
+class GetAnnotationConfigsResponseBody(PaginatedResponseBody[AnnotationConfig]):
+ pass
+
+
+class GetAnnotationConfigResponseBody(ResponseBody[AnnotationConfig]):
+ pass
+
+
+class CreateAnnotationConfigResponseBody(ResponseBody[AnnotationConfig]):
+ pass
+
+
+class UpdateAnnotationConfigResponseBody(ResponseBody[AnnotationConfig]):
+ pass
+
+
+class DeleteAnnotationConfigResponseBody(ResponseBody[AnnotationConfig]):
+ pass
+
+
+@router.get(
+ "/annotation_configs",
+ summary="List annotation configurations",
+ description="Retrieve a paginated list of all annotation configurations in the system.",
+ response_description="A list of annotation configurations with pagination information",
+)
+async def list_annotation_configs(
+ request: Request,
+ cursor: Optional[str] = Query(
+ default=None,
+ description="Cursor for pagination (base64-encoded annotation config ID)",
+ ),
+ limit: int = Query(100, gt=0, description="Maximum number of configs to return"),
+) -> GetAnnotationConfigsResponseBody:
+ cursor_id: Optional[int] = None
+ if cursor:
+ try:
+ cursor_gid = GlobalID.from_id(cursor)
+ except ValueError:
+ raise HTTPException(
+ detail=f"Invalid cursor: {cursor}",
+ status_code=HTTP_400_BAD_REQUEST,
+ )
+ if cursor_gid.type_name not in (
+ CategoricalAnnotationConfigType.__name__,
+ ContinuousAnnotationConfigType.__name__,
+ FreeformAnnotationConfigType.__name__,
+ ):
+ raise HTTPException(
+ detail=f"Invalid cursor: {cursor}",
+ status_code=HTTP_400_BAD_REQUEST,
+ )
+ cursor_id = int(cursor_gid.node_id)
+
+ async with request.app.state.db() as session:
+ query = (
+ select(models.AnnotationConfig)
+ .order_by(models.AnnotationConfig.id.desc())
+ .limit(limit + 1) # overfetch by 1 to check if there are more results
+ )
+ if cursor_id is not None:
+ query = query.where(models.AnnotationConfig.id <= cursor_id)
+
+ result = await session.scalars(query)
+ configs = result.all()
+
+ next_cursor = None
+ if len(configs) == limit + 1:
+ last_config = configs[-1]
+ next_cursor = str(_get_annotation_global_id(last_config))
+ configs = configs[:-1]
+
+ return GetAnnotationConfigsResponseBody(
+ next_cursor=next_cursor,
+ data=[db_to_api_annotation_config(config) for config in configs],
+ )
+
+
+@router.get(
+ "/annotation_configs/{config_identifier}",
+ summary="Get an annotation configuration by ID or name",
+)
+async def get_annotation_config_by_name_or_id(
+ request: Request,
+ config_identifier: str = Path(..., description="ID or name of the annotation configuration"),
+) -> GetAnnotationConfigResponseBody:
+ async with request.app.state.db() as session:
+ query = select(models.AnnotationConfig)
+ # Try to interpret the identifier as an integer ID; if not, use it as a name.
+ try:
+ db_id = _get_annotation_config_db_id(config_identifier)
+ query = query.where(models.AnnotationConfig.id == db_id)
+ except ValueError:
+ query = query.where(models.AnnotationConfig.name == config_identifier)
+ config = await session.scalar(query)
+ if not config:
+ raise HTTPException(
+ status_code=HTTP_404_NOT_FOUND, detail="Annotation configuration not found"
+ )
+ return GetAnnotationConfigResponseBody(data=db_to_api_annotation_config(config))
+
+
+@router.post(
+ "/annotation_configs",
+ summary="Create an annotation configuration",
+)
+async def create_annotation_config(
+ request: Request,
+ data: CreateAnnotationConfigData,
+) -> CreateAnnotationConfigResponseBody:
+ input_config = data.root
+ _reserve_note_annotation_name(input_config)
+
+ try:
+ db_config = _to_db_annotation_config(input_config)
+ except ValueError as error:
+ raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=str(error))
+
+ async with request.app.state.db() as session:
+ annotation_config = models.AnnotationConfig(
+ name=input_config.name,
+ config=db_config,
+ )
+ session.add(annotation_config)
+ try:
+ await session.commit()
+ except (PostgreSQLIntegrityError, SQLiteIntegrityError):
+ raise HTTPException(
+ status_code=HTTP_409_CONFLICT,
+ detail="The name of the annotation configuration is already taken",
+ )
+ return CreateAnnotationConfigResponseBody(
+ data=db_to_api_annotation_config(annotation_config)
+ )
+
+
+@router.put(
+ "/annotation_configs/{config_id}",
+ summary="Update an annotation configuration",
+)
+async def update_annotation_config(
+ request: Request,
+ data: CreateAnnotationConfigData,
+ config_id: str = Path(..., description="ID of the annotation configuration"),
+) -> UpdateAnnotationConfigResponseBody:
+ input_config = data.root
+ _reserve_note_annotation_name(input_config)
+
+ config_gid = GlobalID.from_id(config_id)
+ if config_gid.type_name not in (
+ CategoricalAnnotationConfigType.__name__,
+ ContinuousAnnotationConfigType.__name__,
+ FreeformAnnotationConfigType.__name__,
+ ):
+ raise HTTPException(
+ status_code=HTTP_400_BAD_REQUEST, detail="Invalid annotation configuration ID"
+ )
+ config_rowid = int(config_gid.node_id)
+
+ try:
+ db_config = _to_db_annotation_config(input_config)
+ except ValueError as error:
+ raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=str(error))
+
+ async with request.app.state.db() as session:
+ existing_config = await session.get(models.AnnotationConfig, config_rowid)
+ if not existing_config:
+ raise HTTPException(
+ status_code=HTTP_404_NOT_FOUND, detail="Annotation configuration not found"
+ )
+
+ existing_config.name = input_config.name
+ existing_config.config = db_config
+
+ try:
+ await session.commit()
+ except (PostgreSQLIntegrityError, SQLiteIntegrityError):
+ raise HTTPException(
+ status_code=HTTP_409_CONFLICT,
+ detail="The name of the annotation configuration is already taken",
+ )
+
+ return UpdateAnnotationConfigResponseBody(data=db_to_api_annotation_config(existing_config))
+
+
+@router.delete(
+ "/annotation_configs/{config_id}",
+ summary="Delete an annotation configuration",
+)
+async def delete_annotation_config(
+ request: Request,
+ config_id: str = Path(..., description="ID of the annotation configuration"),
+) -> DeleteAnnotationConfigResponseBody:
+ config_gid = GlobalID.from_id(config_id)
+ if config_gid.type_name not in (
+ CategoricalAnnotationConfigType.__name__,
+ ContinuousAnnotationConfigType.__name__,
+ FreeformAnnotationConfigType.__name__,
+ ):
+ raise HTTPException(
+ status_code=HTTP_400_BAD_REQUEST, detail="Invalid annotation configuration ID"
+ )
+ config_rowid = int(config_gid.node_id)
+ async with request.app.state.db() as session:
+ stmt = (
+ delete(models.AnnotationConfig)
+ .where(models.AnnotationConfig.id == config_rowid)
+ .returning(models.AnnotationConfig)
+ )
+ annotation_config = await session.scalar(stmt)
+ if annotation_config is None:
+ raise HTTPException(
+ status_code=HTTP_404_NOT_FOUND, detail="Annotation configuration not found"
+ )
+ await session.commit()
+ return DeleteAnnotationConfigResponseBody(data=db_to_api_annotation_config(annotation_config))
+
+
+def _get_annotation_config_db_id(config_gid: str) -> int:
+ gid = GlobalID.from_id(config_gid)
+ type_name, node_id = gid.type_name, int(gid.node_id)
+ if type_name not in (
+ CategoricalAnnotationConfigType.__name__,
+ ContinuousAnnotationConfigType.__name__,
+ FreeformAnnotationConfigType.__name__,
+ ):
+ raise ValueError(f"Invalid annotation configuration ID: {config_gid}")
+ return node_id
+
+
+def _reserve_note_annotation_name(data: AnnotationConfigData) -> str:
+ name = data.name
+ if name == "note":
+ raise HTTPException(
+ status_code=HTTP_409_CONFLICT, detail="The name 'note' is reserved for span notes"
+ )
+ return name
+
+
+def _to_db_annotation_config(input_config: AnnotationConfigData) -> AnnotationConfigType:
+ if isinstance(input_config, ContinuousAnnotationConfigData):
+ return _to_db_continuous_annotation_config(input_config)
+ if isinstance(input_config, CategoricalAnnotationConfigData):
+ return _to_db_categorical_annotation_config(input_config)
+ if isinstance(input_config, FreeformAnnotationConfigData):
+ return _to_db_freeform_annotation_config(input_config)
+ assert_never(input_config)
+
+
+def _to_db_continuous_annotation_config(
+ input_config: ContinuousAnnotationConfigData,
+) -> ContinuousAnnotationConfigModel:
+ return ContinuousAnnotationConfigModel(
+ type=AnnotationType.CONTINUOUS.value,
+ description=input_config.description,
+ optimization_direction=input_config.optimization_direction,
+ lower_bound=input_config.lower_bound,
+ upper_bound=input_config.upper_bound,
+ )
+
+
+def _to_db_categorical_annotation_config(
+ input_config: CategoricalAnnotationConfigData,
+) -> CategoricalAnnotationConfigModel:
+ values = [
+ CategoricalAnnotationValueModel(label=value.label, score=value.score)
+ for value in input_config.values
+ ]
+ return CategoricalAnnotationConfigModel(
+ type=AnnotationType.CATEGORICAL.value,
+ description=input_config.description,
+ optimization_direction=input_config.optimization_direction,
+ values=values,
+ )
+
+
+def _to_db_freeform_annotation_config(
+ input_config: FreeformAnnotationConfigData,
+) -> FreeformAnnotationConfigModel:
+ return FreeformAnnotationConfigModel(
+ type=AnnotationType.FREEFORM.value,
+ description=input_config.description,
+ )
diff --git a/src/phoenix/server/api/routers/v1/annotations.py b/src/phoenix/server/api/routers/v1/annotations.py
new file mode 100644
index 0000000000..9a966bf1ab
--- /dev/null
+++ b/src/phoenix/server/api/routers/v1/annotations.py
@@ -0,0 +1,161 @@
+from __future__ import annotations
+
+import logging
+from datetime import datetime
+from typing import Literal, Optional
+
+from fastapi import APIRouter, HTTPException, Path, Query
+from sqlalchemy import exists, select
+from starlette.requests import Request
+from starlette.status import HTTP_200_OK, HTTP_404_NOT_FOUND, HTTP_422_UNPROCESSABLE_ENTITY
+from strawberry.relay import GlobalID
+
+from phoenix.db import models
+from phoenix.server.api.types.SpanAnnotation import SpanAnnotation as SpanAnnotationNodeType
+from phoenix.server.api.types.User import User as UserNodeType
+
+from .spans import SpanAnnotationData, SpanAnnotationResult
+from .utils import PaginatedResponseBody, _get_project_by_identifier, add_errors_to_responses
+
+logger = logging.getLogger(__name__)
+
+SPAN_ANNOTATION_NODE_NAME = SpanAnnotationNodeType.__name__
+USER_NODE_NAME = UserNodeType.__name__
+MAX_SPAN_IDS = 1_000
+
+router = APIRouter(tags=["annotations"])
+
+
+class SpanAnnotation(SpanAnnotationData):
+ id: str
+ created_at: datetime
+ updated_at: datetime
+ source: Literal["API", "APP"]
+ user_id: Optional[str]
+
+
+class SpanAnnotationsResponseBody(PaginatedResponseBody[SpanAnnotation]):
+ pass
+
+
+@router.get(
+ "/projects/{project_identifier}/span_annotations",
+ operation_id="listSpanAnnotationsBySpanIds",
+ summary="Get span annotations for a list of span_ids.",
+ status_code=HTTP_200_OK,
+ responses=add_errors_to_responses(
+ [
+ {"status_code": HTTP_404_NOT_FOUND, "description": "Project or spans not found"},
+ {"status_code": HTTP_422_UNPROCESSABLE_ENTITY, "description": "Invalid parameters"},
+ ]
+ ),
+)
+async def list_span_annotations(
+ request: Request,
+ project_identifier: str = Path(
+ description=(
+ "The project identifier: either project ID or project name. If using a project name as "
+ "the identifier, it cannot contain slash (/), question mark (?), or pound sign (#) "
+ "characters."
+ )
+ ),
+ span_ids: list[str] = Query(
+ ..., min_length=1, description="One or more span id to fetch annotations for"
+ ),
+ cursor: Optional[str] = Query(default=None, description="A cursor for pagination"),
+ limit: int = Query(
+ default=10,
+ gt=0,
+ le=10000,
+ description="The maximum number of annotations to return in a single request",
+ ),
+) -> SpanAnnotationsResponseBody:
+ span_ids = list({*span_ids})
+ if len(span_ids) > MAX_SPAN_IDS:
+ raise HTTPException(
+ status_code=HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Too many span_ids supplied: {len(span_ids)} (max {MAX_SPAN_IDS})",
+ )
+
+ async with request.app.state.db() as session:
+ project = await _get_project_by_identifier(session, project_identifier)
+ if not project:
+ raise HTTPException(
+ status_code=HTTP_404_NOT_FOUND,
+ detail=f"Project with identifier {project_identifier} not found",
+ )
+ stmt = (
+ select(models.Span.span_id, models.SpanAnnotation)
+ .join(models.Trace, models.Span.trace_rowid == models.Trace.id)
+ .join(models.Project, models.Trace.project_rowid == models.Project.id)
+ .join(models.SpanAnnotation, models.SpanAnnotation.span_rowid == models.Span.id)
+ .where(
+ models.Project.id == project.id,
+ models.Span.span_id.in_(span_ids),
+ )
+ .order_by(models.SpanAnnotation.id.desc())
+ .limit(limit + 1)
+ )
+
+ if cursor:
+ try:
+ cursor_id = int(GlobalID.from_id(cursor).node_id)
+ except ValueError:
+ raise HTTPException(
+ status_code=HTTP_422_UNPROCESSABLE_ENTITY,
+ detail="Invalid cursor value",
+ )
+ stmt = stmt.where(models.SpanAnnotation.id <= cursor_id)
+
+ rows: list[tuple[str, models.SpanAnnotation]] = [
+ r async for r in (await session.stream(stmt))
+ ]
+
+ next_cursor: Optional[str] = None
+ if len(rows) == limit + 1:
+ *rows, extra = rows
+ next_cursor = str(GlobalID(SPAN_ANNOTATION_NODE_NAME, str(extra[1].id)))
+
+ if not rows:
+ spans_exist = await session.scalar(
+ select(
+ exists().where(
+ models.Span.span_id.in_(span_ids),
+ models.Span.trace_rowid.in_(
+ select(models.Trace.id)
+ .join(models.Project)
+ .where(models.Project.id == project.id)
+ ),
+ )
+ )
+ )
+ if not spans_exist:
+ raise HTTPException(
+ detail="None of the supplied span_ids exist in this project",
+ status_code=HTTP_404_NOT_FOUND,
+ )
+
+ return SpanAnnotationsResponseBody(data=[], next_cursor=None)
+
+ data = [
+ SpanAnnotation(
+ id=str(GlobalID(SPAN_ANNOTATION_NODE_NAME, str(anno.id))),
+ span_id=span_id,
+ name=anno.name,
+ result=SpanAnnotationResult(
+ label=anno.label,
+ score=anno.score,
+ explanation=anno.explanation,
+ ),
+ metadata=anno.metadata_,
+ annotator_kind=anno.annotator_kind,
+ created_at=anno.created_at,
+ updated_at=anno.updated_at,
+ identifier=anno.identifier,
+ source=anno.source,
+ user_id=str(GlobalID(USER_NODE_NAME, str(anno.user_id))) if anno.user_id else None,
+ )
+ for span_id, anno in rows
+ ]
+
+ return SpanAnnotationsResponseBody(data=data, next_cursor=next_cursor)
diff --git a/src/phoenix/server/api/routers/v1/projects.py b/src/phoenix/server/api/routers/v1/projects.py
index 335defd613..b87c5ed04d 100644
--- a/src/phoenix/server/api/routers/v1/projects.py
+++ b/src/phoenix/server/api/routers/v1/projects.py
@@ -3,7 +3,6 @@
from fastapi import APIRouter, HTTPException, Path, Query
from pydantic import Field
from sqlalchemy import select
-from sqlalchemy.ext.asyncio import AsyncSession
from starlette.requests import Request
from starlette.status import (
HTTP_204_NO_CONTENT,
@@ -21,9 +20,9 @@
from phoenix.server.api.routers.v1.utils import (
PaginatedResponseBody,
ResponseBody,
+ _get_project_by_identifier,
add_errors_to_responses,
)
-from phoenix.server.api.types.node import from_global_id_with_expected_type
from phoenix.server.api.types.Project import Project as ProjectNodeType
router = APIRouter(tags=["projects"])
@@ -343,51 +342,3 @@ def _to_project_response(project: models.Project) -> Project:
name=project.name,
description=project.description,
)
-
-
-async def _get_project_by_identifier(
- session: AsyncSession,
- project_identifier: str,
-) -> models.Project:
- """
- Get a project by its ID or name.
-
- Args:
- session: The database session.
- project_identifier: The project ID or name.
-
- Returns:
- The project object.
-
- Raises:
- HTTPException: If the identifier format is invalid or the project is not found.
- """
- # Try to parse as a GlobalID first
- try:
- id_ = from_global_id_with_expected_type(
- GlobalID.from_id(project_identifier),
- ProjectNodeType.__name__,
- )
- except Exception:
- try:
- name = project_identifier
- except HTTPException:
- raise HTTPException(
- status_code=HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"Invalid project identifier format: {project_identifier}",
- )
- stmt = select(models.Project).filter_by(name=name)
- project = await session.scalar(stmt)
- if project is None:
- raise HTTPException(
- status_code=HTTP_404_NOT_FOUND,
- detail=f"Project with name {name} not found",
- )
- else:
- project = await session.get(models.Project, id_)
- if project is None:
- raise HTTPException(
- status_code=HTTP_404_NOT_FOUND,
- detail=f"Project with ID {project_identifier} not found",
- )
- return project
diff --git a/src/phoenix/server/api/routers/v1/spans.py b/src/phoenix/server/api/routers/v1/spans.py
index 08f125a0df..a1af9e94ac 100644
--- a/src/phoenix/server/api/routers/v1/spans.py
+++ b/src/phoenix/server/api/routers/v1/spans.py
@@ -1,12 +1,13 @@
+import warnings
from asyncio import get_running_loop
from collections.abc import AsyncIterator
from datetime import datetime, timezone
from secrets import token_urlsafe
-from typing import Any, Literal, Optional
+from typing import Annotated, Any, Literal, Optional
import pandas as pd
from fastapi import APIRouter, Header, HTTPException, Query
-from pydantic import Field
+from pydantic import AfterValidator, Field
from sqlalchemy import select
from starlette.requests import Request
from starlette.responses import Response, StreamingResponse
@@ -169,10 +170,16 @@ class SpanAnnotationResult(V1RoutesBaseModel):
)
-class SpanAnnotation(V1RoutesBaseModel):
+def _is_not_empty_string(identifier: Optional[str]) -> Optional[str]:
+ if identifier == "":
+ raise ValueError("Identifier must be a non-empty string or null")
+ return identifier
+
+
+class SpanAnnotationData(V1RoutesBaseModel):
span_id: str = Field(description="OpenTelemetry Span ID (hex format w/o 0x prefix)")
name: str = Field(description="The name of the annotation")
- annotator_kind: Literal["LLM", "HUMAN"] = Field(
+ annotator_kind: Literal["LLM", "CODE", "HUMAN"] = Field(
description="The kind of annotator used for the annotation"
)
result: Optional[SpanAnnotationResult] = Field(
@@ -181,6 +188,16 @@ class SpanAnnotation(V1RoutesBaseModel):
metadata: Optional[dict[str, Any]] = Field(
default=None, description="Metadata for the annotation"
)
+ identifier: Annotated[
+ Optional[str],
+ AfterValidator(_is_not_empty_string),
+ ] = Field(
+ default=None,
+ description=(
+ "The identifier of the annotation. "
+ "If provided, the annotation will be updated if it already exists."
+ ),
+ )
def as_precursor(self) -> Precursors.SpanAnnotation:
return Precursors.SpanAnnotation(
@@ -192,12 +209,15 @@ def as_precursor(self) -> Precursors.SpanAnnotation:
label=self.result.label if self.result else None,
explanation=self.result.explanation if self.result else None,
metadata_=self.metadata or {},
+ identifier=self.identifier or "",
+ source="API",
+ user_id=None,
),
)
-class AnnotateSpansRequestBody(RequestBody[list[SpanAnnotation]]):
- data: list[SpanAnnotation]
+class AnnotateSpansRequestBody(RequestBody[list[SpanAnnotationData]]):
+ data: list[SpanAnnotationData]
class InsertedSpanAnnotation(V1RoutesBaseModel):
@@ -211,7 +231,7 @@ class AnnotateSpansResponseBody(ResponseBody[list[InsertedSpanAnnotation]]):
@router.post(
"/span_annotations",
operation_id="annotateSpans",
- summary="Create or update span annotations",
+ summary="Create span annotations",
responses=add_errors_to_responses(
[{"status_code": HTTP_404_NOT_FOUND, "description": "Span not found"}]
),
@@ -225,7 +245,17 @@ async def annotate_spans(
) -> AnnotateSpansResponseBody:
if not request_body.data:
return AnnotateSpansResponseBody(data=[])
- precursors = [d.as_precursor() for d in request_body.data]
+ span_annotations = request_body.data
+ filtered_span_annotations = list(filter(lambda d: d.name != "note", span_annotations))
+ if len(filtered_span_annotations) != len(span_annotations):
+ warnings.warn(
+ (
+ "Span annotations with the name 'note' are not supported in this endpoint. "
+ "They will be ignored."
+ ),
+ UserWarning,
+ )
+ precursors = [d.as_precursor() for d in filtered_span_annotations]
if not sync:
await request.state.enqueue(*precursors)
return AnnotateSpansResponseBody(data=[])
@@ -254,7 +284,7 @@ async def annotate_spans(
values,
dialect=dialect,
table=models.SpanAnnotation,
- unique_by=("name", "span_rowid"),
+ unique_by=("name", "span_rowid", "identifier"),
).returning(models.SpanAnnotation.id)
)
inserted_ids.append(span_annotation_id)
diff --git a/src/phoenix/server/api/routers/v1/traces.py b/src/phoenix/server/api/routers/v1/traces.py
index 1d352b5f76..c0fe581364 100644
--- a/src/phoenix/server/api/routers/v1/traces.py
+++ b/src/phoenix/server/api/routers/v1/traces.py
@@ -10,7 +10,7 @@
ExportTraceServiceResponse,
)
from pydantic import Field
-from sqlalchemy import select
+from sqlalchemy import insert, select
from starlette.concurrency import run_in_threadpool
from starlette.datastructures import State
from starlette.requests import Request
@@ -23,8 +23,7 @@
from strawberry.relay import GlobalID
from phoenix.db import models
-from phoenix.db.helpers import SupportedSQLDialect
-from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
+from phoenix.db.insertion.helpers import as_kv
from phoenix.db.insertion.types import Precursors
from phoenix.server.dml_event import TraceAnnotationInsertEvent
from phoenix.trace.otel import decode_otlp_span
@@ -114,6 +113,13 @@ class TraceAnnotation(V1RoutesBaseModel):
metadata: Optional[dict[str, Any]] = Field(
default=None, description="Metadata for the annotation"
)
+ identifier: Optional[str] = Field(
+ default=None,
+ description=(
+ "The identifier of the annotation. "
+ "If provided, the annotation will be updated if it already exists."
+ ),
+ )
def as_precursor(self) -> Precursors.TraceAnnotation:
return Precursors.TraceAnnotation(
@@ -125,6 +131,9 @@ def as_precursor(self) -> Precursors.TraceAnnotation:
label=self.result.label if self.result else None,
explanation=self.result.explanation if self.result else None,
metadata_=self.metadata or {},
+ identifier=self.identifier,
+ source="APP",
+ user_id=None,
),
)
@@ -144,7 +153,7 @@ class AnnotateTracesResponseBody(ResponseBody[list[InsertedTraceAnnotation]]):
@router.post(
"/trace_annotations",
operation_id="annotateTraces",
- summary="Create or update trace annotations",
+ summary="Create trace annotations",
responses=add_errors_to_responses(
[{"status_code": HTTP_404_NOT_FOUND, "description": "Trace not found"}]
),
@@ -178,16 +187,10 @@ async def annotate_traces(
status_code=HTTP_404_NOT_FOUND,
)
inserted_ids = []
- dialect = SupportedSQLDialect(session.bind.dialect.name)
for p in precursors:
values = dict(as_kv(p.as_insertable(existing_traces[p.trace_id]).row))
trace_annotation_id = await session.scalar(
- insert_on_conflict(
- values,
- dialect=dialect,
- table=models.TraceAnnotation,
- unique_by=("name", "trace_rowid"),
- ).returning(models.TraceAnnotation.id)
+ insert(models.TraceAnnotation).values(**values).returning(models.TraceAnnotation.id)
)
inserted_ids.append(trace_annotation_id)
request.state.event_queue.put(TraceAnnotationInsertEvent(tuple(inserted_ids)))
diff --git a/src/phoenix/server/api/routers/v1/utils.py b/src/phoenix/server/api/routers/v1/utils.py
index 1cf558cc2c..e0ec19bccc 100644
--- a/src/phoenix/server/api/routers/v1/utils.py
+++ b/src/phoenix/server/api/routers/v1/utils.py
@@ -1,7 +1,19 @@
from typing import Any, Generic, Optional, TypedDict, TypeVar, Union
+from fastapi import HTTPException
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from starlette.status import (
+ HTTP_404_NOT_FOUND,
+ HTTP_422_UNPROCESSABLE_ENTITY,
+)
+from strawberry.relay import GlobalID
from typing_extensions import TypeAlias, assert_never
+from phoenix.db import models
+from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.api.types.Project import Project as ProjectNodeType
+
from .models import V1RoutesBaseModel
StatusCode: TypeAlias = int
@@ -93,3 +105,51 @@ def add_text_csv_content_to_responses(
"text/csv": {"schema": {"type": "string", "contentMediaType": "text/csv"}}
}
return output_responses
+
+
+async def _get_project_by_identifier(
+ session: AsyncSession,
+ project_identifier: str,
+) -> models.Project:
+ """
+ Get a project by its ID or name.
+
+ Args:
+ session: The database session.
+ project_identifier: The project ID or name.
+
+ Returns:
+ The project object.
+
+ Raises:
+ HTTPException: If the identifier format is invalid or the project is not found.
+ """
+ # Try to parse as a GlobalID first
+ try:
+ id_ = from_global_id_with_expected_type(
+ GlobalID.from_id(project_identifier),
+ ProjectNodeType.__name__,
+ )
+ except Exception:
+ try:
+ name = project_identifier
+ except HTTPException:
+ raise HTTPException(
+ status_code=HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Invalid project identifier format: {project_identifier}",
+ )
+ stmt = select(models.Project).filter_by(name=name)
+ project = await session.scalar(stmt)
+ if project is None:
+ raise HTTPException(
+ status_code=HTTP_404_NOT_FOUND,
+ detail=f"Project with name {name} not found",
+ )
+ else:
+ project = await session.get(models.Project, id_)
+ if project is None:
+ raise HTTPException(
+ status_code=HTTP_404_NOT_FOUND,
+ detail=f"Project with ID {project_identifier} not found",
+ )
+ return project
diff --git a/src/phoenix/server/api/types/Annotation.py b/src/phoenix/server/api/types/Annotation.py
index d2c541439b..4e26cb5f97 100644
--- a/src/phoenix/server/api/types/Annotation.py
+++ b/src/phoenix/server/api/types/Annotation.py
@@ -1,3 +1,4 @@
+from datetime import datetime
from typing import Optional
import strawberry
@@ -22,3 +23,9 @@ class Annotation:
description="The annotator's explanation for the annotation result (i.e. "
"score or label, or both) given to the subject."
)
+ created_at: datetime = strawberry.field(
+ description="The date and time when the annotation was created."
+ )
+ updated_at: datetime = strawberry.field(
+ description="The date and time when the annotation was last updated."
+ )
diff --git a/src/phoenix/server/api/types/AnnotationConfig.py b/src/phoenix/server/api/types/AnnotationConfig.py
new file mode 100644
index 0000000000..0f78ba629a
--- /dev/null
+++ b/src/phoenix/server/api/types/AnnotationConfig.py
@@ -0,0 +1,124 @@
+from typing import Annotated, Optional, Union
+
+import strawberry
+from strawberry.relay import Node, NodeID
+from typing_extensions import TypeAlias, assert_never
+
+from phoenix.db import models
+from phoenix.db.types.annotation_configs import (
+ AnnotationType,
+ OptimizationDirection,
+)
+from phoenix.db.types.annotation_configs import (
+ CategoricalAnnotationConfig as CategoricalAnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ ContinuousAnnotationConfig as ContinuousAnnotationConfigModel,
+)
+from phoenix.db.types.annotation_configs import (
+ FreeformAnnotationConfig as FreeformAnnotationConfigModel,
+)
+
+
+@strawberry.interface
+class AnnotationConfigBase:
+ name: str
+ description: Optional[str]
+ annotation_type: AnnotationType
+
+
+@strawberry.type
+class CategoricalAnnotationValue:
+ label: str
+ score: Optional[float]
+
+
+@strawberry.type
+class CategoricalAnnotationConfig(Node, AnnotationConfigBase):
+ id_attr: NodeID[int]
+ optimization_direction: OptimizationDirection
+ values: list[CategoricalAnnotationValue]
+
+
+@strawberry.type
+class ContinuousAnnotationConfig(Node, AnnotationConfigBase):
+ id_attr: NodeID[int]
+ optimization_direction: OptimizationDirection
+ lower_bound: Optional[float]
+ upper_bound: Optional[float]
+
+
+@strawberry.type
+class FreeformAnnotationConfig(Node, AnnotationConfigBase):
+ id_attr: NodeID[int]
+
+
+AnnotationConfig: TypeAlias = Annotated[
+ Union[CategoricalAnnotationConfig, ContinuousAnnotationConfig, FreeformAnnotationConfig],
+ strawberry.union("AnnotationConfig"),
+]
+
+
+def _to_gql_categorical_annotation_config(
+ annotation_config: models.AnnotationConfig,
+) -> CategoricalAnnotationConfig:
+ config = annotation_config.config
+ assert isinstance(config, CategoricalAnnotationConfigModel)
+ values = [
+ CategoricalAnnotationValue(
+ label=val.label,
+ score=val.score,
+ )
+ for val in config.values
+ ]
+ return CategoricalAnnotationConfig(
+ id_attr=annotation_config.id,
+ name=annotation_config.name,
+ annotation_type=config.type,
+ optimization_direction=config.optimization_direction,
+ description=config.description,
+ values=values,
+ )
+
+
+def _to_gql_continuous_annotation_config(
+ annotation_config: models.AnnotationConfig,
+) -> ContinuousAnnotationConfig:
+ config = annotation_config.config
+ assert isinstance(config, ContinuousAnnotationConfigModel)
+ return ContinuousAnnotationConfig(
+ id_attr=annotation_config.id,
+ name=annotation_config.name,
+ annotation_type=config.type,
+ optimization_direction=config.optimization_direction,
+ description=config.description,
+ lower_bound=config.lower_bound,
+ upper_bound=config.upper_bound,
+ )
+
+
+def _to_gql_freeform_annotation_config(
+ annotation_config: models.AnnotationConfig,
+) -> FreeformAnnotationConfig:
+ config = annotation_config.config
+ assert isinstance(config, FreeformAnnotationConfigModel)
+ return FreeformAnnotationConfig(
+ id_attr=annotation_config.id,
+ name=annotation_config.name,
+ annotation_type=config.type,
+ description=config.description,
+ )
+
+
+def to_gql_annotation_config(annotation_config: models.AnnotationConfig) -> AnnotationConfig:
+ """
+ Convert an SQLAlchemy AnnotationConfig instance to one of the GraphQL types.
+ """
+ config = annotation_config.config
+ if isinstance(config, ContinuousAnnotationConfigModel):
+ return _to_gql_continuous_annotation_config(annotation_config)
+ if isinstance(config, CategoricalAnnotationConfigModel):
+ return _to_gql_categorical_annotation_config(annotation_config)
+ if isinstance(config, FreeformAnnotationConfigModel):
+ return _to_gql_freeform_annotation_config(annotation_config)
+ assert_never(annotation_config)
diff --git a/src/phoenix/server/api/types/AnnotationSource.py b/src/phoenix/server/api/types/AnnotationSource.py
new file mode 100644
index 0000000000..472e0cbccb
--- /dev/null
+++ b/src/phoenix/server/api/types/AnnotationSource.py
@@ -0,0 +1,9 @@
+from enum import Enum
+
+import strawberry
+
+
+@strawberry.enum
+class AnnotationSource(Enum):
+ API = "API"
+ APP = "APP"
diff --git a/src/phoenix/server/api/types/AnnotationSummary.py b/src/phoenix/server/api/types/AnnotationSummary.py
index 06cd9bdb51..07cca36309 100644
--- a/src/phoenix/server/api/types/AnnotationSummary.py
+++ b/src/phoenix/server/api/types/AnnotationSummary.py
@@ -12,10 +12,9 @@
@strawberry.type
class AnnotationSummary:
+ name: str
df: Private[pd.DataFrame]
-
- def __init__(self, dataframe: pd.DataFrame) -> None:
- self.df = dataframe
+ simple_avg: Private[bool] = False
@strawberry.field
def count(self) -> int:
@@ -23,28 +22,43 @@ def count(self) -> int:
@strawberry.field
def labels(self) -> list[str]:
- return self.df.label.dropna().tolist()
+ unique_labels = self.df["label"].dropna().unique()
+ return [str(label) for label in unique_labels]
@strawberry.field
def label_fractions(self) -> list[LabelFraction]:
- if not (n := self.df.label_count.sum()):
- return []
+ if self.simple_avg:
+ if not (n := self.df.label_count.sum()):
+ return []
+ return [
+ LabelFraction(
+ label=cast(str, row.label),
+ fraction=row.label_count / n,
+ )
+ for row in self.df.loc[
+ self.df.label.notna(),
+ ["label", "label_count"],
+ ].itertuples()
+ ]
return [
LabelFraction(
- label=cast(str, row.label),
- fraction=row.label_count / n,
+ label=row.label,
+ fraction=float(row.avg_label_fraction),
)
- for row in self.df.loc[
- self.df.label.notna(),
- ["label", "label_count"],
- ].itertuples()
+ for row in self.df.itertuples()
+ if row.label is not None
]
@strawberry.field
def mean_score(self) -> Optional[float]:
- if not (n := self.df.score_count.sum()):
+ if self.simple_avg:
+ if not (n := self.df.score_count.sum()):
+ return None
+ return cast(float, self.df.score_sum.sum() / n)
+ avg_scores = self.df["avg_score"].dropna()
+ if avg_scores.empty:
return None
- return cast(float, self.df.score_sum.sum() / n)
+ return float(avg_scores.mean()) # all avg_scores should be the same
@strawberry.field
def score_count(self) -> int:
diff --git a/src/phoenix/server/api/types/AnnotatorKind.py b/src/phoenix/server/api/types/AnnotatorKind.py
index 3d18341a7c..c4ba497af0 100644
--- a/src/phoenix/server/api/types/AnnotatorKind.py
+++ b/src/phoenix/server/api/types/AnnotatorKind.py
@@ -14,3 +14,4 @@ class ExperimentRunAnnotatorKind(Enum):
class AnnotatorKind(Enum):
LLM = "LLM"
HUMAN = "HUMAN"
+ CODE = "CODE"
diff --git a/src/phoenix/server/api/types/CronExpression.py b/src/phoenix/server/api/types/CronExpression.py
new file mode 100644
index 0000000000..452d3df1a1
--- /dev/null
+++ b/src/phoenix/server/api/types/CronExpression.py
@@ -0,0 +1,15 @@
+from typing import NewType
+
+import strawberry
+
+from phoenix.db.types.trace_retention import TraceRetentionCronExpression
+
+
+def parse_value(value: str) -> str:
+ return TraceRetentionCronExpression.model_validate(value).root
+
+
+CronExpression = strawberry.scalar(
+ NewType("CronExpression", str),
+ parse_value=parse_value,
+)
diff --git a/src/phoenix/server/api/types/Evaluation.py b/src/phoenix/server/api/types/Evaluation.py
index ac4ba80ac2..3bbf806753 100644
--- a/src/phoenix/server/api/types/Evaluation.py
+++ b/src/phoenix/server/api/types/Evaluation.py
@@ -1,6 +1,5 @@
import strawberry
-import phoenix.trace.v1 as pb
from phoenix.db.models import DocumentAnnotation, TraceAnnotation
from .Annotation import Annotation
@@ -8,19 +7,6 @@
@strawberry.type
class TraceEvaluation(Annotation):
- @staticmethod
- def from_pb_evaluation(evaluation: pb.Evaluation) -> "TraceEvaluation":
- result = evaluation.result
- score = result.score.value if result.HasField("score") else None
- label = result.label.value if result.HasField("label") else None
- explanation = result.explanation.value if result.HasField("explanation") else None
- return TraceEvaluation(
- name=evaluation.name,
- score=score,
- label=label,
- explanation=explanation,
- )
-
@staticmethod
def from_sql_trace_annotation(annotation: TraceAnnotation) -> "TraceEvaluation":
return TraceEvaluation(
@@ -28,6 +14,8 @@ def from_sql_trace_annotation(annotation: TraceAnnotation) -> "TraceEvaluation":
score=annotation.score,
label=annotation.label,
explanation=annotation.explanation,
+ created_at=annotation.created_at,
+ updated_at=annotation.updated_at,
)
@@ -38,22 +26,6 @@ class DocumentEvaluation(Annotation):
"is collected as a list (even when ordering is not inherently meaningful)."
)
- @staticmethod
- def from_pb_evaluation(evaluation: pb.Evaluation) -> "DocumentEvaluation":
- result = evaluation.result
- score = result.score.value if result.HasField("score") else None
- label = result.label.value if result.HasField("label") else None
- explanation = result.explanation.value if result.HasField("explanation") else None
- document_retrieval_id = evaluation.subject_id.document_retrieval_id
- document_position = document_retrieval_id.document_position
- return DocumentEvaluation(
- name=evaluation.name,
- score=score,
- label=label,
- explanation=explanation,
- document_position=document_position,
- )
-
@staticmethod
def from_sql_document_annotation(annotation: DocumentAnnotation) -> "DocumentEvaluation":
return DocumentEvaluation(
@@ -62,4 +34,6 @@ def from_sql_document_annotation(annotation: DocumentAnnotation) -> "DocumentEva
label=annotation.label,
explanation=annotation.explanation,
document_position=annotation.document_position,
+ created_at=annotation.created_at,
+ updated_at=annotation.updated_at,
)
diff --git a/src/phoenix/server/api/types/Project.py b/src/phoenix/server/api/types/Project.py
index 1325eefc1e..e91e4c6fb7 100644
--- a/src/phoenix/server/api/types/Project.py
+++ b/src/phoenix/server/api/types/Project.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import operator
from datetime import datetime
-from typing import Any, ClassVar, Optional
+from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Optional
import strawberry
from aioitertools.itertools import islice
@@ -8,7 +10,7 @@
from sqlalchemy import desc, distinct, func, or_, select
from sqlalchemy.sql.elements import ColumnElement
from sqlalchemy.sql.expression import tuple_
-from strawberry import ID, UNSET, Private
+from strawberry import ID, UNSET, Private, lazy
from strawberry.relay import Connection, Node, NodeID
from strawberry.types import Info
from typing_extensions import assert_never
@@ -22,13 +24,16 @@
)
from phoenix.server.api.input_types.SpanSort import SpanSort, SpanSortConfig
from phoenix.server.api.input_types.TimeRange import TimeRange
+from phoenix.server.api.types.AnnotationConfig import AnnotationConfig, to_gql_annotation_config
from phoenix.server.api.types.AnnotationSummary import AnnotationSummary
from phoenix.server.api.types.DocumentEvaluationSummary import DocumentEvaluationSummary
from phoenix.server.api.types.pagination import (
+ ConnectionArgs,
Cursor,
CursorSortColumn,
CursorString,
connection_from_cursors_and_nodes,
+ connection_from_list,
)
from phoenix.server.api.types.ProjectSession import ProjectSession, to_gql_project_session
from phoenix.server.api.types.SortDir import SortDir
@@ -38,6 +43,8 @@
from phoenix.trace.dsl import SpanFilter
DEFAULT_PAGE_SIZE = 30
+if TYPE_CHECKING:
+ from phoenix.server.api.types.ProjectTraceRetentionPolicy import ProjectTraceRetentionPolicy
@strawberry.type
@@ -536,6 +543,47 @@ async def validate_span_filter_condition(self, condition: str) -> ValidationResu
error_message=e.msg,
)
+ @strawberry.field
+ async def annotation_configs(
+ self,
+ info: Info[Context, None],
+ first: Optional[int] = 50,
+ last: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ ) -> Connection[AnnotationConfig]:
+ args = ConnectionArgs(
+ first=first,
+ after=after if isinstance(after, CursorString) else None,
+ last=last,
+ before=before if isinstance(before, CursorString) else None,
+ )
+ async with info.context.db() as session:
+ annotation_configs = await session.stream_scalars(
+ select(models.AnnotationConfig)
+ .join(
+ models.ProjectAnnotationConfig,
+ models.AnnotationConfig.id
+ == models.ProjectAnnotationConfig.annotation_config_id,
+ )
+ .where(models.ProjectAnnotationConfig.project_id == self.project_rowid)
+ .order_by(models.AnnotationConfig.name)
+ )
+ data = [to_gql_annotation_config(config) async for config in annotation_configs]
+ return connection_from_list(data=data, args=args)
+
+ @strawberry.field
+ async def trace_retention_policy(
+ self,
+ info: Info[Context, None],
+ ) -> Annotated[ProjectTraceRetentionPolicy, lazy(".ProjectTraceRetentionPolicy")]:
+ from .ProjectTraceRetentionPolicy import ProjectTraceRetentionPolicy
+
+ id_ = await info.context.data_loaders.trace_retention_policy_id_by_project_id.load(
+ self.project_rowid
+ )
+ return ProjectTraceRetentionPolicy(id=id_)
+
INPUT_VALUE = SpanAttributes.INPUT_VALUE.split(".")
OUTPUT_VALUE = SpanAttributes.OUTPUT_VALUE.split(".")
diff --git a/src/phoenix/server/api/types/ProjectTraceRetentionPolicy.py b/src/phoenix/server/api/types/ProjectTraceRetentionPolicy.py
new file mode 100644
index 0000000000..fba0793f76
--- /dev/null
+++ b/src/phoenix/server/api/types/ProjectTraceRetentionPolicy.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+from typing import Annotated, Optional, Union
+
+import strawberry
+from strawberry import UNSET, Private
+from strawberry.relay import Connection, Node, NodeID
+from strawberry.types import Info
+from typing_extensions import TypeAlias, assert_never
+
+from phoenix.db import models
+from phoenix.db.types.trace_retention import MaxCountRule, MaxDaysOrCountRule, MaxDaysRule
+from phoenix.server.api.context import Context
+from phoenix.server.api.types.CronExpression import CronExpression
+from phoenix.server.api.types.pagination import ConnectionArgs, CursorString, connection_from_list
+from phoenix.server.api.types.Project import Project
+
+
+@strawberry.type
+class TraceRetentionRuleMaxDays:
+ max_days: float
+
+
+@strawberry.type
+class TraceRetentionRuleMaxCount:
+ max_count: int
+
+
+@strawberry.type
+class TraceRetentionRuleMaxDaysOrCount(TraceRetentionRuleMaxDays, TraceRetentionRuleMaxCount): ...
+
+
+TraceRetentionRule: TypeAlias = Annotated[
+ Union[TraceRetentionRuleMaxDays, TraceRetentionRuleMaxCount, TraceRetentionRuleMaxDaysOrCount],
+ strawberry.union("TraceRetentionRule"),
+]
+
+
+@strawberry.type
+class ProjectTraceRetentionPolicy(Node):
+ id: NodeID[int]
+ db_policy: Private[Optional[models.ProjectTraceRetentionPolicy]] = None
+
+ @strawberry.field
+ async def name(
+ self,
+ info: Info[Context, None],
+ ) -> str:
+ if self.db_policy:
+ value = self.db_policy.name
+ else:
+ value = await info.context.data_loaders.project_trace_retention_policy_fields.load(
+ (self.id, models.ProjectTraceRetentionPolicy.name),
+ )
+ return value
+
+ @strawberry.field
+ async def cron_expression(
+ self,
+ info: Info[Context, None],
+ ) -> CronExpression:
+ if self.db_policy:
+ value = self.db_policy.cron_expression
+ else:
+ value = await info.context.data_loaders.project_trace_retention_policy_fields.load(
+ (self.id, models.ProjectTraceRetentionPolicy.cron_expression),
+ )
+ return CronExpression(value.root)
+
+ @strawberry.field
+ async def rule(
+ self,
+ info: Info[Context, None],
+ ) -> TraceRetentionRule:
+ if self.db_policy:
+ value = self.db_policy.rule
+ else:
+ value = await info.context.data_loaders.project_trace_retention_policy_fields.load(
+ (self.id, models.ProjectTraceRetentionPolicy.rule),
+ )
+ if isinstance(value.root, MaxDaysRule):
+ return TraceRetentionRuleMaxDays(max_days=value.root.max_days)
+ if isinstance(value.root, MaxCountRule):
+ return TraceRetentionRuleMaxCount(max_count=value.root.max_count)
+ if isinstance(value.root, MaxDaysOrCountRule):
+ return TraceRetentionRuleMaxDaysOrCount(
+ max_days=value.root.max_days, max_count=value.root.max_count
+ )
+ assert_never(value.root)
+
+ @strawberry.field
+ async def projects(
+ self,
+ info: Info[Context, None],
+ first: Optional[int] = 100,
+ last: Optional[int] = UNSET,
+ after: Optional[CursorString] = UNSET,
+ before: Optional[CursorString] = UNSET,
+ ) -> Connection[Project]:
+ args = ConnectionArgs(
+ first=first,
+ after=after if isinstance(after, CursorString) else None,
+ last=last,
+ before=before if isinstance(before, CursorString) else None,
+ )
+ project_rowids = await info.context.data_loaders.projects_by_trace_retention_policy_id.load(
+ self.id
+ )
+ data = [Project(project_rowid=project_rowid) for project_rowid in project_rowids]
+ return connection_from_list(data=data, args=args)
diff --git a/src/phoenix/server/api/types/Span.py b/src/phoenix/server/api/types/Span.py
index 1457fe35e6..ccab2d8840 100644
--- a/src/phoenix/server/api/types/Span.py
+++ b/src/phoenix/server/api/types/Span.py
@@ -1,11 +1,14 @@
import json
from asyncio import gather
+from collections import defaultdict
from collections.abc import Mapping
+from dataclasses import asdict, dataclass
from datetime import datetime
from enum import Enum
from typing import TYPE_CHECKING, Any, Iterable, Optional, cast
import numpy as np
+import pandas as pd
import strawberry
from openinference.semconv.trace import SpanAttributes
from strawberry import ID, UNSET
@@ -21,10 +24,15 @@
get_dataset_example_output,
)
from phoenix.server.api.input_types.InvocationParameters import InvocationParameter
+from phoenix.server.api.input_types.SpanAnnotationFilter import (
+ SpanAnnotationFilter,
+ satisfies_filter,
+)
from phoenix.server.api.input_types.SpanAnnotationSort import (
SpanAnnotationColumn,
SpanAnnotationSort,
)
+from phoenix.server.api.types.AnnotationSummary import AnnotationSummary
from phoenix.server.api.types.DocumentRetrievalMetrics import DocumentRetrievalMetrics
from phoenix.server.api.types.Evaluation import DocumentEvaluation
from phoenix.server.api.types.ExampleRevisionInterface import ExampleRevision
@@ -490,11 +498,16 @@ async def span_annotations(
self,
info: Info[Context, None],
sort: Optional[SpanAnnotationSort] = UNSET,
+ filter: Optional[SpanAnnotationFilter] = None,
) -> list[SpanAnnotation]:
span_id = self.span_rowid
annotations = await info.context.data_loaders.span_annotations.load(span_id)
sort_key = SpanAnnotationColumn.name.value
sort_descending = False
+ if filter:
+ annotations = [
+ annotation for annotation in annotations if satisfies_filter(annotation, filter)
+ ]
if sort:
sort_key = sort.col.value
sort_descending = sort.dir is SortDir.desc
@@ -503,6 +516,71 @@ async def span_annotations(
)
return [to_gql_span_annotation(annotation) for annotation in annotations]
+ @strawberry.field(description=("Notes associated with the span.")) # type: ignore
+ async def span_notes(
+ self,
+ info: Info[Context, None],
+ ) -> list[SpanAnnotation]:
+ span_id = self.span_rowid
+ annotations = await info.context.data_loaders.span_annotations.load(span_id)
+ annotations = [annotation for annotation in annotations if annotation.name == "note"]
+ annotations.sort(key=lambda annotation: getattr(annotation, "created_at"), reverse=False)
+ return [to_gql_span_annotation(annotation) for annotation in annotations]
+
+ @strawberry.field(description="Summarizes each annotation (by name) associated with the span") # type: ignore
+ async def span_annotation_summaries(
+ self,
+ info: Info[Context, None],
+ filter: Optional[SpanAnnotationFilter] = None,
+ ) -> list[AnnotationSummary]:
+ """
+ Retrieves and summarizes annotations associated with this span.
+
+ This method aggregates annotation data by name and label, calculating metrics
+ such as count of occurrences and sum of scores. The results are organized
+ into a structured format that can be easily converted to a DataFrame.
+
+ Args:
+ info: GraphQL context information
+ filter: Optional filter to apply to annotations before processing
+
+ Returns:
+ A list of AnnotationSummary objects, each containing:
+ - name: The name of the annotation
+ - data: A list of dictionaries with label statistics
+ """
+ # Load all annotations for this span from the data loader
+ annotations = await info.context.data_loaders.span_annotations.load(self.span_rowid)
+
+ # Apply filter if provided to narrow down the annotations
+ if filter:
+ annotations = [
+ annotation for annotation in annotations if satisfies_filter(annotation, filter)
+ ]
+
+ @dataclass
+ class Metrics:
+ record_count: int = 0
+ label_count: int = 0
+ score_sum: float = 0
+ score_count: int = 0
+
+ summaries: defaultdict[str, defaultdict[Optional[str], Metrics]] = defaultdict(
+ lambda: defaultdict(Metrics)
+ )
+ for annotation in annotations:
+ metrics = summaries[annotation.name][annotation.label]
+ metrics.record_count += 1
+ metrics.label_count += int(annotation.label is not None)
+ metrics.score_sum += annotation.score or 0
+ metrics.score_count += int(annotation.score is not None)
+
+ result: list[AnnotationSummary] = []
+ for name, label_metrics in summaries.items():
+ rows = [{"label": label, **asdict(metrics)} for label, metrics in label_metrics.items()]
+ result.append(AnnotationSummary(name=name, df=pd.DataFrame(rows), simple_avg=True))
+ return result
+
@strawberry.field(
description="Evaluations of the documents associated with the span, e.g. "
"if the span is a RETRIEVER with a list of documents in its RETRIEVAL_DOCUMENTS "
diff --git a/src/phoenix/server/api/types/SpanAnnotation.py b/src/phoenix/server/api/types/SpanAnnotation.py
index e8c1b5a0ea..da8824b76f 100644
--- a/src/phoenix/server/api/types/SpanAnnotation.py
+++ b/src/phoenix/server/api/types/SpanAnnotation.py
@@ -4,19 +4,26 @@
from strawberry import Private
from strawberry.relay import GlobalID, Node, NodeID
from strawberry.scalars import JSON
+from strawberry.types import Info
from phoenix.db import models
+from phoenix.server.api.context import Context
from .Annotation import Annotation
+from .AnnotationSource import AnnotationSource
from .AnnotatorKind import AnnotatorKind
+from .User import User, to_gql_user
@strawberry.type
class SpanAnnotation(Node, Annotation):
id_attr: NodeID[int]
+ user_id: Private[Optional[int]]
annotator_kind: AnnotatorKind
metadata: JSON
span_rowid: Private[Optional[int]]
+ source: AnnotationSource
+ identifier: Optional[str]
@strawberry.field
async def span_id(self) -> GlobalID:
@@ -24,6 +31,18 @@ async def span_id(self) -> GlobalID:
return GlobalID(type_name=Span.__name__, node_id=str(self.span_rowid))
+ @strawberry.field
+ async def user(
+ self,
+ info: Info[Context, None],
+ ) -> Optional[User]:
+ if self.user_id is None:
+ return None
+ user = await info.context.data_loaders.users.load(self.user_id)
+ if user is None:
+ return None
+ return to_gql_user(user)
+
def to_gql_span_annotation(
annotation: models.SpanAnnotation,
@@ -33,6 +52,7 @@ def to_gql_span_annotation(
"""
return SpanAnnotation(
id_attr=annotation.id,
+ user_id=annotation.user_id,
span_rowid=annotation.span_rowid,
name=annotation.name,
annotator_kind=AnnotatorKind(annotation.annotator_kind),
@@ -40,4 +60,8 @@ def to_gql_span_annotation(
score=annotation.score,
explanation=annotation.explanation,
metadata=annotation.metadata_,
+ source=AnnotationSource(annotation.source),
+ identifier=annotation.identifier or None,
+ created_at=annotation.created_at,
+ updated_at=annotation.updated_at,
)
diff --git a/src/phoenix/server/api/types/Trace.py b/src/phoenix/server/api/types/Trace.py
index bde94f8877..3f4244a616 100644
--- a/src/phoenix/server/api/types/Trace.py
+++ b/src/phoenix/server/api/types/Trace.py
@@ -208,13 +208,13 @@ async def spans(
return connection_from_list(data=data, args=args)
@strawberry.field(description="Annotations associated with the trace.") # type: ignore
- async def span_annotations(
+ async def trace_annotations(
self,
info: Info[Context, None],
sort: Optional[TraceAnnotationSort] = None,
) -> list[TraceAnnotation]:
async with info.context.db() as session:
- stmt = select(models.TraceAnnotation).filter_by(span_rowid=self.trace_rowid)
+ stmt = select(models.TraceAnnotation).filter_by(trace_rowid=self.trace_rowid)
if sort:
sort_col = getattr(models.TraceAnnotation, sort.col.value)
if sort.dir is SortDir.desc:
diff --git a/src/phoenix/server/api/types/TraceAnnotation.py b/src/phoenix/server/api/types/TraceAnnotation.py
index 937d6f0950..2829746cc0 100644
--- a/src/phoenix/server/api/types/TraceAnnotation.py
+++ b/src/phoenix/server/api/types/TraceAnnotation.py
@@ -4,14 +4,20 @@
from strawberry import Private
from strawberry.relay import GlobalID, Node, NodeID
from strawberry.scalars import JSON
+from strawberry.types import Info
from phoenix.db import models
+from phoenix.server.api.context import Context
from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
+from .AnnotationSource import AnnotationSource
+from .User import User, to_gql_user
+
@strawberry.type
class TraceAnnotation(Node):
id_attr: NodeID[int]
+ user_id: Private[Optional[int]]
name: str
annotator_kind: AnnotatorKind
label: Optional[str]
@@ -19,6 +25,8 @@ class TraceAnnotation(Node):
explanation: Optional[str]
metadata: JSON
trace_rowid: Private[Optional[int]]
+ identifier: Optional[str]
+ source: AnnotationSource
@strawberry.field
async def trace_id(self) -> GlobalID:
@@ -26,6 +34,18 @@ async def trace_id(self) -> GlobalID:
return GlobalID(type_name=Trace.__name__, node_id=str(self.trace_rowid))
+ @strawberry.field
+ async def user(
+ self,
+ info: Info[Context, None],
+ ) -> Optional[User]:
+ if self.user_id is None:
+ return None
+ user = await info.context.data_loaders.users.load(self.user_id)
+ if user is None:
+ return None
+ return to_gql_user(user)
+
def to_gql_trace_annotation(
annotation: models.TraceAnnotation,
@@ -35,6 +55,7 @@ def to_gql_trace_annotation(
"""
return TraceAnnotation(
id_attr=annotation.id,
+ user_id=annotation.user_id,
trace_rowid=annotation.trace_rowid,
name=annotation.name,
annotator_kind=AnnotatorKind(annotation.annotator_kind),
@@ -42,4 +63,6 @@ def to_gql_trace_annotation(
score=annotation.score,
explanation=annotation.explanation,
metadata=annotation.metadata_,
+ identifier=annotation.identifier or None,
+ source=AnnotationSource(annotation.source),
)
diff --git a/src/phoenix/server/app.py b/src/phoenix/server/app.py
index 68c686c18c..bcf1d580de 100644
--- a/src/phoenix/server/app.py
+++ b/src/phoenix/server/app.py
@@ -88,6 +88,7 @@
NumChildSpansDataLoader,
NumSpansPerTraceDataLoader,
ProjectByNameDataLoader,
+ ProjectIdsByTraceRetentionPolicyIdDataLoader,
PromptVersionSequenceNumberDataLoader,
RecordCountDataLoader,
SessionIODataLoader,
@@ -103,6 +104,7 @@
TableFieldsDataLoader,
TokenCountDataLoader,
TraceByTraceIdsDataLoader,
+ TraceRetentionPolicyIdByProjectIdDataLoader,
TraceRootSpansDataLoader,
UserRolesDataLoader,
UsersDataLoader,
@@ -123,6 +125,7 @@
from phoenix.server.jwt_store import JwtStore
from phoenix.server.middleware.gzip import GZipMiddleware
from phoenix.server.oauth2 import OAuth2Clients
+from phoenix.server.retention import TraceDataSweeper
from phoenix.server.telemetry import initialize_opentelemetry_tracer_provider
from phoenix.server.types import (
CanGetLastUpdatedAt,
@@ -475,6 +478,7 @@ def _lifespan(
db: DbSessionFactory,
bulk_inserter: BulkInserter,
dml_event_handler: DmlEventHandler,
+ trace_data_sweeper: Optional[TraceDataSweeper],
token_store: Optional[TokenStore] = None,
tracer_provider: Optional["TracerProvider"] = None,
enable_prometheus: bool = False,
@@ -507,6 +511,8 @@ async def lifespan(_: FastAPI) -> AsyncIterator[dict[str, Any]]:
)
await stack.enter_async_context(grpc_server)
await stack.enter_async_context(dml_event_handler)
+ if trace_data_sweeper:
+ await stack.enter_async_context(trace_data_sweeper)
if scaffolder_config:
scaffolder = Scaffolder(
config=scaffolder_config,
@@ -633,6 +639,9 @@ def get_context() -> Context:
num_child_spans=NumChildSpansDataLoader(db),
num_spans_per_trace=NumSpansPerTraceDataLoader(db),
project_fields=TableFieldsDataLoader(db, models.Project),
+ projects_by_trace_retention_policy_id=ProjectIdsByTraceRetentionPolicyIdDataLoader(
+ db
+ ),
prompt_version_sequence_number=PromptVersionSequenceNumberDataLoader(db),
record_counts=RecordCountDataLoader(
db,
@@ -656,6 +665,12 @@ def get_context() -> Context:
),
trace_by_trace_ids=TraceByTraceIdsDataLoader(db),
trace_fields=TableFieldsDataLoader(db, models.Trace),
+ trace_retention_policy_id_by_project_id=TraceRetentionPolicyIdByProjectIdDataLoader(
+ db
+ ),
+ project_trace_retention_policy_fields=TableFieldsDataLoader(
+ db, models.ProjectTraceRetentionPolicy
+ ),
trace_root_spans=TraceRootSpansDataLoader(db),
project_by_name=ProjectByNameDataLoader(db),
users=UsersDataLoader(db),
@@ -817,6 +832,10 @@ def create_app(
cache_for_dataloaders=cache_for_dataloaders,
last_updated_at=last_updated_at,
)
+ trace_data_sweeper = TraceDataSweeper(
+ db=db,
+ dml_event_handler=dml_event_handler,
+ )
bulk_inserter = bulk_inserter_factory(
db,
enable_prometheus=enable_prometheus,
@@ -874,6 +893,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
read_only=read_only,
bulk_inserter=bulk_inserter,
dml_event_handler=dml_event_handler,
+ trace_data_sweeper=trace_data_sweeper,
token_store=token_store,
tracer_provider=tracer_provider,
enable_prometheus=enable_prometheus,
diff --git a/src/phoenix/server/retention.py b/src/phoenix/server/retention.py
new file mode 100644
index 0000000000..e342bff3b2
--- /dev/null
+++ b/src/phoenix/server/retention.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+from asyncio import create_task, gather, sleep
+from datetime import datetime, timedelta, timezone
+
+import sqlalchemy as sa
+from sqlalchemy.orm import joinedload
+
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+from phoenix.db.models import Project, ProjectTraceRetentionPolicy
+from phoenix.server.dml_event import SpanDeleteEvent
+from phoenix.server.dml_event_handler import DmlEventHandler
+from phoenix.server.types import DaemonTask, DbSessionFactory
+from phoenix.utilities import hour_of_week
+
+
+class TraceDataSweeper(DaemonTask):
+ def __init__(self, db: DbSessionFactory, dml_event_handler: DmlEventHandler):
+ super().__init__()
+ self._db = db
+ self._dml_event_handler = dml_event_handler
+
+ async def _run(self) -> None:
+ """Check hourly and apply policies."""
+ while self._running:
+ await self._sleep_until_next_hour()
+ if not (policies := await self._get_policies()):
+ continue
+ current_hour = self._current_hour()
+ if tasks := [
+ create_task(self._apply(policy))
+ for policy in policies
+ if self._should_apply(policy, current_hour)
+ ]:
+ await gather(*tasks, return_exceptions=True)
+
+ async def _get_policies(self) -> list[ProjectTraceRetentionPolicy]:
+ stmt = sa.select(ProjectTraceRetentionPolicy).options(
+ joinedload(ProjectTraceRetentionPolicy.projects).load_only(Project.id)
+ )
+ async with self._db() as session:
+ result = await session.scalars(stmt)
+            # Skip no-op policies whose rule is falsy (e.g. max_days == 0), since they would never delete anything.
+ return [policy for policy in result if bool(policy.rule)]
+
+ @staticmethod
+ def _now() -> datetime:
+ return datetime.now(timezone.utc)
+
+ def _current_hour(self) -> int:
+ return hour_of_week(self._now())
+
+ def _should_apply(self, policy: ProjectTraceRetentionPolicy, current_hour: int) -> bool:
+ if current_hour != policy.cron_expression.get_hour_of_prev_run():
+ return False
+ if policy.id != DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID and not policy.projects:
+ return False
+ return True
+
+ async def _apply(self, policy: ProjectTraceRetentionPolicy) -> None:
+ project_rowids = (
+ (
+ sa.select(Project.id)
+ .where(Project.trace_retention_policy_id.is_(None))
+ .scalar_subquery()
+ )
+ if policy.id == DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+ else [p.id for p in policy.projects]
+ )
+ async with self._db() as session:
+ result = await policy.rule.delete_traces(session, project_rowids)
+ self._dml_event_handler.put(SpanDeleteEvent(tuple(result)))
+
+ async def _sleep_until_next_hour(self) -> None:
+ next_hour = self._now().replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
+ await sleep((next_hour - self._now()).total_seconds())
diff --git a/src/phoenix/trace/trace_dataset.py b/src/phoenix/trace/trace_dataset.py
index 83509b7c2f..573b53f67e 100644
--- a/src/phoenix/trace/trace_dataset.py
+++ b/src/phoenix/trace/trace_dataset.py
@@ -1,4 +1,5 @@
import json
+import warnings
from collections.abc import Iterable, Iterator
from datetime import datetime
from pathlib import Path
@@ -141,6 +142,13 @@ def __init__(
evaluations for the spans in the dataset. If provided, the evaluations
can be materialized into a unified dataframe as annotations.
"""
+
+ warnings.warn(
+ "phoenix.TraceDataset is deprecated and will be removed in a future version.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
# Validate the the dataframe has required fields
if missing_columns := set(REQUIRED_COLUMNS) - set(dataframe.columns):
raise ValueError(
diff --git a/src/phoenix/utilities/__init__.py b/src/phoenix/utilities/__init__.py
index e69de29bb2..e001b8e18a 100644
--- a/src/phoenix/utilities/__init__.py
+++ b/src/phoenix/utilities/__init__.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from datetime import datetime
+
+
+def hour_of_week(dt: datetime) -> int:
+ """
+ Convert a datetime object to hour of week (0-167) where 0 is midnight Sunday UTC.
+
+ Args:
+ dt (datetime): The datetime to convert (assumed to be in UTC)
+
+ Returns:
+ int: Hour of week (0-167)
+ """
+    # datetime.weekday() uses Monday == 0; shift by one (mod 7) so Sunday == 0 as the docstring requires.
+ weekday = (dt.weekday() + 1) % 7
+ return (weekday * 24) + dt.hour
diff --git a/tests/__generated__/graphql/__init__.py b/tests/__generated__/graphql/__init__.py
index 3efadde4f2..f7e7d86736 100644
--- a/tests/__generated__/graphql/__init__.py
+++ b/tests/__generated__/graphql/__init__.py
@@ -65,6 +65,25 @@ class TimeSeries(BaseModel):
data: list[TimeSeriesDataPoint]
+class AddAnnotationConfigToProjectPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ project: Project
+
+
+class AnnotationConfigConnection(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ edges: list[AnnotationConfigEdge] = Field(...)
+ pageInfo: PageInfo = Field(...)
+
+
+class AnnotationConfigEdge(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ cursor: str = Field(...)
+ node: Union[
+ "CategoricalAnnotationConfig", "ContinuousAnnotationConfig", "FreeformAnnotationConfig"
+ ] = Field(...)
+
+
class AnnotationSummary(BaseModel):
model_config = ConfigDict(frozen=True)
count: int
@@ -137,6 +156,22 @@ class BoundedFloatInvocationParameter(InvocationParameterBase):
required: bool
+class CategoricalAnnotationConfig(Node):
+ model_config = ConfigDict(frozen=True)
+ annotationType: Literal["CATEGORICAL", "CONTINUOUS", "FREEFORM"]
+ description: Optional[str] = None
+ id: str = Field(...)
+ name: str
+ optimizationDirection: Literal["MAXIMIZE", "MINIMIZE"]
+ values: list[CategoricalAnnotationValue]
+
+
+class CategoricalAnnotationValue(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ label: str
+ score: Optional[float] = None
+
+
class ChatCompletionFunctionCall(BaseModel):
model_config = ConfigDict(frozen=True)
arguments: str
@@ -206,6 +241,32 @@ class Cluster(BaseModel):
primaryToCorpusRatio: Optional[float] = Field(default=None)
+class ContinuousAnnotationConfig(Node):
+ model_config = ConfigDict(frozen=True)
+ annotationType: Literal["CATEGORICAL", "CONTINUOUS", "FREEFORM"]
+ description: Optional[str] = None
+ id: str = Field(...)
+ lowerBound: Optional[float] = None
+ name: str
+ optimizationDirection: Literal["MAXIMIZE", "MINIMIZE"]
+ upperBound: Optional[float] = None
+
+
+class CreateCategoricalAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: CategoricalAnnotationConfig
+
+
+class CreateContinuousAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: ContinuousAnnotationConfig
+
+
+class CreateFreeformAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: FreeformAnnotationConfig
+
+
class CreateSystemApiKeyMutationPayload(BaseModel):
model_config = ConfigDict(frozen=True)
apiKey: SystemApiKey
@@ -319,6 +380,13 @@ class DbTableStats(BaseModel):
tableName: str
+class DeleteAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: Union[
+ "CategoricalAnnotationConfig", "ContinuousAnnotationConfig", "FreeformAnnotationConfig"
+ ]
+
+
class DeleteApiKeyMutationPayload(BaseModel):
model_config = ConfigDict(frozen=True)
apiKeyId: str
@@ -587,6 +655,14 @@ class FloatInvocationParameter(InvocationParameterBase):
required: bool
+class FreeformAnnotationConfig(Node):
+ model_config = ConfigDict(frozen=True)
+ annotationType: Literal["CATEGORICAL", "CONTINUOUS", "FREEFORM"]
+ description: Optional[str] = None
+ id: str = Field(...)
+ name: str
+
+
class FunctionCallChunk(ChatCompletionSubscriptionPayload):
model_config = ConfigDict(frozen=True)
arguments: str
@@ -752,6 +828,7 @@ class Point3D(BaseModel):
class Project(Node):
model_config = ConfigDict(frozen=True)
+ annotationConfigs: AnnotationConfigConnection
documentEvaluationNames: list[str] = Field(...)
documentEvaluationSummary: Optional[DocumentEvaluationSummary] = None
endTime: Optional[str] = None
@@ -775,6 +852,7 @@ class Project(Node):
traceAnnotationSummary: Optional[AnnotationSummary] = None
traceAnnotationsNames: list[str] = Field(...)
traceCount: int
+ traceRetentionPolicy: ProjectTraceRetentionPolicy
validateSpanFilterCondition: ValidationResult
@@ -818,6 +896,36 @@ class ProjectSessionEdge(BaseModel):
node: ProjectSession = Field(...)
+class ProjectTraceRetentionPolicy(Node):
+ model_config = ConfigDict(frozen=True)
+ cronExpression: str
+ id: str = Field(...)
+ name: str
+ projects: ProjectConnection
+ rule: Union[
+ "TraceRetentionRuleMaxCount",
+ "TraceRetentionRuleMaxDays",
+ "TraceRetentionRuleMaxDaysOrCount",
+ ]
+
+
+class ProjectTraceRetentionPolicyConnection(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ edges: list[ProjectTraceRetentionPolicyEdge] = Field(...)
+ pageInfo: PageInfo = Field(...)
+
+
+class ProjectTraceRetentionPolicyEdge(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ cursor: str = Field(...)
+ node: ProjectTraceRetentionPolicy = Field(...)
+
+
+class ProjectTraceRetentionPolicyMutationPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ node: ProjectTraceRetentionPolicy
+
+
class Prompt(Node):
model_config = ConfigDict(frozen=True)
createdAt: str
@@ -1272,6 +1380,22 @@ class TraceEdge(BaseModel):
node: Trace = Field(...)
+class TraceRetentionRuleMaxCount(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxCount: int
+
+
+class TraceRetentionRuleMaxDays(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxDays: float
+
+
+class TraceRetentionRuleMaxDaysOrCount(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxCount: int
+ maxDays: float
+
+
class UMAPPoint(BaseModel):
model_config = ConfigDict(frozen=True)
coordinates: Union["Point2D", "Point3D"]
@@ -1290,6 +1414,21 @@ class UMAPPoints(BaseModel):
referenceData: list[UMAPPoint]
+class UpdateCategoricalAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: CategoricalAnnotationConfig
+
+
+class UpdateContinuousAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: ContinuousAnnotationConfig
+
+
+class UpdateFreeformAnnotationConfigPayload(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfig: FreeformAnnotationConfig
+
+
class User(Node):
model_config = ConfigDict(frozen=True)
apiKeys: list[UserApiKey]
@@ -1342,6 +1481,12 @@ class ValidationResult(BaseModel):
isValid: bool
+class AddAnnotationConfigToProjectInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ annotationConfigId: str
+ projectId: str
+
+
class AddExamplesToDatasetInput(BaseModel):
model_config = ConfigDict(frozen=True)
datasetId: str
@@ -1358,6 +1503,12 @@ class AddSpansToDatasetInput(BaseModel):
spanIds: list[str]
+class CategoricalAnnotationValueInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ label: str
+ score: Optional[float] = None
+
+
class ChatCompletionInput(BaseModel):
model_config = ConfigDict(frozen=True)
apiKey: Optional[str] = None
@@ -1438,6 +1589,14 @@ class CreateApiKeyInput(BaseModel):
name: str
+class CreateCategoricalAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ description: Optional[str] = None
+ name: str
+ optimizationDirection: Literal["MAXIMIZE", "MINIMIZE"]
+ values: list[CategoricalAnnotationValueInput]
+
+
class CreateChatPromptInput(BaseModel):
model_config = ConfigDict(frozen=True)
description: Optional[str] = None
@@ -1452,6 +1611,15 @@ class CreateChatPromptVersionInput(BaseModel):
tags: Optional[list[SetPromptVersionTagInput]] = None
+class CreateContinuousAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ description: Optional[str] = None
+ lowerBound: Optional[float] = None
+ name: str
+ optimizationDirection: Literal["MAXIMIZE", "MINIMIZE"]
+ upperBound: Optional[float] = None
+
+
class CreateDatasetInput(BaseModel):
model_config = ConfigDict(frozen=True)
description: Optional[str] = None
@@ -1459,6 +1627,20 @@ class CreateDatasetInput(BaseModel):
name: str
+class CreateFreeformAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ description: Optional[str] = None
+ name: str
+
+
+class CreateProjectTraceRetentionPolicyInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ addProjects: Optional[list[str]] = None
+ cronExpression: str
+ name: str
+ rule: ProjectTraceRetentionRuleInput
+
+
class CreatePromptLabelInput(BaseModel):
model_config = ConfigDict(frozen=True)
description: Optional[str] = None
@@ -1550,6 +1732,11 @@ class DatasetVersionSort(BaseModel):
dir: Literal["asc", "desc"]
+class DeleteAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ configId: str
+
+
class DeleteAnnotationsInput(BaseModel):
model_config = ConfigDict(frozen=True)
annotationIds: list[str]
@@ -1577,6 +1764,11 @@ class DeleteExperimentsInput(BaseModel):
experimentIds: list[str]
+class DeleteProjectTraceRetentionPolicyInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ id: str
+
+
class DeletePromptInput(BaseModel):
model_config = ConfigDict(frozen=True)
promptId: str
@@ -1701,6 +1893,16 @@ class PatchDatasetInput(BaseModel):
name: Optional[str] = None
+class PatchProjectTraceRetentionPolicyInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ addProjects: Optional[list[str]] = None
+ cronExpression: Optional[str] = None
+ id: str
+ name: Optional[str] = None
+ removeProjects: Optional[list[str]] = None
+ rule: Optional[ProjectTraceRetentionRuleInput] = None
+
+
class PatchPromptInput(BaseModel):
model_config = ConfigDict(frozen=True)
description: str
@@ -1740,6 +1942,29 @@ class ProjectSessionSort(BaseModel):
dir: Literal["asc", "desc"]
+class ProjectTraceRetentionRuleInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxCount: Optional[ProjectTraceRetentionRuleMaxCountInput] = None
+ maxDays: Optional[ProjectTraceRetentionRuleMaxDaysInput] = None
+ maxDaysOrCount: Optional[ProjectTraceRetentionRuleMaxDaysOrCountInput] = None
+
+
+class ProjectTraceRetentionRuleMaxCountInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxCount: int
+
+
+class ProjectTraceRetentionRuleMaxDaysInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxDays: float
+
+
+class ProjectTraceRetentionRuleMaxDaysOrCountInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ maxCount: int
+ maxDays: float
+
+
class PromptChatTemplateInput(BaseModel):
model_config = ConfigDict(frozen=True)
messages: list[PromptMessageInput]
@@ -1845,3 +2070,29 @@ class UnsetPromptLabelInput(BaseModel):
model_config = ConfigDict(frozen=True)
promptId: str
promptLabelId: str
+
+
+class UpdateCategoricalAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ configId: str
+ description: Optional[str] = None
+ name: str
+ optimizationDirection: Literal["MAXIMIZE", "MINIMIZE"]
+ values: list[CategoricalAnnotationValueInput]
+
+
+class UpdateContinuousAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ configId: str
+ description: Optional[str] = None
+ lowerBound: Optional[float] = None
+ name: str
+ optimizationDirection: Literal["MAXIMIZE", "MINIMIZE"]
+ upperBound: Optional[float] = None
+
+
+class UpdateFreeformAnnotationConfigInput(BaseModel):
+ model_config = ConfigDict(frozen=True)
+ configId: str
+ description: Optional[str] = None
+ name: str
diff --git a/tests/integration/_helpers.py b/tests/integration/_helpers.py
index 86e9456112..ee5dc16f42 100644
--- a/tests/integration/_helpers.py
+++ b/tests/integration/_helpers.py
@@ -154,8 +154,9 @@ def gql(
self,
query: str,
variables: Optional[Mapping[str, Any]] = None,
+ operation_name: Optional[str] = None,
) -> tuple[dict[str, Any], Headers]:
- return _gql(self, query=query, variables=variables)
+ return _gql(self, query=query, variables=variables, operation_name=operation_name)
def create_user(
self,
@@ -287,6 +288,9 @@ def kind(self) -> _ApiKeyKind:
return self._kind
+class _AdminSecret(str): ...
+
+
class _Token(_String, ABC): ...
@@ -335,6 +339,7 @@ def refresh(self) -> _LoggedInUser:
_RoleOrUser = Union[UserRoleInput, _User]
_SecurityArtifact: TypeAlias = Union[
+ _AdminSecret,
_AccessToken,
_RefreshToken,
_LoggedInTokens,
@@ -624,6 +629,8 @@ def _httpx_client(
return _httpx_client(logged_in_user.tokens, headers, cookies, transport)
elif isinstance(auth, _ApiKey):
headers = {**(headers or {}), "authorization": f"Bearer {auth}"}
+ elif isinstance(auth, _AdminSecret):
+ headers = {**(headers or {}), "authorization": f"Bearer {auth}"}
elif auth is None:
pass
else:
@@ -742,8 +749,9 @@ def _gql(
*,
query: str,
variables: Optional[Mapping[str, Any]] = None,
+ operation_name: Optional[str] = None,
) -> tuple[dict[str, Any], Headers]:
- json_ = dict(query=query, variables=dict(variables or {}))
+ json_ = dict(query=query, variables=dict(variables or {}), operationName=operation_name)
resp = _httpx_client(auth).post("graphql", json=json_)
return _json(resp), resp.headers
diff --git a/tests/integration/auth/test_auth.py b/tests/integration/auth/test_auth.py
index 0b9f2508a3..1174b4a64a 100644
--- a/tests/integration/auth/test_auth.py
+++ b/tests/integration/auth/test_auth.py
@@ -1,3 +1,4 @@
+from asyncio import sleep
from collections import defaultdict
from collections.abc import Iterator, Sequence
from contextlib import AbstractContextManager
@@ -1272,3 +1273,336 @@ def test_authenticated_users_are_recorded_in_prompts(
user = version["promptVersion"]["user"]
assert user is not None
assert user["id"] == logged_in_user.gid
+
+
+class TestSpanAnnotations:
+ QUERY = """
+ mutation CreateSpanAnnotations($input: [CreateSpanAnnotationInput!]!) {
+ createSpanAnnotations(input: $input) {
+ spanAnnotations {
+ ...SpanAnnotationFields
+ }
+ }
+ }
+
+ mutation PatchSpanAnnotations($input: [PatchAnnotationInput!]!) {
+ patchSpanAnnotations(input: $input) {
+ spanAnnotations {
+ ...SpanAnnotationFields
+ }
+ }
+ }
+
+ mutation DeleteSpanAnnotations($input: DeleteAnnotationsInput!) {
+ deleteSpanAnnotations(input: $input) {
+ spanAnnotations {
+ ...SpanAnnotationFields
+ }
+ }
+ }
+
+ query GetSpanAnnotation($annotationId: GlobalID!) {
+ spanAnnotation: node(id: $annotationId) {
+ ... on SpanAnnotation {
+ ...SpanAnnotationFields
+ }
+ }
+ }
+
+ fragment SpanAnnotationFields on SpanAnnotation {
+ id
+ name
+ score
+ label
+ explanation
+ annotatorKind
+ metadata
+ source
+ identifier
+ spanId
+ user {
+ id
+ email
+ username
+ }
+ }
+ """
+
+ async def test_other_users_cannot_patch_and_only_creator_or_admin_can_delete(
+ self,
+ _spans: Sequence[ReadableSpan],
+ _get_user: _GetUser,
+ ) -> None:
+ annotation_creator = _get_user(_MEMBER)
+ logged_in_annotation_creator = annotation_creator.log_in()
+ member = _get_user(_MEMBER)
+ logged_in_member = member.log_in()
+ admin = _get_user(_ADMIN)
+ logged_in_admin = admin.log_in()
+
+ # Add spans
+ user_api_key = logged_in_annotation_creator.create_api_key()
+ headers = dict(authorization=f"Bearer {user_api_key}")
+ exporter = _http_span_exporter(headers=headers)
+ assert exporter.export(_spans) is SpanExportResult.SUCCESS
+ await sleep(0.1) # wait for spans to be exported and written to disk
+
+ # Create span annotation
+ span_gid = str(GlobalID("Span", "1"))
+ response, _ = logged_in_annotation_creator.gql(
+ query=self.QUERY,
+ operation_name="CreateSpanAnnotations",
+ variables={
+ "input": {
+ "spanId": span_gid,
+ "name": "span-annotation-name",
+ "annotatorKind": "HUMAN",
+ "label": "correct",
+ "score": 1,
+ "explanation": "explanation",
+ "metadata": {},
+ "identifier": "identifier",
+ "source": "APP",
+ }
+ },
+ )
+
+ span_annotations = response["data"]["createSpanAnnotations"]["spanAnnotations"]
+ assert len(span_annotations) == 1
+ original_span_annotation = span_annotations[0]
+ annotation_id = original_span_annotation["id"]
+
+ # Only the user who created the annotation can patch
+ span_gid = str(GlobalID("Span", "1"))
+ for user in [logged_in_member, logged_in_admin]:
+ with pytest.raises(RuntimeError) as exc_info:
+ response, _ = user.gql(
+ query=self.QUERY,
+ operation_name="PatchSpanAnnotations",
+ variables={
+ "input": {
+ "annotationId": annotation_id,
+ "name": "patched-span-annotation-name",
+ "annotatorKind": "LLM",
+ "label": "incorrect",
+ "score": 0,
+ "explanation": "patched-explanation",
+ "metadata": {"patched": "key"},
+ "identifier": "patched-identifier",
+ }
+ },
+ )
+ assert "At least one span annotation is not associated with the current user." in str(
+ exc_info.value
+ )
+
+ # Check that the annotation remains unchanged
+ response, _ = user.gql(
+ query=self.QUERY,
+ operation_name="GetSpanAnnotation",
+ variables={"annotationId": annotation_id},
+ )
+ span_annotation = response["data"]["spanAnnotation"]
+ assert span_annotation == original_span_annotation
+
+ # Member who did not create the annotation cannot delete
+ with pytest.raises(RuntimeError) as exc_info:
+ logged_in_member.gql(
+ query=self.QUERY,
+ operation_name="DeleteSpanAnnotations",
+ variables={
+ "input": {
+ "annotationIds": [annotation_id],
+ }
+ },
+ )
+ assert "At least one span annotation is not associated with the current user." in str(
+ exc_info.value
+ )
+
+ # Check that the annotation remains unchanged
+ response, _ = user.gql(
+ query=self.QUERY,
+ operation_name="GetSpanAnnotation",
+ variables={"annotationId": annotation_id},
+ )
+ span_annotation = response["data"]["spanAnnotation"]
+ assert span_annotation == original_span_annotation
+
+ # Admin can delete
+ response, _ = logged_in_admin.gql(
+ query=self.QUERY,
+ operation_name="DeleteSpanAnnotations",
+ variables={
+ "input": {
+ "annotationIds": [annotation_id],
+ }
+ },
+ )
+
+
+class TestTraceAnnotations:
+ QUERY = """
+ mutation CreateTraceAnnotations($input: [CreateTraceAnnotationInput!]!) {
+ createTraceAnnotations(input: $input) {
+ traceAnnotations {
+ ...TraceAnnotationFields
+ }
+ }
+ }
+
+ mutation PatchTraceAnnotations($input: [PatchAnnotationInput!]!) {
+ patchTraceAnnotations(input: $input) {
+ traceAnnotations {
+ ...TraceAnnotationFields
+ }
+ }
+ }
+
+ mutation DeleteTraceAnnotations($input: DeleteAnnotationsInput!) {
+ deleteTraceAnnotations(input: $input) {
+ traceAnnotations {
+ ...TraceAnnotationFields
+ }
+ }
+ }
+
+ query GetTraceAnnotation($annotationId: GlobalID!) {
+ traceAnnotation: node(id: $annotationId) {
+ ... on TraceAnnotation {
+ ...TraceAnnotationFields
+ }
+ }
+ }
+
+ fragment TraceAnnotationFields on TraceAnnotation {
+ id
+ name
+ score
+ label
+ explanation
+ annotatorKind
+ metadata
+ source
+ identifier
+ traceId
+ user {
+ id
+ email
+ username
+ }
+ }
+ """
+
+ async def test_other_users_cannot_patch_and_only_creator_or_admin_can_delete(
+ self,
+ _spans: Sequence[ReadableSpan],
+ _get_user: _GetUser,
+ ) -> None:
+ annotation_creator = _get_user(_MEMBER)
+ logged_in_annotation_creator = annotation_creator.log_in()
+ member = _get_user(_MEMBER)
+ logged_in_member = member.log_in()
+ admin = _get_user(_ADMIN)
+ logged_in_admin = admin.log_in()
+
+ # Add spans
+ user_api_key = logged_in_annotation_creator.create_api_key()
+ headers = dict(authorization=f"Bearer {user_api_key}")
+ exporter = _http_span_exporter(headers=headers)
+ assert exporter.export(_spans) is SpanExportResult.SUCCESS
+ await sleep(0.1) # wait for spans to be exported and written to disk
+
+ # Create trace annotation
+ trace_gid = str(GlobalID("Trace", "1"))
+ response, _ = logged_in_annotation_creator.gql(
+ query=self.QUERY,
+ operation_name="CreateTraceAnnotations",
+ variables={
+ "input": {
+ "traceId": trace_gid,
+ "name": "trace-annotation-name",
+ "annotatorKind": "HUMAN",
+ "label": "correct",
+ "score": 1,
+ "explanation": "explanation",
+ "metadata": {},
+ "identifier": "identifier",
+ "source": "APP",
+ }
+ },
+ )
+
+ trace_annotations = response["data"]["createTraceAnnotations"]["traceAnnotations"]
+ assert len(trace_annotations) == 1
+ original_trace_annotation = trace_annotations[0]
+ annotation_id = original_trace_annotation["id"]
+
+ # Only the user who created the annotation can patch
+ trace_gid = str(GlobalID("Trace", "1"))
+ for user in [logged_in_member, logged_in_admin]:
+ with pytest.raises(RuntimeError) as exc_info:
+ response, _ = user.gql(
+ query=self.QUERY,
+ operation_name="PatchTraceAnnotations",
+ variables={
+ "input": {
+ "annotationId": annotation_id,
+ "name": "patched-trace-annotation-name",
+ "annotatorKind": "LLM",
+ "label": "incorrect",
+ "score": 0,
+ "explanation": "patched-explanation",
+ "metadata": {"patched": "key"},
+ "identifier": "patched-identifier",
+ }
+ },
+ )
+ assert "At least one trace annotation is not associated with the current user." in str(
+ exc_info.value
+ )
+
+ # Check that the annotation remains unchanged
+ response, _ = user.gql(
+ query=self.QUERY,
+ operation_name="GetTraceAnnotation",
+ variables={"annotationId": annotation_id},
+ )
+ trace_annotation = response["data"]["traceAnnotation"]
+ assert trace_annotation == original_trace_annotation
+
+ # Member who did not create the annotation cannot delete
+ with pytest.raises(RuntimeError) as exc_info:
+ logged_in_member.gql(
+ query=self.QUERY,
+ operation_name="DeleteTraceAnnotations",
+ variables={
+ "input": {
+ "annotationIds": [annotation_id],
+ }
+ },
+ )
+ assert (
+ "At least one trace annotation is not associated with the current user "
+ "and the current user is not an admin." in str(exc_info.value)
+ )
+
+ # Check that the annotation remains unchanged
+ response, _ = user.gql(
+ query=self.QUERY,
+ operation_name="GetTraceAnnotation",
+ variables={"annotationId": annotation_id},
+ )
+ trace_annotation = response["data"]["traceAnnotation"]
+ assert trace_annotation == original_trace_annotation
+
+ # Admin can delete
+ response, _ = logged_in_admin.gql(
+ query=self.QUERY,
+ operation_name="DeleteTraceAnnotations",
+ variables={
+ "input": {
+ "annotationIds": [annotation_id],
+ }
+ },
+ )
diff --git a/tests/integration/client/__init__.py b/tests/integration/client/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/client/conftest.py b/tests/integration/client/conftest.py
new file mode 100644
index 0000000000..2e8d590d56
--- /dev/null
+++ b/tests/integration/client/conftest.py
@@ -0,0 +1,87 @@
+import os
+from collections.abc import Iterator
+from contextlib import ExitStack
+from secrets import token_hex
+from time import sleep
+from typing import Any
+from unittest import mock
+
+import pytest
+from opentelemetry.sdk.environment_variables import (
+ OTEL_EXPORTER_OTLP_TRACES_HEADERS,
+)
+from opentelemetry.sdk.trace.export import SpanExportResult
+from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
+from opentelemetry.trace import format_span_id
+from typing_extensions import TypeAlias
+
+from .._helpers import _AdminSecret, _gql, _grpc_span_exporter, _server, _start_span
+
+
+@pytest.fixture(scope="package")
+def _admin_secret() -> _AdminSecret:
+ return _AdminSecret(token_hex(16))
+
+
+@pytest.fixture(scope="package")
+def _app(
+ _ports: Iterator[int],
+ _env_phoenix_sql_database_url: Any,
+ _admin_secret: _AdminSecret,
+) -> Iterator[None]:
+ values = (
+ ("PHOENIX_ENABLE_AUTH", "true"),
+ ("PHOENIX_DISABLE_RATE_LIMIT", "true"),
+ ("PHOENIX_SECRET", token_hex(16)),
+ ("PHOENIX_ADMIN_SECRET", str(_admin_secret)),
+ (OTEL_EXPORTER_OTLP_TRACES_HEADERS, f"Authorization=Bearer {_admin_secret}"),
+ )
+ with ExitStack() as stack:
+ stack.enter_context(mock.patch.dict(os.environ, values))
+ stack.enter_context(_server())
+ yield
+
+
+SpanId: TypeAlias = str
+SpanGlobalId: TypeAlias = str
+
+
+@pytest.fixture(autouse=True, scope="package")
+def _span_ids(
+ _app: Any,
+ _admin_secret: _AdminSecret,
+) -> tuple[tuple[SpanId, SpanGlobalId], tuple[SpanId, SpanGlobalId]]:
+ memory = InMemorySpanExporter()
+ for _ in range(2):
+ _start_span(project_name="default", exporter=memory).end()
+ assert (spans := memory.get_finished_spans())
+ assert _grpc_span_exporter().export(spans) is SpanExportResult.SUCCESS
+ sleep(0.1)
+ span1, span2 = spans
+ assert (sc1 := span1.get_span_context()) # type: ignore[no-untyped-call]
+ span_id1 = format_span_id(sc1.span_id)
+ assert (sc2 := span2.get_span_context()) # type: ignore[no-untyped-call]
+ span_id2 = format_span_id(sc2.span_id)
+ res, _ = _gql(_admin_secret, query=QUERY, operation_name="GetSpanIds")
+ gids = {e["node"]["spanId"]: e["node"]["id"] for e in res["data"]["node"]["spans"]["edges"]}
+ assert span_id1 in gids
+ assert span_id2 in gids
+ return (span_id1, gids[span_id1]), (span_id2, gids[span_id2])
+
+
+QUERY = """
+query GetSpanIds {
+ node(id: "UHJvamVjdDox") {
+ ... on Project {
+ spans {
+ edges {
+ node {
+ id
+ spanId
+ }
+ }
+ }
+ }
+ }
+}
+"""
diff --git a/tests/integration/client/test_annotations.py b/tests/integration/client/test_annotations.py
new file mode 100644
index 0000000000..e4bae3899d
--- /dev/null
+++ b/tests/integration/client/test_annotations.py
@@ -0,0 +1,721 @@
+# pyright: reportPrivateUsage=false
+from __future__ import annotations
+
+from secrets import token_bytes, token_hex
+from typing import Literal, Optional
+
+import pandas as pd
+import pytest
+from phoenix.client.__generated__ import v1
+from typing_extensions import TypeAlias
+
+from .._helpers import _ADMIN, _MEMBER, _await_or_return, _GetUser, _gql, _RoleOrUser
+
+# Type aliases for better readability
+SpanId: TypeAlias = str
+SpanGlobalId: TypeAlias = str
+
+
+class TestClientForSpanAnnotations:
+ """Integration tests for the Span Annotations client REST endpoints.
+
+ This test suite verifies the functionality of the Span Annotations API,
+ focusing on both single and batch annotation operations. The tests cover:
+ - Creating and updating annotations using the UPSERT pattern
+ - Batch creation and updating of annotations across multiple spans
+ - Proper handling of annotation fields (annotation_name, identifier, label, score, explanation)
+ - Synchronous and asynchronous clients work properly
+ - Role-based access control (admin vs member permissions)
+ - DataFrame-based annotation operations with different configurations:
+ * Using span_id as a column
+ * Using span_id as the index
+ * Using global annotator_kind
+
+ The test suite consists of four main test methods:
+ 1. test_add_span_annotation: Tests single annotation operations and UPSERT functionality
+ 2. test_log_span_annotations: Tests batch annotation operations and UPSERT functionality
+ 3. test_log_span_annotations_dataframe: Tests DataFrame-based operations; 4. test_zero_score_annotation: Tests that a score of 0 is recorded and not treated as falsey
+
+ Example:
+ ```python
+ from phoenix.client import Client
+
+ client = Client()
+ annotation = client.annotations.add_span_annotation(
+ annotation_name="sentiment",
+ span_id="abc123",
+ label="positive",
+ score=0.9,
+ )
+ ```
+ """ # noqa: E501
+
+ # GraphQL query to retrieve span annotations for a given span ID
+ query = """
+ query GetSpanAnnotations($id: GlobalID!) {
+ node (id: $id) {
+ ... on Span {
+ spanAnnotations {
+ id
+ name
+ source
+ identifier
+ annotatorKind
+ metadata
+ label
+ score
+ explanation
+ }
+ }
+ }
+ }
+ """
+
+ @pytest.mark.parametrize("is_async", [True, False])
+ @pytest.mark.parametrize("role_or_user", [_MEMBER, _ADMIN])
+ async def test_add_span_annotation(
+ self,
+ is_async: bool,
+ role_or_user: _RoleOrUser,
+ _span_ids: tuple[tuple[SpanId, SpanGlobalId], tuple[SpanId, SpanGlobalId]],
+ _get_user: _GetUser,
+ monkeypatch: pytest.MonkeyPatch,
+ ) -> None:
+ """Test single span annotation operations.
+
+ This test verifies that:
+ 1. Single span annotations can be created with name, label, score, and explanation
+ 2. Annotations are correctly associated with the specified span
+ 3. All annotation fields are stored and retrieved correctly
+ 4. Both synchronous and asynchronous clients work properly
+ 5. Both admin and member users can create annotations
+ 6. The API supports UPSERT operations (update or insert) for annotations
+ 7. Annotations maintain their ID across updates (UPSERT)
+ """ # noqa: E501
+ # ============================================================================
+ # Setup
+ # ============================================================================
+ # Extract OTEL span ID and graphql Global ID from the fixture
+ (span_id1, span_gid1), _ = _span_ids
+
+ # Set up test environment with logged-in user
+ u = _get_user(role_or_user).log_in()
+ monkeypatch.setenv("PHOENIX_API_KEY", u.create_api_key())
+
+ # Import appropriate client based on test parameter
+ from phoenix.client import AsyncClient
+ from phoenix.client import Client as SyncClient
+
+ Client = AsyncClient if is_async else SyncClient
+
+ # ============================================================================
+ # Single Annotation UPSERT Test
+ # ============================================================================
+ # Test UPSERT functionality by adding multiple annotations with the same name.
+ # This verifies that the API correctly updates existing annotations rather
+ # than creating new annotations.
+ annotation_name = token_hex(16)
+ existing_gid: Optional[str] = None
+
+ # Two iterations: the first creates the annotation, the second updates it (UPSERT)
+ for j in range(2):
+ # Generate random test data for the annotation
+ score = int.from_bytes(token_bytes(4), byteorder="big")
+ label = token_hex(16)
+ explanation = token_hex(16)
+ metadata = {token_hex(16): token_hex(16)}
+ # Create the span annotation
+ await _await_or_return(
+ Client().annotations.add_span_annotation(
+ annotation_name=annotation_name,
+ span_id=span_id1,
+ annotator_kind="LLM", # Test non-default annotator_kind
+ label=label,
+ score=score,
+ explanation=explanation,
+ metadata=metadata,
+ sync=True,
+ ),
+ )
+
+ # Verify the annotation was created correctly by querying the GraphQL API
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gid1},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {
+ (anno["label"], anno["score"], anno["explanation"]): anno
+ for anno in res["data"]["node"]["spanAnnotations"]
+ }
+
+ # Verify the annotation exists with the correct fields
+ key = (label, score, explanation)
+ assert key in annotations, "Created annotation should be present in span annotations" # noqa: E501
+
+ # Get the annotation and verify all fields match what was provided
+ anno = annotations[key]
+ assert anno["name"] == annotation_name, "Annotation name should match input" # noqa: E501
+ assert anno["source"] == "API", "Annotation source should be API" # noqa: E501
+ assert anno["annotatorKind"] == "LLM", "Annotation annotator_kind should be LLM" # noqa: E501
+ assert anno["metadata"] == metadata, "Annotation metadata should match input" # noqa: E501
+ if j == 0:
+ existing_gid = anno["id"]
+ else:
+ assert (
+ anno["id"] == existing_gid
+ ), "Annotation ID should remain the same after update" # noqa: E501
+
+ @pytest.mark.parametrize("is_async", [True, False])
+ @pytest.mark.parametrize("role_or_user", [_MEMBER, _ADMIN])
+ async def test_log_span_annotations(
+ self,
+ is_async: bool,
+ role_or_user: _RoleOrUser,
+ _span_ids: tuple[tuple[SpanId, SpanGlobalId], tuple[SpanId, SpanGlobalId]],
+ _get_user: _GetUser,
+ monkeypatch: pytest.MonkeyPatch,
+ ) -> None:
+ """Test batch span annotation operations.
+
+ This test verifies that:
+ 1. Multiple annotations can be created in a single batch operation
+ 2. Annotations are correctly associated with their respective spans
+ 3. All annotation fields are stored and retrieved correctly
+ 4. Both synchronous and asynchronous clients work properly
+ 5. Both admin and member users can create annotations
+ 6. The API supports UPSERT operations for batch annotations
+ 7. Annotations maintain their ID across updates (UPSERT)
+ 8. Batch operations correctly handle multiple spans
+ """ # noqa: E501
+ # ============================================================================
+ # Setup
+ # ============================================================================
+ # Extract OTEL span ID and graphql Global ID from the fixture
+ (span_id1, span_gid1), (span_id2, span_gid2) = _span_ids
+
+ # Set up test environment with logged-in user
+ u = _get_user(role_or_user).log_in()
+ monkeypatch.setenv("PHOENIX_API_KEY", u.create_api_key())
+
+ # Import appropriate client based on test parameter
+ from phoenix.client import AsyncClient
+ from phoenix.client import Client as SyncClient
+
+ Client = AsyncClient if is_async else SyncClient
+
+ # ============================================================================
+ # Batch Annotation Test
+ # ============================================================================
+ # Test batch annotation creation and updates using log_span_annotations
+ # Create annotations for both spans in a single batch operation
+
+ # Setup test data for batch operations
+ span_ids = [span_id1, span_id2]
+ span_gids = [span_gid1, span_gid2]
+ annotation_names = [token_hex(16), token_hex(16)]
+ identifiers = [token_hex(16), token_hex(16)]
+ existing_gids: list[Optional[str]] = [None, None]
+
+ # Two iterations: First creates annotations, second updates them
+ for i in range(2):
+ # Generate new random values for each iteration
+ labels = [token_hex(16), token_hex(16)]
+ scores = [
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ ]
+ explanations = [token_hex(16), token_hex(16)]
+ metadata = [{token_hex(16): token_hex(16)} for _ in range(2)]
+
+ # Create annotation data for both spans
+ span_annotations: list[v1.SpanAnnotationData] = [
+ {
+ "name": annotation_names[i],
+ "span_id": span_ids[i],
+ "annotator_kind": "CODE", # Test non-default annotator_kind
+ "identifier": identifiers[i],
+ "metadata": metadata[i],
+ "result": {
+ "label": labels[i],
+ "score": scores[i],
+ "explanation": explanations[i],
+ },
+ }
+ for i in range(len(span_ids))
+ ]
+
+ # Log the batch annotations
+ result = await _await_or_return(
+ Client().annotations.log_span_annotations(
+ span_annotations=span_annotations,
+ sync=True,
+ ),
+ )
+
+ # Verify the batch operation returned the expected number of results
+ assert result
+ assert len(result) == 2, "Batch operation should return results for both annotations" # noqa: E501
+
+ # Verify each annotation in the batch
+ for j in range(2):
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gids[j]},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {
+ (anno["label"], anno["score"], anno["explanation"]): anno
+ for anno in res["data"]["node"]["spanAnnotations"]
+ }
+ key = (labels[j], scores[j], explanations[j])
+
+ # Verify the batch annotation exists
+ assert (
+ key in annotations
+ ), f"Batch annotation {j+1} should be present in span annotations" # noqa: E501
+
+ # Verify the batch annotation fields match what was provided
+ anno = annotations[key]
+ assert (
+ anno["name"] == annotation_names[j]
+ ), f"Batch annotation {j+1} name should match input" # noqa: E501
+ assert anno["source"] == "API", f"Batch annotation {j+1} source should be API" # noqa: E501
+ assert (
+ anno["annotatorKind"] == "CODE"
+ ), f"Batch annotation {j+1} annotator_kind should be CODE" # noqa: E501
+ assert (
+ anno["metadata"] == metadata[j]
+ ), f"Batch annotation {j+1} metadata should match input" # noqa: E501
+ assert (
+ anno["identifier"] == identifiers[j]
+ ), f"Batch annotation {j+1} identifier should match input" # noqa: E501
+
+ # Verify ID persistence across updates
+ if i == 0:
+ existing_gids[j] = anno["id"]
+ else:
+ assert (
+ anno["id"] == existing_gids[j]
+ ), f"Batch annotation {j+1} ID should remain the same after update" # noqa: E501
+
+ @pytest.mark.parametrize("is_async", [True, False])
+ @pytest.mark.parametrize("role_or_user", [_MEMBER, _ADMIN])
+ async def test_log_span_annotations_dataframe(
+ self,
+ is_async: bool,
+ role_or_user: _RoleOrUser,
+ _span_ids: tuple[tuple[SpanId, SpanGlobalId], tuple[SpanId, SpanGlobalId]],
+ _get_user: _GetUser,
+ monkeypatch: pytest.MonkeyPatch,
+ ) -> None:
+ """Test DataFrame-based span annotation operations.
+
+ This test verifies that:
+ 1. Annotations can be created from a pandas DataFrame
+ 2. Both column-based and index-based span_id work correctly
+ 3. Optional fields (label, score, explanation) are handled properly
+ 4. Both synchronous and asynchronous clients work properly
+ 5. Both admin and member users can create annotations
+ 6. The API supports UPSERT operations for DataFrame annotations
+
+ The test uses three different DataFrame formats to verify different use cases:
+ 1. Using span_id as a column: Demonstrates standard DataFrame usage with all fields as columns
+ 2. Using span_id as the index: Tests alternative DataFrame structure with span_id as index
+ 3. Using global annotator_kind: Verifies global parameter override of DataFrame values
+
+ Each test case follows the same pattern:
+ - Create test data with random values
+ - Create DataFrame with specific structure
+ - Log annotations using the DataFrame
+ - Verify annotations were created correctly by querying the API
+ - Check all fields match the input data
+ """ # noqa: E501
+ # ============================================================================
+ # Setup
+ # ============================================================================
+ # Extract OTEL span ID and graphql Global ID from the fixture
+ (span_id1, span_gid1), (span_id2, span_gid2) = _span_ids
+
+ # Set up test environment with logged-in user
+ u = _get_user(role_or_user).log_in()
+ monkeypatch.setenv("PHOENIX_API_KEY", u.create_api_key())
+
+ # Import appropriate client based on test parameter
+ from phoenix.client import AsyncClient
+ from phoenix.client import Client as SyncClient
+
+ Client = AsyncClient if is_async else SyncClient
+
+ # ============================================================================
+ # Test Case 1: Using span_id as column
+ # ============================================================================
+ # This test case demonstrates standard DataFrame usage with span_id as a column
+ # All fields are provided as columns in the DataFrame
+ df1_annotation_names = [token_hex(16), token_hex(16)]
+ df1_span_ids = [span_id1, span_id2]
+ df1_annotator_kinds = ["HUMAN", "LLM"]
+ df1_labels = [token_hex(16), token_hex(16)]
+ df1_scores = [
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ ]
+ df1_explanations = [token_hex(16), token_hex(16)]
+ df1_metadata = [{token_hex(16): token_hex(16)} for _ in range(2)]
+ df1 = pd.DataFrame(
+ {
+ "name": df1_annotation_names,
+ "span_id": df1_span_ids,
+ "annotator_kind": df1_annotator_kinds,
+ "label": df1_labels,
+ "score": df1_scores,
+ "explanation": df1_explanations,
+ "metadata": df1_metadata,
+ }
+ )
+
+ # Log annotations from DataFrame
+ await _await_or_return(
+ Client().annotations.log_span_annotations_dataframe(
+ dataframe=df1,
+ sync=True,
+ ),
+ )
+
+ # Verify annotations were created correctly
+ for i, span_gid in enumerate([span_gid1, span_gid2]):
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gid},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {
+ (anno["label"], anno["score"], anno["explanation"]): anno
+ for anno in res["data"]["node"]["spanAnnotations"]
+ }
+
+ # Verify annotation exists with correct values
+ key = (df1_labels[i], df1_scores[i], df1_explanations[i])
+ assert (
+ key in annotations
+ ), f"DataFrame annotation {i+1} should be present in span annotations" # noqa: E501
+
+ anno = annotations[key]
+ assert (
+ anno["name"] == df1_annotation_names[i]
+ ), f"DataFrame annotation {i+1} name should match input" # noqa: E501
+ assert anno["source"] == "API", f"DataFrame annotation {i+1} source should be API" # noqa: E501
+ assert (
+ anno["metadata"] == df1_metadata[i]
+ ), f"DataFrame annotation {i+1} metadata should match input" # noqa: E501
+ assert (
+ anno["annotatorKind"] == df1_annotator_kinds[i]
+ ), f"DataFrame annotation {i+1} annotator_kind should match input" # noqa: E501
+
+ # ============================================================================
+ # Test Case 2: Using span_id as index
+ # ============================================================================
+ # This test case demonstrates using span_id as the DataFrame index
+ # This is an alternative way to specify span_id without a dedicated column
+ df2_annotation_names = [token_hex(16), token_hex(16)]
+ df2_annotator_kinds = ["HUMAN", "LLM"]
+ df2_labels = [token_hex(16), token_hex(16)]
+ df2_scores = [
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ ]
+ df2_explanations = [token_hex(16), token_hex(16)]
+ df2_metadata = [{token_hex(16): token_hex(16)} for _ in range(2)]
+ df2 = pd.DataFrame(
+ {
+ "name": df2_annotation_names,
+ "annotator_kind": df2_annotator_kinds,
+ "label": df2_labels,
+ "score": df2_scores,
+ "explanation": df2_explanations,
+ "metadata": df2_metadata,
+ },
+ index=[span_id1, span_id2],
+ )
+
+ # Log annotations from DataFrame
+ await _await_or_return(
+ Client().annotations.log_span_annotations_dataframe(
+ dataframe=df2,
+ sync=True,
+ ),
+ )
+
+ # Verify annotations were created correctly
+ for i, span_gid in enumerate([span_gid1, span_gid2]):
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gid},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {
+ (anno["label"], anno["score"], anno["explanation"]): anno
+ for anno in res["data"]["node"]["spanAnnotations"]
+ }
+
+ # Verify annotation exists with correct values
+ key = (df2_labels[i], df2_scores[i], df2_explanations[i])
+ assert (
+ key in annotations
+ ), f"DataFrame annotation {i+1} should be present in span annotations" # noqa: E501
+
+ anno = annotations[key]
+ assert (
+ anno["name"] == df2_annotation_names[i]
+ ), f"DataFrame annotation {i+1} name should match input" # noqa: E501
+ assert anno["source"] == "API", f"DataFrame annotation {i+1} source should be API" # noqa: E501
+ assert (
+ anno["metadata"] == df2_metadata[i]
+ ), f"DataFrame annotation {i+1} metadata should match input" # noqa: E501
+ assert (
+ anno["annotatorKind"] == df2_annotator_kinds[i]
+ ), f"DataFrame annotation {i+1} annotator_kind should match input" # noqa: E501
+
+ # ============================================================================
+ # Test Case 3: Using global annotator_kind
+ # ============================================================================
+ # This test case demonstrates using a global annotator_kind parameter
+ # The DataFrame does not include an annotator_kind column, and the value is
+ # provided as a parameter to the API call
+ global_annotator_kind: Literal["HUMAN"] = "HUMAN"
+ df3_annotation_names = [token_hex(16), token_hex(16)]
+ df3_span_ids = [span_id1, span_id2]
+ df3_labels = [token_hex(16), token_hex(16)]
+ df3_scores = [
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ int.from_bytes(token_bytes(4), byteorder="big"),
+ ]
+ df3_explanations = [token_hex(16), token_hex(16)]
+ df3_metadata = [{token_hex(16): token_hex(16)} for _ in range(2)]
+ df3 = pd.DataFrame(
+ {
+ "name": df3_annotation_names,
+ "span_id": df3_span_ids,
+ "label": df3_labels,
+ "score": df3_scores,
+ "explanation": df3_explanations,
+ "metadata": df3_metadata,
+ }
+ )
+
+ # Log annotations from DataFrame with global annotator_kind
+ await _await_or_return(
+ Client().annotations.log_span_annotations_dataframe(
+ dataframe=df3,
+ annotator_kind=global_annotator_kind,
+ sync=True,
+ ),
+ )
+
+ # Verify annotations were created correctly
+ for i, span_gid in enumerate([span_gid1, span_gid2]):
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gid},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {
+ (anno["label"], anno["score"], anno["explanation"]): anno
+ for anno in res["data"]["node"]["spanAnnotations"]
+ }
+
+ # Verify annotation exists with correct values
+ key = (df3_labels[i], df3_scores[i], df3_explanations[i])
+ assert (
+ key in annotations
+ ), f"DataFrame annotation {i+1} should be present in span annotations" # noqa: E501
+
+ anno = annotations[key]
+ assert (
+ anno["name"] == df3_annotation_names[i]
+ ), f"DataFrame annotation {i+1} name should match input" # noqa: E501
+ assert anno["source"] == "API", f"DataFrame annotation {i+1} source should be API" # noqa: E501
+ assert (
+ anno["metadata"] == df3_metadata[i]
+ ), f"DataFrame annotation {i+1} metadata should match input" # noqa: E501
+ assert (
+ anno["annotatorKind"] == global_annotator_kind
+ ), f"DataFrame annotation {i+1} annotator_kind should match global value" # noqa: E501
+
+ @pytest.mark.parametrize("is_async", [True, False])
+ @pytest.mark.parametrize("role_or_user", [_MEMBER, _ADMIN])
+ async def test_zero_score_annotation(
+ self,
+ is_async: bool,
+ role_or_user: _RoleOrUser,
+ _span_ids: tuple[tuple[SpanId, SpanGlobalId], tuple[SpanId, SpanGlobalId]],
+ _get_user: _GetUser,
+ monkeypatch: pytest.MonkeyPatch,
+ ) -> None:
+ """Test that a score of 0 is properly recorded and not treated as falsey.
+
+ This test verifies that:
+ 1. An annotation with a score of 0 is properly created and stored
+ 2. The score of 0 is not treated as falsy or None
+ 3. Both synchronous and asynchronous clients handle zero scores correctly
+ 4. Both admin and member users can create annotations with zero scores
+ 5. Optional fields (label, explanation) are properly stored as None when omitted
+ 6. The annotation can be retrieved and verified by its name
+ """ # noqa: E501
+ # ============================================================================
+ # Setup
+ # ============================================================================
+ # Extract OTEL span ID and graphql Global ID from the fixture
+ (span_id1, span_gid1), _ = _span_ids
+
+ # Set up test environment with logged-in user
+ u = _get_user(role_or_user).log_in()
+ monkeypatch.setenv("PHOENIX_API_KEY", u.create_api_key())
+
+ # Import appropriate client based on test parameter
+ from phoenix.client import AsyncClient
+ from phoenix.client import Client as SyncClient
+
+ Client = AsyncClient if is_async else SyncClient
+
+ # ============================================================================
+ # Test Case: Zero Score
+ # ============================================================================
+ # Test that a score of 0 is properly recorded and not treated as falsy
+ zero_score_annotation_name = token_hex(16)
+
+ # Create annotation with score of 0
+ await _await_or_return(
+ Client().annotations.add_span_annotation(
+ annotation_name=zero_score_annotation_name,
+ span_id=span_id1,
+ annotator_kind="LLM",
+ score=0, # Explicitly test score of 0
+ sync=True,
+ ),
+ )
+
+ # Verify the annotation was created correctly by querying the GraphQL API
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gid1},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {anno["name"]: anno for anno in res["data"]["node"]["spanAnnotations"]}
+
+ # Verify the annotation exists and has score of 0
+ assert (
+ zero_score_annotation_name in annotations
+ ), "Annotation with score of 0 should be present in span annotations"
+ assert (
+ annotations[zero_score_annotation_name]["score"] == 0
+ ), "Annotation score should be exactly 0"
+ assert (
+ annotations[zero_score_annotation_name]["label"] is None
+ ), "Annotation label should be None"
+ assert (
+ annotations[zero_score_annotation_name]["explanation"] is None
+ ), "Annotation explanation should be None"
+
+ @pytest.mark.parametrize("is_async", [True, False])
+ @pytest.mark.parametrize("role_or_user", [_MEMBER, _ADMIN])
+ async def test_zero_score_annotation_dataframe(
+ self,
+ is_async: bool,
+ role_or_user: _RoleOrUser,
+ _span_ids: tuple[tuple[SpanId, SpanGlobalId], tuple[SpanId, SpanGlobalId]],
+ _get_user: _GetUser,
+ monkeypatch: pytest.MonkeyPatch,
+ ) -> None:
+ """Test that a score of 0 is properly recorded and not treated as falsy in DataFrame annotations.
+
+ This test verifies that:
+ 1. A DataFrame annotation with a score of 0 is properly created and stored
+ 2. The score of 0 is not treated as falsy or None
+ 3. Both synchronous and asynchronous clients handle zero scores correctly
+ 4. Both admin and member users can create DataFrame annotations with zero scores
+ 5. Optional fields (label, explanation) are properly stored as None when omitted
+ 6. The annotation can be retrieved and verified by its name
+ """ # noqa: E501
+ # ============================================================================
+ # Setup
+ # ============================================================================
+ # Extract OTEL span ID and graphql Global ID from the fixture
+ (span_id1, span_gid1), _ = _span_ids
+
+ # Set up test environment with logged-in user
+ u = _get_user(role_or_user).log_in()
+ monkeypatch.setenv("PHOENIX_API_KEY", u.create_api_key())
+
+ # Import appropriate client based on test parameter
+ from phoenix.client import AsyncClient
+ from phoenix.client import Client as SyncClient
+
+ Client = AsyncClient if is_async else SyncClient
+
+ # ============================================================================
+ # Test Case: Zero Score in DataFrame
+ # ============================================================================
+ # Test that a score of 0 is properly recorded and not treated as falsy
+ zero_score_annotation_name = token_hex(16)
+
+ # Create DataFrame with score of 0
+ df = pd.DataFrame(
+ {
+ "name": [zero_score_annotation_name],
+ "span_id": [span_id1],
+ "annotator_kind": ["LLM"],
+ "score": [0], # Explicitly test score of 0
+ }
+ )
+
+ # Log annotations from DataFrame
+ await _await_or_return(
+ Client().annotations.log_span_annotations_dataframe(
+ dataframe=df,
+ sync=True,
+ ),
+ )
+
+ # Verify the annotation was created correctly by querying the GraphQL API
+ res, _ = _gql(
+ u,
+ query=self.query,
+ operation_name="GetSpanAnnotations",
+ variables={"id": span_gid1},
+ )
+
+ # Create a dictionary of annotations for easy lookup
+ annotations = {anno["name"]: anno for anno in res["data"]["node"]["spanAnnotations"]}
+
+ # Verify the annotation exists and has score of 0
+ assert (
+ zero_score_annotation_name in annotations
+ ), "DataFrame annotation with score of 0 should be present in span annotations"
+ assert (
+ annotations[zero_score_annotation_name]["score"] == 0
+ ), "DataFrame annotation score should be exactly 0"
+ assert (
+ annotations[zero_score_annotation_name]["label"] is None
+ ), "DataFrame annotation label should be None"
+ assert (
+ annotations[zero_score_annotation_name]["explanation"] is None
+ ), "DataFrame annotation explanation should be None"
diff --git a/tests/integration/projects/test_projects.py b/tests/integration/client/test_projects.py
similarity index 100%
rename from tests/integration/projects/test_projects.py
rename to tests/integration/client/test_projects.py
diff --git a/tests/integration/prompts/test_prompts.py b/tests/integration/client/test_prompts.py
similarity index 100%
rename from tests/integration/prompts/test_prompts.py
rename to tests/integration/client/test_prompts.py
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index ba2c3804e5..9b35e909c5 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -86,12 +86,12 @@ def _span_exporter(request: SubRequest) -> _SpanExporterFactory:
raise ValueError(f"Unknown exporter: {request.param}")
-@pytest.fixture(scope="module")
+@pytest.fixture(scope="package")
def _fake() -> Faker:
return Faker()
-@pytest.fixture(autouse=True, scope="module")
+@pytest.fixture(autouse=True, scope="package")
def _env(
_sql_database_url: URL,
_ports: Iterator[int],
@@ -107,7 +107,7 @@ def _env(
yield
-@pytest.fixture(autouse=True, scope="module")
+@pytest.fixture(autouse=True, scope="package")
def _env_phoenix_sql_database_url(
_sql_database_url: URL,
_fake: Faker,
@@ -117,7 +117,7 @@ def _env_phoenix_sql_database_url(
yield
-@pytest.fixture(autouse=True, scope="module")
+@pytest.fixture(autouse=True, scope="package")
def _env_postgresql_schema(
_sql_database_url: URL,
) -> Iterator[None]:
diff --git a/tests/integration/db_migrations/__init__.py b/tests/integration/db_migrations/__init__.py
index 2d77763e3e..eb4ad71a95 100644
--- a/tests/integration/db_migrations/__init__.py
+++ b/tests/integration/db_migrations/__init__.py
@@ -12,7 +12,8 @@ def _up(engine: Engine, alembic_config: Config, revision: str) -> None:
alembic_config.attributes["connection"] = conn
command.upgrade(alembic_config, revision)
engine.dispose()
- assert _version_num(engine) == (revision,)
+ actual = _version_num(engine)
+ assert actual == (revision,)
def _down(engine: Engine, alembic_config: Config, revision: str) -> None:
diff --git a/tests/integration/db_migrations/test_change_jsonb_to_json_for_prompts.py b/tests/integration/db_migrations/test_change_jsonb_to_json_for_prompts.py
new file mode 100644
index 0000000000..6c45badcb6
--- /dev/null
+++ b/tests/integration/db_migrations/test_change_jsonb_to_json_for_prompts.py
@@ -0,0 +1,238 @@
+import json
+import re
+from typing import Literal
+
+import pytest
+from alembic.config import Config
+from sqlalchemy import Engine, text
+
+from . import _down, _up, _version_num
+
+
+def test_change_jsonb_to_json_for_prompts(
+ _engine: Engine,
+ _alembic_config: Config,
+ _db_backend: Literal["sqlite", "postgresql"],
+) -> None:
+ """
+ Test the migration that changes the column type from JSONB to JSON for the
+ 'tools' and 'response_format' columns in the 'prompt_versions' table.
+
+ This test verifies:
+ 1. The initial state with JSONB columns
+ 2. The migration to JSON columns
+ 3. The downgrade back to JSONB columns
+
+ It also ensures data integrity throughout the migration process.
+ """
+ # Verify we're starting from a clean state
+ with pytest.raises(BaseException, match="alembic_version"):
+ _version_num(_engine)
+
+ # Run the migration that creates the prompt_versions table
+ _up(_engine, _alembic_config, "bc8fea3c2bc8")
+
+ # Sample data for testing - intentionally using keys in arbitrary order
+ # to demonstrate the difference between JSONB and JSON in PostgreSQL
+ tools_data = {"ZZZ": 3, "Z": 1, "ZZ": 2}
+ response_format_data = {"ZZZ": 3, "Z": 1, "ZZ": 2}
+
+ # Insert test data with JSONB columns
+ with _engine.connect() as conn:
+ # Create a prompt to reference
+ prompt_id = conn.execute(
+ text(
+ """
+ INSERT INTO prompts (name, metadata)
+ VALUES ('test_prompt', '{}')
+ RETURNING id
+ """
+ )
+ ).scalar()
+
+ # Insert prompt version with JSONB data
+ prompt_version_id = conn.execute(
+ text(
+ """
+ INSERT INTO prompt_versions (
+ prompt_id, template_type, template_format,
+ template, invocation_parameters, tools, response_format,
+ model_provider, model_name, metadata
+ )
+ VALUES (
+ :prompt_id, 'CHAT', 'F_STRING',
+ '{}', '{}', :tools, :response_format,
+ 'OPENAI', 'gpt-4', '{}'
+ )
+ RETURNING id
+ """ # noqa: E501
+ ),
+ {
+ "prompt_id": prompt_id,
+ "tools": json.dumps(tools_data),
+ "response_format": json.dumps(response_format_data),
+ },
+ ).scalar()
+ conn.commit() # Commit to ensure data is visible to subsequent connections
+
+ # STEP 1: Verify initial state with JSONB columns
+ with _engine.connect() as conn:
+ # Check column types based on database backend
+ if _db_backend == "postgresql":
+ # PostgreSQL: Use pg_typeof to check column types
+ column_types = conn.execute(
+ text(
+ """
+ SELECT pg_typeof(tools)::text, pg_typeof(response_format)::text
+ FROM prompt_versions
+ WHERE id = :id
+ """
+ ),
+ {"id": prompt_version_id},
+ ).first()
+ assert column_types is not None
+ assert column_types[0] == "jsonb"
+ assert column_types[1] == "jsonb"
+ else:
+ # SQLite: Check table definition from sqlite_master
+ table_def = conn.execute(
+ text(
+ """
+ SELECT sql FROM sqlite_master
+ WHERE type='table' AND name='prompt_versions';
+ """
+ )
+ ).scalar()
+ assert table_def is not None
+ # Verify columns are defined as JSONB
+ assert re.search(r"\btools\s+JSONB\b", table_def) is not None
+ assert re.search(r"\bresponse_format\s+JSONB\b", table_def) is not None
+
+ # Verify data was inserted correctly
+ if _db_backend == "sqlite":
+ # SQLite: JSON is stored as a string exactly as inserted
+ result = conn.execute(
+ text("SELECT tools, response_format FROM prompt_versions WHERE id = :id"),
+ {"id": prompt_version_id},
+ ).first()
+ assert result is not None
+ assert result[0] == json.dumps(tools_data)
+ assert result[1] == json.dumps(response_format_data)
+ else:
+ # PostgreSQL: JSONB doesn't preserve key order
+ result = conn.execute(
+ text(
+ """
+ SELECT tools::text, response_format::text
+ FROM prompt_versions WHERE id = :id
+ """
+ ),
+ {"id": prompt_version_id},
+ ).first()
+ assert result is not None
+ # Data is semantically equivalent when parsed
+ assert json.loads(result[0]) == tools_data
+ assert json.loads(result[1]) == response_format_data
+ # But serialized string differs due to key reordering in JSONB
+ # JSONB stores data in a binary format and reorders keys alphabetically
+ assert result[0] != json.dumps(tools_data)
+ assert result[1] != json.dumps(response_format_data)
+
+ # STEP 2: Run the migration to change JSONB to JSON
+ _up(_engine, _alembic_config, "8a3764fe7f1a")
+
+ # Verify the migration worked correctly
+ with _engine.connect() as conn:
+ # Check data is still accessible
+ result = conn.execute(
+ text("SELECT tools, response_format FROM prompt_versions WHERE id = :id"),
+ {"id": prompt_version_id},
+ ).first()
+
+ assert result is not None
+ if _db_backend == "sqlite":
+ assert result[0] == json.dumps(tools_data)
+ assert result[1] == json.dumps(response_format_data)
+ else:
+ assert result[0] == tools_data
+ assert result[1] == response_format_data
+
+ # Check column types after migration
+ if _db_backend == "postgresql":
+ # PostgreSQL: Verify columns are now JSON
+ column_types = conn.execute(
+ text(
+ """
+ SELECT pg_typeof(tools)::text, pg_typeof(response_format)::text
+ FROM prompt_versions
+ WHERE id = :id
+ """
+ ),
+ {"id": prompt_version_id},
+ ).first()
+ assert column_types is not None
+ assert column_types[0] == "json"
+ assert column_types[1] == "json"
+ else:
+ # SQLite: Verify columns are now JSON
+ table_def = conn.execute(
+ text(
+ """
+ SELECT sql FROM sqlite_master
+ WHERE type='table' AND name='prompt_versions';
+ """
+ )
+ ).scalar()
+ assert table_def is not None
+ # Verify columns are defined as JSON and not JSONB
+ assert re.search(r"\btools\s+JSON\b", table_def) is not None
+ assert re.search(r"\bresponse_format\s+JSON\b", table_def) is not None
+
+ # STEP 3: Test downgrade back to JSONB
+ _down(_engine, _alembic_config, "bc8fea3c2bc8")
+
+ # Verify the downgrade worked correctly
+ with _engine.connect() as conn:
+ # Check data is still accessible
+ result = conn.execute(
+ text("SELECT tools, response_format FROM prompt_versions WHERE id = :id"),
+ {"id": prompt_version_id},
+ ).first()
+
+ assert result is not None
+ if _db_backend == "sqlite":
+ assert result[0] == json.dumps(tools_data)
+ assert result[1] == json.dumps(response_format_data)
+ else:
+ assert result[0] == tools_data
+ assert result[1] == response_format_data
+
+ # Check column types after downgrade
+ if _db_backend == "postgresql":
+ # PostgreSQL: Verify columns are back to JSONB
+ column_types = conn.execute(
+ text(
+ """
+ SELECT pg_typeof(tools)::text, pg_typeof(response_format)::text
+ FROM prompt_versions
+ WHERE id = :id
+ """
+ ),
+ {"id": prompt_version_id},
+ ).first()
+ assert column_types is not None
+ assert column_types[0] == "jsonb"
+ assert column_types[1] == "jsonb"
+ else:
+ # SQLite: Verify columns are back to JSONB
+ table_def = conn.execute(
+ text(
+ """
+ SELECT sql FROM sqlite_master
+ WHERE type='table' AND name='prompt_versions';
+ """
+ )
+ ).scalar()
+ assert table_def is not None
+ assert re.search(r"\btools\s+JSONB\b", table_def) is not None
+ assert re.search(r"\bresponse_format\s+JSONB\b", table_def) is not None
diff --git a/tests/integration/db_migrations/test_data_migration_2f9d1a65945f_annotation_config_migration.py b/tests/integration/db_migrations/test_data_migration_2f9d1a65945f_annotation_config_migration.py
new file mode 100644
index 0000000000..46c43dbd60
--- /dev/null
+++ b/tests/integration/db_migrations/test_data_migration_2f9d1a65945f_annotation_config_migration.py
@@ -0,0 +1,1134 @@
+from datetime import datetime, timezone
+from typing import Any, Literal
+
+import pytest
+from alembic.config import Config
+from sqlalchemy import Connection, Engine, text
+from typing_extensions import assert_never
+
+from . import _down, _up, _version_num
+
+
+def test_annotation_config_migration(
+ _engine: Engine,
+ _alembic_config: Config,
+ _db_backend: Literal["sqlite", "postgresql"],
+) -> None:
+ # no migrations applied yet
+ with pytest.raises(BaseException, match="alembic_version"):
+ _version_num(_engine)
+
+ # apply migrations up to right before annotation config migration
+ _up(_engine, _alembic_config, "bc8fea3c2bc8")
+
+ # insert entities to be annotated
+ now = datetime.now(timezone.utc)
+ with _engine.connect() as conn:
+ # create a project
+ project_id = conn.execute(
+ text(
+ """
+ INSERT INTO projects (name, description)
+ VALUES (:name, :description)
+ RETURNING id
+ """
+ ),
+ {"name": "project-name", "description": None},
+ ).scalar()
+
+ # insert a trace
+ trace_rowid = conn.execute(
+ text(
+ """
+ INSERT INTO traces (project_rowid, trace_id, start_time, end_time)
+ VALUES (:project_id, :trace_id, :now, :now)
+ RETURNING id
+ """
+ ),
+ {
+ "project_id": project_id,
+ "trace_id": "trace1",
+ "now": now,
+ },
+ ).scalar()
+ assert isinstance(trace_rowid, int)
+
+ # insert a span
+ span_rowid = conn.execute(
+ text(
+ """
+ INSERT INTO spans (
+ trace_rowid, span_id, parent_id, name, span_kind, start_time, end_time,
+ attributes, events, status_code, status_message,
+ cumulative_error_count, cumulative_llm_token_count_prompt,
+ cumulative_llm_token_count_completion, llm_token_count_prompt,
+ llm_token_count_completion
+ )
+ VALUES (
+ :trace_rowid, :span_id, :parent_id, :name, :span_kind, :start_time, :end_time,
+ :attributes, :events, :status_code, :status_message,
+ :cumulative_error_count, :cumulative_llm_token_count_prompt,
+ :cumulative_llm_token_count_completion, :llm_token_count_prompt,
+ :llm_token_count_completion
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "trace_rowid": trace_rowid,
+ "span_id": "span1",
+ "parent_id": None,
+ "name": "span-name",
+ "span_kind": "INTERNAL",
+ "start_time": now,
+ "end_time": now,
+ "attributes": "{}",
+ "events": "[]",
+ "status_code": "OK",
+ "status_message": "",
+ "cumulative_error_count": 0,
+ "cumulative_llm_token_count_prompt": 0,
+ "cumulative_llm_token_count_completion": 0,
+ "llm_token_count_prompt": None,
+ "llm_token_count_completion": None,
+ },
+ ).scalar()
+ assert isinstance(span_rowid, int)
+ conn.commit()
+
+ for iteration_index in range(2):
+ # test behavior before up migration
+ with _engine.connect() as conn:
+ # verify columns
+ if _db_backend == "sqlite":
+ trace_annotations_table_def = _get_sqlite_table_info(conn, "trace_annotations")
+ span_annotations_table_def = _get_sqlite_table_info(conn, "span_annotations")
+ document_annotations_table_def = _get_sqlite_table_info(
+ conn, "document_annotations"
+ )
+
+ # Check trace_annotations
+ assert "identifier" not in trace_annotations_table_def
+ assert "source" not in trace_annotations_table_def
+ assert "user_id" not in trace_annotations_table_def
+ assert "annotator_kind VARCHAR NOT NULL" in trace_annotations_table_def
+ assert (
+ """CONSTRAINT "ck_trace_annotations_`valid_annotator_kind`" CHECK (annotator_kind IN ('LLM', 'HUMAN'))""" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_trace_annotations_trace_rowid_traces FOREIGN KEY(trace_rowid) REFERENCES traces (id) ON DELETE CASCADE" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT pk_trace_annotations PRIMARY KEY (id)"
+ in trace_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT uq_trace_annotations_name_trace_rowid UNIQUE (name, trace_rowid)"
+ in trace_annotations_table_def
+ )
+ assert trace_annotations_table_def.count("CONSTRAINT") == 4
+
+ # Check span_annotations
+ assert "identifier" not in span_annotations_table_def
+ assert "source" not in span_annotations_table_def
+ assert "user_id" not in span_annotations_table_def
+ assert "annotator_kind VARCHAR NOT NULL" in span_annotations_table_def
+ assert (
+ """CONSTRAINT "ck_span_annotations_`valid_annotator_kind`" CHECK (annotator_kind IN ('LLM', 'HUMAN'))""" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_span_annotations_span_rowid_spans FOREIGN KEY(span_rowid) REFERENCES spans (id) ON DELETE CASCADE" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT pk_span_annotations PRIMARY KEY (id)" in span_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT uq_span_annotations_name_span_rowid UNIQUE (name, span_rowid)"
+ in span_annotations_table_def
+ )
+ assert span_annotations_table_def.count("CONSTRAINT") == 4
+
+ # Check document_annotations
+ assert "identifier" not in document_annotations_table_def
+ assert "source" not in document_annotations_table_def
+ assert "user_id" not in document_annotations_table_def
+ assert "annotator_kind VARCHAR NOT NULL" in document_annotations_table_def
+ assert (
+ """CONSTRAINT "ck_document_annotations_`valid_annotator_kind`" CHECK (annotator_kind IN ('LLM', 'HUMAN'))""" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_document_annotations_span_rowid_spans FOREIGN KEY(span_rowid) REFERENCES spans (id) ON DELETE CASCADE" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT pk_document_annotations PRIMARY KEY (id)"
+ in document_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT uq_document_annotations_name_span_rowid_document_position UNIQUE (name, span_rowid, document_position)" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert document_annotations_table_def.count("CONSTRAINT") == 4
+
+ elif _db_backend == "postgresql":
+ # Get table information for all three tables
+ trace_annotations_info = _get_postgres_table_info(conn, "trace_annotations")
+ span_annotations_info = _get_postgres_table_info(conn, "span_annotations")
+ document_annotations_info = _get_postgres_table_info(conn, "document_annotations")
+
+ # Check trace_annotations
+ columns = trace_annotations_info["columns"]
+ assert "identifier" not in columns
+ assert "source" not in columns
+ assert "user_id" not in columns
+ assert "annotator_kind" in columns
+ assert columns["annotator_kind"]["data_type"] == "character varying"
+ assert columns["annotator_kind"]["is_nullable"] == "NO"
+ constraints = trace_annotations_info["constraints"]
+ assert constraints["ck_trace_annotations_`valid_annotator_kind`"] == {
+ "constraint_type": "CHECK",
+ "column_names": None,
+ }
+ assert constraints["fk_trace_annotations_trace_rowid_traces"] == {
+ "constraint_type": "FOREIGN KEY",
+ "column_names": ["trace_rowid"],
+ }
+ assert constraints["pk_trace_annotations"] == {
+ "constraint_type": "PRIMARY KEY",
+ "column_names": ["id"],
+ }
+ assert constraints["uq_trace_annotations_name_trace_rowid"] == {
+ "constraint_type": "UNIQUE",
+ "column_names": ["name", "trace_rowid"],
+ }
+
+ # Check span_annotations
+ columns = span_annotations_info["columns"]
+ assert "identifier" not in columns
+ assert "source" not in columns
+ assert "user_id" not in columns
+ assert "annotator_kind" in columns
+ assert columns["annotator_kind"]["data_type"] == "character varying"
+ assert columns["annotator_kind"]["is_nullable"] == "NO"
+ constraints = span_annotations_info["constraints"]
+ assert constraints["ck_span_annotations_`valid_annotator_kind`"] == {
+ "constraint_type": "CHECK",
+ "column_names": None,
+ }
+ assert constraints["fk_span_annotations_span_rowid_spans"] == {
+ "constraint_type": "FOREIGN KEY",
+ "column_names": ["span_rowid"],
+ }
+ assert constraints["pk_span_annotations"] == {
+ "constraint_type": "PRIMARY KEY",
+ "column_names": ["id"],
+ }
+ assert constraints["uq_span_annotations_name_span_rowid"] == {
+ "constraint_type": "UNIQUE",
+ "column_names": ["name", "span_rowid"],
+ }
+
+ # Check document_annotations
+ columns = document_annotations_info["columns"]
+ assert "identifier" not in columns
+ assert "source" not in columns
+ assert "user_id" not in columns
+ assert "annotator_kind" in columns
+ assert columns["annotator_kind"]["data_type"] == "character varying"
+ assert columns["annotator_kind"]["is_nullable"] == "NO"
+ constraints = document_annotations_info["constraints"]
+ assert constraints["ck_document_annotations_`valid_annotator_kind`"] == {
+ "constraint_type": "CHECK",
+ "column_names": None,
+ }
+ assert constraints["fk_document_annotations_span_rowid_spans"] == {
+ "constraint_type": "FOREIGN KEY",
+ "column_names": ["span_rowid"],
+ }
+ assert constraints["pk_document_annotations"] == {
+ "constraint_type": "PRIMARY KEY",
+ "column_names": ["id"],
+ }
+ assert constraints["uq_document_annotations_name_span_rowid_document_position"] == {
+ "constraint_type": "UNIQUE",
+ "column_names": ["name", "span_rowid", "document_position"],
+ }
+
+ else:
+ assert_never(_db_backend)
+
+ # insert a trace annotation with LLM annotator kind
+ trace_annotation_from_llm_id = _create_trace_annotation_pre_migration(
+ conn=conn,
+ trace_rowid=trace_rowid,
+ name=f"trace-annotation-from-llm-{iteration_index}",
+ label="trace-annotation-label",
+ score=1.23,
+ explanation="trace-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="LLM",
+ )
+ conn.commit()
+
+ # insert a trace annotation with HUMAN annotator kind
+ trace_annotation_from_human_id = _create_trace_annotation_pre_migration(
+ conn=conn,
+ trace_rowid=trace_rowid,
+ name=f"trace-annotation-from-human-{iteration_index}",
+ label="trace-annotation-label",
+ score=1.23,
+ explanation="trace-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="HUMAN",
+ )
+ conn.commit()
+
+ # insert a span annotation with LLM annotator kind
+ span_annotation_from_llm_id = _create_span_annotation_pre_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ name=f"span-annotation-from-llm-{iteration_index}",
+ label="span-annotation-label",
+ score=1.23,
+ explanation="span-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="LLM",
+ )
+ conn.commit()
+
+ # insert a span annotation with HUMAN annotator kind
+ span_annotation_from_human_id = _create_span_annotation_pre_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ name=f"span-annotation-from-human-{iteration_index}",
+ label="span-annotation-label",
+ score=1.23,
+ explanation="span-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="HUMAN",
+ )
+ conn.commit()
+
+ # insert a document annotation with LLM annotator kind
+ document_annotation_from_llm_id = _create_document_annotation_pre_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ document_position=0,
+ name=f"document-annotation-from-llm-{iteration_index}",
+ label="document-annotation-label",
+ score=1.23,
+ explanation="document-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="LLM",
+ )
+ conn.commit()
+
+ # insert a document annotation with HUMAN annotator kind
+ document_annotation_from_human_id = _create_document_annotation_pre_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ document_position=1,
+ name=f"document-annotation-from-human-{iteration_index}",
+ label="document-annotation-label",
+ score=1.23,
+ explanation="document-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="HUMAN",
+ )
+ conn.commit()
+
+ with _engine.connect() as conn:
+ # verify that 'CODE' annotator_kind is not allowed for trace annotations before migration # noqa: E501
+ with pytest.raises(Exception) as exc_info:
+ _create_trace_annotation_pre_migration(
+ conn=conn,
+ trace_rowid=trace_rowid,
+ name=f"trace-annotation-from-llm-{iteration_index}",
+ label="trace-annotation-label",
+ score=1.23,
+ explanation="trace-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ )
+ # conn.commit()
+ assert "valid_annotator_kind" in str(exc_info.value)
+
+ with _engine.connect() as conn:
+ # verify that 'CODE' annotator_kind is not allowed for span annotations before migration
+ with pytest.raises(Exception) as exc_info:
+ _create_span_annotation_pre_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ name=f"span-annotation-from-code-{iteration_index}",
+ label="span-annotation-label",
+ score=1.23,
+ explanation="span-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ )
+ conn.commit()
+ assert "valid_annotator_kind" in str(exc_info.value)
+
+ with _engine.connect() as conn:
+ # Verify that 'CODE' annotator_kind is not allowed for document annotations before migration # noqa: E501
+ with pytest.raises(Exception) as exc_info:
+ _create_document_annotation_pre_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ document_position=2,
+ name=f"document-annotation-from-code-{iteration_index}",
+ label="document-annotation-label",
+ score=1.23,
+ explanation="document-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ )
+ conn.commit()
+ assert "valid_annotator_kind" in str(exc_info.value)
+
+ # run the annotation config migration
+ _up(_engine, _alembic_config, "2f9d1a65945f")
+
+ # verify new columns exist and have been backfilled
+ with _engine.connect() as conn:
+ # verify expected columns and constraints exist
+ if _db_backend == "sqlite":
+ trace_annotations_table_def = _get_sqlite_table_info(conn, "trace_annotations")
+ span_annotations_table_def = _get_sqlite_table_info(conn, "span_annotations")
+ document_annotations_table_def = _get_sqlite_table_info(
+ conn, "document_annotations"
+ )
+
+ # Check trace_annotations
+ assert "annotator_kind VARCHAR NOT NULL" in trace_annotations_table_def
+ assert "identifier VARCHAR NOT NULL" in trace_annotations_table_def
+ assert "source VARCHAR NOT NULL" in trace_annotations_table_def
+ assert "user_id INTEGER" in trace_annotations_table_def
+ assert (
+ "CONSTRAINT pk_trace_annotations PRIMARY KEY (id)"
+ in trace_annotations_table_def
+ )
+ assert (
+ """CONSTRAINT "ck_trace_annotations_`valid_annotator_kind`" CHECK (annotator_kind IN ('LLM', 'CODE', 'HUMAN'))""" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_trace_annotations_trace_rowid_traces FOREIGN KEY(trace_rowid) REFERENCES traces (id) ON DELETE CASCADE" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT uq_trace_annotations_name_trace_rowid_identifier UNIQUE (name, trace_rowid, identifier)" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_trace_annotations_user_id_users FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE SET NULL" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert (
+ """CONSTRAINT "ck_trace_annotations_`valid_source`" CHECK (source IN ('API', 'APP'))""" # noqa: E501
+ in trace_annotations_table_def
+ )
+ assert trace_annotations_table_def.count("CONSTRAINT") == 6
+
+ # Check span_annotations
+ assert "annotator_kind VARCHAR NOT NULL" in span_annotations_table_def
+ assert "identifier VARCHAR NOT NULL" in span_annotations_table_def
+ assert "source VARCHAR NOT NULL" in span_annotations_table_def
+ assert "user_id INTEGER" in span_annotations_table_def
+ assert (
+ "CONSTRAINT pk_span_annotations PRIMARY KEY (id)" in span_annotations_table_def
+ )
+ assert (
+ """CONSTRAINT "ck_span_annotations_`valid_annotator_kind`" CHECK (annotator_kind IN ('LLM', 'CODE', 'HUMAN'))""" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_span_annotations_span_rowid_spans FOREIGN KEY(span_rowid) REFERENCES spans (id) ON DELETE CASCADE" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT uq_span_annotations_name_span_rowid_identifier UNIQUE (name, span_rowid, identifier)" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_span_annotations_user_id_users FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE SET NULL" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert (
+ """CONSTRAINT "ck_span_annotations_`valid_source`" CHECK (source IN ('API', 'APP'))""" # noqa: E501
+ in span_annotations_table_def
+ )
+ assert span_annotations_table_def.count("CONSTRAINT") == 6
+
+ # Check document_annotations
+ assert "annotator_kind VARCHAR NOT NULL" in document_annotations_table_def
+ assert "identifier VARCHAR NOT NULL" in document_annotations_table_def
+ assert "source VARCHAR NOT NULL" in document_annotations_table_def
+ assert "user_id INTEGER" in document_annotations_table_def
+ assert (
+ "CONSTRAINT pk_document_annotations PRIMARY KEY (id)"
+ in document_annotations_table_def
+ )
+ assert (
+ """CONSTRAINT "ck_document_annotations_`valid_annotator_kind`" CHECK (annotator_kind IN ('LLM', 'CODE', 'HUMAN'))""" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_document_annotations_span_rowid_spans FOREIGN KEY(span_rowid) REFERENCES spans (id) ON DELETE CASCADE" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT uq_document_annotations_name_span_rowid_document_pos_identifier UNIQUE (name, span_rowid, document_position, identifier)" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert (
+ "CONSTRAINT fk_document_annotations_user_id_users FOREIGN KEY(user_id) REFERENCES users (id) ON DELETE SET NULL" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert (
+ """CONSTRAINT "ck_document_annotations_`valid_source`" CHECK (source IN ('API', 'APP'))""" # noqa: E501
+ in document_annotations_table_def
+ )
+ assert document_annotations_table_def.count("CONSTRAINT") == 6
+
+ elif _db_backend == "postgresql":
+ # Get table information for all three tables
+ trace_annotations_info = _get_postgres_table_info(conn, "trace_annotations")
+ span_annotations_info = _get_postgres_table_info(conn, "span_annotations")
+ document_annotations_info = _get_postgres_table_info(conn, "document_annotations")
+ else:
+ assert_never(_db_backend)
+
+ # get the trace annotation from llm
+ trace_annotation_from_llm = conn.execute(
+ text(
+ """
+ SELECT identifier, source, user_id
+ FROM trace_annotations
+ WHERE id = :id
+ """
+ ),
+ {"id": trace_annotation_from_llm_id},
+ ).first()
+ assert trace_annotation_from_llm is not None
+ (identifier, source, user_id) = trace_annotation_from_llm
+ assert identifier == ""
+ assert source == "API"
+ assert user_id is None
+
+ # get the trace annotation from human
+ trace_annotation_from_human = conn.execute(
+ text(
+ """
+ SELECT identifier, source, user_id
+ FROM trace_annotations
+ WHERE id = :id
+ """
+ ),
+ {"id": trace_annotation_from_human_id},
+ ).first()
+ assert trace_annotation_from_human is not None
+ (identifier, source, user_id) = trace_annotation_from_human
+ assert identifier == ""
+ assert source == "APP"
+ assert user_id is None
+
+ # get the span annotation from llm
+ span_annotation_from_llm = conn.execute(
+ text(
+ """
+ SELECT identifier, source, user_id
+ FROM span_annotations
+ WHERE id = :id
+ """
+ ),
+ {"id": span_annotation_from_llm_id},
+ ).first()
+ assert span_annotation_from_llm is not None
+ (identifier, source, user_id) = span_annotation_from_llm
+ assert identifier == ""
+ assert source == "API"
+ assert user_id is None
+
+ # get the span annotation from human
+ span_annotation_from_human = conn.execute(
+ text(
+ """
+ SELECT identifier, source, user_id
+ FROM span_annotations
+ WHERE id = :id
+ """
+ ),
+ {"id": span_annotation_from_human_id},
+ ).first()
+ assert span_annotation_from_human is not None
+ (identifier, source, user_id) = span_annotation_from_human
+ assert identifier == ""
+ assert source == "APP"
+ assert user_id is None
+
+ # get the document annotation from llm
+ document_annotation_from_llm = conn.execute(
+ text(
+ """
+ SELECT identifier, source, user_id
+ FROM document_annotations
+ WHERE id = :id
+ """
+ ),
+ {"id": document_annotation_from_llm_id},
+ ).first()
+ assert document_annotation_from_llm is not None
+ (identifier, source, user_id) = document_annotation_from_llm
+ assert identifier == ""
+ assert source == "API"
+ assert user_id is None
+
+ # get the document annotation from human
+ document_annotation_from_human = conn.execute(
+ text(
+ """
+ SELECT identifier, source, user_id
+ FROM document_annotations
+ WHERE id = :id
+ """
+ ),
+ {"id": document_annotation_from_human_id},
+ ).first()
+ assert document_annotation_from_human is not None
+ (identifier, source, user_id) = document_annotation_from_human
+ assert identifier == ""
+ assert source == "APP"
+ assert user_id is None
+
+ with _engine.connect() as conn:
+ # verify source is non-nullable for trace annotations
+ with pytest.raises(Exception) as exc_info:
+ _create_trace_annotation_post_migration(
+ conn=conn,
+ trace_rowid=trace_rowid,
+ name=f"trace-annotation-name-{iteration_index}",
+ label="trace-annotation-label",
+ score=1.23,
+ explanation="trace-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ identifier="",
+ user_id=None,
+ source=None, # type: ignore
+ )
+ error_message = str(exc_info.value).lower()
+ assert (
+ "not null" in error_message
+ or "not-null" in error_message
+ or "notnull" in error_message
+ )
+ assert "source" in error_message
+
+ with _engine.connect() as conn:
+ # verify source is non-nullable for span annotations
+ with pytest.raises(Exception) as exc_info:
+ _create_span_annotation_post_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ name=f"span-annotation-name-{iteration_index}",
+ label="span-annotation-label",
+ score=1.23,
+ explanation="span-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ identifier="",
+ user_id=None,
+ source=None, # type: ignore
+ )
+ error_message = str(exc_info.value).lower()
+ assert (
+ "not null" in error_message
+ or "not-null" in error_message
+ or "notnull" in error_message
+ )
+ assert "source" in error_message
+
+ with _engine.connect() as conn:
+ # verify source is non-nullable for document annotations
+ with pytest.raises(Exception) as exc_info:
+ _create_document_annotation_post_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ document_position=4,
+ name=f"document-annotation-name-{iteration_index}",
+ label="document-annotation-label",
+ score=1.23,
+ explanation="document-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ identifier="",
+ user_id=None,
+ source=None, # type: ignore
+ )
+ error_message = str(exc_info.value).lower()
+ assert (
+ "not null" in error_message
+ or "not-null" in error_message
+ or "notnull" in error_message
+ )
+ assert "source" in error_message
+
+ with _engine.connect() as conn:
+ # verify identifier is non-nullable for trace annotations
+ with pytest.raises(Exception) as exc_info:
+ _create_trace_annotation_post_migration(
+ conn=conn,
+ trace_rowid=trace_rowid,
+ name=f"trace-annotation-name-{iteration_index}",
+ label="trace-annotation-label",
+ score=1.23,
+ explanation="trace-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ identifier=None, # type: ignore
+ user_id=None,
+ source="API",
+ )
+ error_message = str(exc_info.value).lower()
+ assert (
+ "not null" in error_message
+ or "not-null" in error_message
+ or "notnull" in error_message
+ )
+ assert "identifier" in error_message
+
+ with _engine.connect() as conn:
+ # verify identifier is non-nullable for span annotations
+ with pytest.raises(Exception) as exc_info:
+ _create_span_annotation_post_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ name=f"span-annotation-name-{iteration_index}",
+ label="span-annotation-label",
+ score=1.23,
+ explanation="span-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ identifier=None, # type: ignore
+ user_id=None,
+ source="API",
+ )
+ error_message = str(exc_info.value).lower()
+ assert (
+ "not null" in error_message
+ or "not-null" in error_message
+ or "notnull" in error_message
+ )
+ assert "identifier" in error_message
+
+ with _engine.connect() as conn:
+ # verify identifier is non-nullable for document annotations
+ with pytest.raises(Exception) as exc_info:
+ _create_document_annotation_post_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ document_position=4,
+ name=f"document-annotation-name-{iteration_index}",
+ label="document-annotation-label",
+ score=1.23,
+ explanation="document-annotation-explanation",
+ metadata='{"foo": "bar"}',
+ annotator_kind="CODE",
+ identifier=None, # type: ignore
+ user_id=None,
+ source="API",
+ )
+ error_message = str(exc_info.value).lower()
+ assert (
+ "not null" in error_message
+ or "not-null" in error_message
+ or "notnull" in error_message
+ )
+ assert "identifier" in error_message
+
+ with _engine.connect() as conn:
+ # verify that after migration, 'CODE' is allowed
+ trace_annotation_from_code_id = _create_trace_annotation_post_migration(
+ conn=conn,
+ trace_rowid=trace_rowid,
+ name=f"trace-annotation-name-2-{iteration_index}",
+ label="trace-annotation-label-2",
+ score=2.34,
+ explanation="trace-annotation-explanation",
+ metadata='{"foo": "baz"}',
+ annotator_kind="CODE",
+ user_id=None,
+ identifier="id1",
+ source="API",
+ )
+ conn.commit()
+
+ # verify CODE annotator kind for span annotations
+ span_annotation_from_code_id = _create_span_annotation_post_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ name=f"span-annotation-name-2-{iteration_index}",
+ label="span-annotation-label-2",
+ score=2.34,
+ explanation="span-annotation-explanation",
+ metadata='{"foo": "baz"}',
+ annotator_kind="CODE",
+ user_id=None,
+ identifier="id2",
+ source="API",
+ )
+ conn.commit()
+
+ # verify CODE annotator kind for document annotations
+ document_annotation_from_code_id = _create_document_annotation_post_migration(
+ conn=conn,
+ span_rowid=span_rowid,
+ document_position=3,
+ name=f"document-annotation-name-2-{iteration_index}",
+ label="document-annotation-label-2",
+ score=2.34,
+ explanation="document-annotation-explanation",
+ metadata='{"foo": "baz"}',
+ annotator_kind="CODE",
+ user_id=None,
+ identifier="id3",
+ source="API",
+ )
+ conn.commit()
+
+ # delete the annotations with CODE annotator kind because they will break the down migration # noqa: E501
+ conn.execute(
+ text("DELETE FROM trace_annotations WHERE id = :id"),
+ {"id": trace_annotation_from_code_id},
+ )
+ conn.execute(
+ text("DELETE FROM span_annotations WHERE id = :id"),
+ {"id": span_annotation_from_code_id},
+ )
+ conn.execute(
+ text("DELETE FROM document_annotations WHERE id = :id"),
+ {"id": document_annotation_from_code_id},
+ )
+ conn.commit()
+
+ _down(_engine, _alembic_config, "bc8fea3c2bc8")
+
+
+def _create_trace_annotation_pre_migration(
+ conn: Connection,
+ trace_rowid: int,
+ name: str,
+ label: str,
+ score: float,
+ explanation: str,
+ metadata: str,
+ annotator_kind: str,
+) -> int:
+ id = conn.execute(
+ text(
+ """
+ INSERT INTO trace_annotations (
+ trace_rowid, name, label, score, explanation,
+ metadata, annotator_kind
+ )
+ VALUES (
+ :trace_rowid, :name, :label, :score, :explanation,
+ :metadata, :annotator_kind
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "trace_rowid": trace_rowid,
+ "name": name,
+ "label": label,
+ "score": score,
+ "explanation": explanation,
+ "metadata": metadata,
+ "annotator_kind": annotator_kind,
+ },
+ ).scalar()
+ assert isinstance(id, int)
+ return id
+
+
+def _create_span_annotation_pre_migration(
+ conn: Connection,
+ span_rowid: int,
+ name: str,
+ label: str,
+ score: float,
+ explanation: str,
+ metadata: str,
+ annotator_kind: str,
+) -> int:
+ id = conn.execute(
+ text(
+ """
+ INSERT INTO span_annotations (
+ span_rowid, name, label, score, explanation,
+ metadata, annotator_kind
+ )
+ VALUES (
+ :span_rowid, :name, :label, :score, :explanation,
+ :metadata, :annotator_kind
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "span_rowid": span_rowid,
+ "name": name,
+ "label": label,
+ "score": score,
+ "explanation": explanation,
+ "metadata": metadata,
+ "annotator_kind": annotator_kind,
+ },
+ ).scalar()
+ assert isinstance(id, int)
+ return id
+
+
+def _create_document_annotation_pre_migration(
+ conn: Connection,
+ span_rowid: int,
+ document_position: int,
+ name: str,
+ label: str,
+ score: float,
+ explanation: str,
+ metadata: str,
+ annotator_kind: str,
+) -> int:
+ id = conn.execute(
+ text(
+ """
+ INSERT INTO document_annotations (
+ span_rowid, document_position, name, label, score, explanation,
+ metadata, annotator_kind
+ )
+ VALUES (
+ :span_rowid, :document_position, :name, :label, :score, :explanation,
+ :metadata, :annotator_kind
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "span_rowid": span_rowid,
+ "document_position": document_position,
+ "name": name,
+ "label": label,
+ "score": score,
+ "explanation": explanation,
+ "metadata": metadata,
+ "annotator_kind": annotator_kind,
+ },
+ ).scalar()
+ assert isinstance(id, int)
+ return id
+
+
+def _create_trace_annotation_post_migration(
+ conn: Connection,
+ trace_rowid: int,
+ name: str,
+ label: str,
+ score: float,
+ explanation: str,
+ metadata: str,
+ annotator_kind: str,
+ user_id: Any,
+ identifier: str,
+ source: str,
+) -> int:
+ id = conn.execute(
+ text(
+ """
+ INSERT INTO trace_annotations (
+ trace_rowid, name, label, score, explanation,
+ metadata, annotator_kind, user_id, identifier, source
+ )
+ VALUES (
+ :trace_rowid, :name, :label, :score, :explanation,
+ :metadata, :annotator_kind, :user_id, :identifier, :source
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "trace_rowid": trace_rowid,
+ "name": name,
+ "label": label,
+ "score": score,
+ "explanation": explanation,
+ "metadata": metadata,
+ "annotator_kind": annotator_kind,
+ "user_id": user_id,
+ "identifier": identifier,
+ "source": source,
+ },
+ ).scalar()
+ assert isinstance(id, int)
+ return id
+
+
+def _create_span_annotation_post_migration(
+ conn: Connection,
+ span_rowid: int,
+ name: str,
+ label: str,
+ score: float,
+ explanation: str,
+ metadata: str,
+ annotator_kind: str,
+ user_id: Any,
+ identifier: str,
+ source: str,
+) -> int:
+ id = conn.execute(
+ text(
+ """
+ INSERT INTO span_annotations (
+ span_rowid, name, label, score, explanation,
+ metadata, annotator_kind, user_id, identifier, source
+ )
+ VALUES (
+ :span_rowid, :name, :label, :score, :explanation,
+ :metadata, :annotator_kind, :user_id, :identifier, :source
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "span_rowid": span_rowid,
+ "name": name,
+ "label": label,
+ "score": score,
+ "explanation": explanation,
+ "metadata": metadata,
+ "annotator_kind": annotator_kind,
+ "user_id": user_id,
+ "identifier": identifier,
+ "source": source,
+ },
+ ).scalar()
+ assert isinstance(id, int)
+ return id
+
+
+def _create_document_annotation_post_migration(
+ conn: Connection,
+ span_rowid: int,
+ document_position: int,
+ name: str,
+ label: str,
+ score: float,
+ explanation: str,
+ metadata: str,
+ annotator_kind: str,
+ user_id: Any,
+ identifier: str,
+ source: str,
+) -> int:
+ id = conn.execute(
+ text(
+ """
+ INSERT INTO document_annotations (
+ span_rowid, document_position, name, label, score, explanation,
+ metadata, annotator_kind, user_id, identifier, source
+ )
+ VALUES (
+ :span_rowid, :document_position, :name, :label, :score, :explanation,
+ :metadata, :annotator_kind, :user_id, :identifier, :source
+ )
+ RETURNING id
+ """
+ ),
+ {
+ "span_rowid": span_rowid,
+ "document_position": document_position,
+ "name": name,
+ "label": label,
+ "score": score,
+ "explanation": explanation,
+ "metadata": metadata,
+ "annotator_kind": annotator_kind,
+ "user_id": user_id,
+ "identifier": identifier,
+ "source": source,
+ },
+ ).scalar()
+ assert isinstance(id, int)
+ return id
+
+
+def _get_sqlite_table_info(conn: Connection, table_name: str) -> str:
+ table_info = conn.execute(
+ text(
+ """
+ SELECT sql FROM sqlite_master
+ WHERE type='table' AND name=:table_name;
+ """
+ ),
+ {"table_name": table_name},
+ ).scalar()
+ assert isinstance(table_info, str)
+ return table_info
+
+
+def _get_postgres_table_info(conn: Connection, table_name: str) -> dict[str, Any]:
+ table_info = conn.execute(
+ text(
+ """
+ SELECT json_build_object(
+ 'table_name', t.table_name,
+ 'columns', (
+ SELECT json_object_agg(
+ c.column_name,
+ json_build_object(
+ 'data_type', c.data_type,
+ 'is_nullable', c.is_nullable,
+ 'ordinal_position', c.ordinal_position
+ )
+ )
+ FROM information_schema.columns c
+ WHERE c.table_name = :table_name
+ AND c.table_schema = current_schema()
+ ),
+ 'constraints', (
+ SELECT json_object_agg(
+ tc.constraint_name,
+ json_build_object(
+ 'constraint_type', tc.constraint_type,
+ 'column_names', (
+ SELECT json_agg(kcu.column_name ORDER BY kcu.position_in_unique_constraint NULLS FIRST, kcu.ordinal_position)
+ FROM information_schema.key_column_usage kcu
+ WHERE tc.constraint_name = kcu.constraint_name
+ AND tc.table_schema = kcu.table_schema
+ AND tc.table_name = kcu.table_name
+ )
+ )
+ )
+ FROM information_schema.table_constraints tc
+ WHERE tc.table_name = :table_name
+ AND tc.table_schema = current_schema()
+ )
+ ) AS table_structure
+ FROM information_schema.tables t
+ WHERE t.table_name = :table_name
+ AND t.table_schema = current_schema()
+ LIMIT 1;
+ """ # noqa: E501
+ ),
+ {"table_name": table_name},
+ ).scalar()
+ assert isinstance(table_info, dict)
+ return table_info
diff --git a/tests/integration/db_migrations/test_up_and_down_migrations.py b/tests/integration/db_migrations/test_up_and_down_migrations.py
index 5bea25777d..9879cea3c9 100644
--- a/tests/integration/db_migrations/test_up_and_down_migrations.py
+++ b/tests/integration/db_migrations/test_up_and_down_migrations.py
@@ -293,3 +293,18 @@ def test_up_and_down_migrations(
_up(_engine, _alembic_config, "bc8fea3c2bc8")
_down(_engine, _alembic_config, "4ded9e43755f")
_up(_engine, _alembic_config, "bc8fea3c2bc8")
+
+ for _ in range(2):
+ _up(_engine, _alembic_config, "2f9d1a65945f")
+ _down(_engine, _alembic_config, "bc8fea3c2bc8")
+ _up(_engine, _alembic_config, "2f9d1a65945f")
+
+ for _ in range(2):
+ _up(_engine, _alembic_config, "bb8139330879")
+ _down(_engine, _alembic_config, "2f9d1a65945f")
+ _up(_engine, _alembic_config, "bb8139330879")
+
+ for _ in range(2):
+ _up(_engine, _alembic_config, "8a3764fe7f1a")
+ _down(_engine, _alembic_config, "bb8139330879")
+ _up(_engine, _alembic_config, "8a3764fe7f1a")
diff --git a/tests/integration/projects/conftest.py b/tests/integration/projects/conftest.py
deleted file mode 100644
index bbebde91d7..0000000000
--- a/tests/integration/projects/conftest.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import os
-import secrets
-from collections.abc import Iterator
-from contextlib import ExitStack
-from typing import Any
-from unittest import mock
-
-import pytest
-from faker import Faker
-from phoenix.auth import DEFAULT_SECRET_LENGTH
-from phoenix.config import (
- ENV_PHOENIX_DISABLE_RATE_LIMIT,
- ENV_PHOENIX_ENABLE_AUTH,
- ENV_PHOENIX_SECRET,
-)
-
-from .._helpers import _Secret, _server
-
-
-@pytest.fixture(scope="module")
-def _secret(
- _env_phoenix_sql_database_url: Any,
-) -> _Secret:
- return secrets.token_hex(DEFAULT_SECRET_LENGTH)
-
-
-@pytest.fixture(autouse=True, scope="module")
-def _app(
- _ports: Iterator[int],
- _secret: _Secret,
- _env_phoenix_sql_database_url: Any,
- _fake: Faker,
-) -> Iterator[None]:
- values = (
- (ENV_PHOENIX_ENABLE_AUTH, "true"),
- (ENV_PHOENIX_DISABLE_RATE_LIMIT, "true"),
- (ENV_PHOENIX_SECRET, _secret),
- )
- with ExitStack() as stack:
- stack.enter_context(mock.patch.dict(os.environ, values))
- stack.enter_context(_server())
- yield
diff --git a/tests/integration/prompts/conftest.py b/tests/integration/prompts/conftest.py
deleted file mode 100644
index 5e1cfabe0d..0000000000
--- a/tests/integration/prompts/conftest.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import os
-import secrets
-from collections.abc import Iterator
-from contextlib import ExitStack
-from typing import Any
-from unittest import mock
-
-import pytest
-from faker import Faker
-from phoenix.auth import DEFAULT_SECRET_LENGTH
-from phoenix.config import (
- ENV_PHOENIX_ADMIN_SECRET,
- ENV_PHOENIX_DISABLE_RATE_LIMIT,
- ENV_PHOENIX_ENABLE_AUTH,
- ENV_PHOENIX_SECRET,
-)
-
-from .._helpers import _Secret, _server
-
-
-@pytest.fixture(scope="module")
-def _secret(
- _env_phoenix_sql_database_url: Any,
-) -> _Secret:
- return secrets.token_hex(DEFAULT_SECRET_LENGTH)
-
-
-@pytest.fixture(autouse=True, scope="module")
-def _app(
- _ports: Iterator[int],
- _secret: _Secret,
- _env_phoenix_sql_database_url: Any,
- _fake: Faker,
-) -> Iterator[None]:
- values = (
- (ENV_PHOENIX_ENABLE_AUTH, "true"),
- (ENV_PHOENIX_DISABLE_RATE_LIMIT, "true"),
- (ENV_PHOENIX_SECRET, _secret),
- (ENV_PHOENIX_ADMIN_SECRET, secrets.token_hex(DEFAULT_SECRET_LENGTH)),
- )
- with ExitStack() as stack:
- stack.enter_context(mock.patch.dict(os.environ, values))
- stack.enter_context(_server())
- yield
diff --git a/tests/unit/db/insertion/test_helpers.py b/tests/unit/db/insertion/test_helpers.py
index cde9ebbe89..c2be25655d 100644
--- a/tests/unit/db/insertion/test_helpers.py
+++ b/tests/unit/db/insertion/test_helpers.py
@@ -1,5 +1,4 @@
from asyncio import sleep
-from datetime import datetime
import pytest
from sqlalchemy import insert, select
@@ -68,109 +67,37 @@ async def test_handles_conflicts_in_expected_manner(
db: DbSessionFactory,
) -> None:
async with db() as session:
- project_rowid = await session.scalar(
- insert(models.Project).values(dict(name="abc")).returning(models.Project.id)
+ project_id = await session.scalar(
+ insert(models.Project)
+ .values(dict(name="abc", description="initial description"))
+ .returning(models.Project.id)
)
- trace_rowid = await session.scalar(
- insert(models.Trace)
- .values(
- dict(
- project_rowid=project_rowid,
- trace_id="xyz",
- start_time=datetime.now(),
- end_time=datetime.now(),
- )
- )
- .returning(models.Trace.id)
- )
- record = await session.scalar(
- insert(models.TraceAnnotation)
- .values(
- dict(
- name="uvw",
- trace_rowid=trace_rowid,
- annotator_kind="LLM",
- score=12,
- label="ijk",
- metadata_={"1": "2"},
- )
- )
- .returning(models.TraceAnnotation)
+ project_record = await session.scalar(
+ select(models.Project).where(models.Project.id == project_id)
)
- anno = await session.scalar(
- select(models.TraceAnnotation)
- .where(models.TraceAnnotation.trace_rowid == trace_rowid)
- .order_by(models.TraceAnnotation.created_at)
- )
- assert anno is not None
- assert record is not None
- assert anno.id == record.id
- assert anno.created_at == record.created_at
- assert anno.name == record.name
- assert anno.trace_rowid == record.trace_rowid
- assert anno.updated_at == record.updated_at
- assert anno.score == record.score
- assert anno.label == record.label
- assert anno.explanation == record.explanation
- assert anno.metadata_ == record.metadata_
-
- await sleep(1) # increment `updated_at` by 1 second
+ assert project_record is not None
async with db() as session:
dialect = SupportedSQLDialect(session.bind.dialect.name)
+ new_values = dict(name="abc", description="updated description")
+ await sleep(1)
await session.execute(
insert_on_conflict(
- dict(
- name="uvw",
- trace_rowid=trace_rowid,
- annotator_kind="LLM",
- score=None,
- metadata_={},
- ),
- dict(
- name="rst",
- trace_rowid=trace_rowid,
- annotator_kind="LLM",
- score=12,
- metadata_={"1": "2"},
- ),
- dict(
- name="uvw",
- trace_rowid=trace_rowid,
- annotator_kind="HUMAN",
- score=21,
- metadata_={"2": "1"},
- ),
+ new_values,
dialect=dialect,
- table=models.TraceAnnotation,
- unique_by=("name", "trace_rowid"),
+ table=models.Project,
+ unique_by=("name",),
on_conflict=on_conflict,
)
)
- annos = list(
- await session.scalars(
- select(models.TraceAnnotation)
- .where(models.TraceAnnotation.trace_rowid == trace_rowid)
- .order_by(models.TraceAnnotation.created_at)
- )
+ updated_project = await session.scalar(
+ select(models.Project).where(models.Project.id == project_id)
)
- assert len(annos) == 2
- anno = annos[0]
- assert anno.id == record.id
- assert anno.created_at == record.created_at
- assert anno.name == record.name
- assert anno.trace_rowid == record.trace_rowid
+ assert updated_project is not None
+
if on_conflict is OnConflict.DO_NOTHING:
- assert anno.updated_at == record.updated_at
- assert anno.annotator_kind == record.annotator_kind
- assert anno.score == record.score
- assert anno.label == record.label
- assert anno.explanation == record.explanation
- assert anno.metadata_ == record.metadata_
+ assert updated_project.description == "initial description"
+ assert updated_project.updated_at == project_record.updated_at
else:
- assert anno.updated_at > record.updated_at
- assert anno.annotator_kind != record.annotator_kind
- assert anno.score == 21
- assert anno.label is None
- assert anno.explanation is None
- assert anno.metadata_ == {"2": "1"}
+ assert updated_project.description == "updated description"
+ assert updated_project.updated_at > project_record.updated_at
diff --git a/tests/unit/db/test_facilitator.py b/tests/unit/db/test_facilitator.py
index 98d3f1e87f..6f6793cbea 100644
--- a/tests/unit/db/test_facilitator.py
+++ b/tests/unit/db/test_facilitator.py
@@ -1,13 +1,23 @@
-from secrets import token_bytes
+from secrets import token_bytes, token_hex
import pytest
+import sqlalchemy as sa
from _pytest.monkeypatch import MonkeyPatch
from sqlalchemy import select
from phoenix.config import ENV_PHOENIX_ADMINS
from phoenix.db import models
from phoenix.db.enums import UserRole
-from phoenix.db.facilitator import _ensure_admins, _ensure_enums
+from phoenix.db.facilitator import (
+ _ensure_admins,
+ _ensure_default_project_trace_retention_policy,
+ _ensure_enums,
+)
+from phoenix.db.types.trace_retention import (
+ MaxDaysRule,
+ TraceRetentionCronExpression,
+ TraceRetentionRule,
+)
from phoenix.server.types import DbSessionFactory
@@ -101,3 +111,38 @@ async def test_ensure_startup_admins(
assert user.username == "Franklin, Benjamin"
assert user.user_role_id == admin_role_id
assert user.reset_password
+
+
+class TestEnsureDefaultProjectTraceRetentionPolicy:
+ async def test_default_project_trace_retention_policy_insertion(
+ self,
+ db: DbSessionFactory,
+ ) -> None:
+ stmt = sa.select(models.ProjectTraceRetentionPolicy)
+ async with db() as session:
+ policies = list(await session.scalars(stmt))
+ assert len(policies) == 0
+ for _ in range(2):
+ await _ensure_default_project_trace_retention_policy(db)
+ async with db() as session:
+ policies = list(await session.scalars(stmt))
+ assert len(policies) == 1
+ policy = policies[0]
+ assert policy.id == 0
+ assert policy.name == "Default"
+ assert policy.cron_expression.root == "0 0 * * 0"
+ assert policy.rule.root == MaxDaysRule(max_days=0)
+ assert not bool(policy.rule) # rule is dormant by default
+
+ # Should be able to insert new policies without error. This could be an issue for postgres
+ # if the default policy is inserted at id=1 without incrementing the serial so the next
+ # insert would have id=1 and fail.
+ policy = models.ProjectTraceRetentionPolicy(
+ name=token_hex(8),
+ cron_expression=TraceRetentionCronExpression(root="0 0 * * 0"),
+ rule=TraceRetentionRule(root=MaxDaysRule(max_days=0)),
+ )
+ async with db() as session:
+ session.add(policy)
+ await session.flush()
+ assert policy.id == 1
diff --git a/tests/unit/db/types/test_annotation_configs.py b/tests/unit/db/types/test_annotation_configs.py
new file mode 100644
index 0000000000..344e9bcdc6
--- /dev/null
+++ b/tests/unit/db/types/test_annotation_configs.py
@@ -0,0 +1,80 @@
+from contextlib import nullcontext
+from typing import Any, ContextManager
+
+import pytest
+
+from phoenix.db.types.annotation_configs import (
+ AnnotationType,
+ CategoricalAnnotationConfig,
+ CategoricalAnnotationValue,
+ ContinuousAnnotationConfig,
+ OptimizationDirection,
+)
+
+
+@pytest.mark.parametrize(
+ "values, expectation",
+ (
+ pytest.param(
+ [
+ CategoricalAnnotationValue(label="A", score=1.0),
+ ],
+ nullcontext(),
+ id="valid-values-pass-validation",
+ ),
+ pytest.param(
+ [],
+ pytest.raises(ValueError, match="Values must be non-empty"),
+ id="empty-values-raise-validation-error",
+ ),
+ pytest.param(
+ [
+ CategoricalAnnotationValue(label="A", score=1.0),
+ CategoricalAnnotationValue(label="A", score=2.0),
+ ],
+ pytest.raises(
+ ValueError,
+ match='Values for categorical annotation config has duplicate label: "A"',
+ ),
+ id="duplicate-labels-raise-validation-error",
+ ),
+ ),
+)
+def test_categorical_annotation_config_correctly_validates_values(
+ values: list[CategoricalAnnotationValue],
+ expectation: ContextManager[Any],
+) -> None:
+ with expectation:
+ CategoricalAnnotationConfig(
+ type=AnnotationType.CATEGORICAL.value,
+ values=values,
+ optimization_direction=OptimizationDirection.MAXIMIZE,
+ )
+
+
+def test_cannot_create_categorical_annotation_config_with_empty_label() -> None:
+ with pytest.raises(ValueError, match="Label must be non-empty"):
+ CategoricalAnnotationConfig(
+ type=AnnotationType.CATEGORICAL.value,
+ values=[CategoricalAnnotationValue(label="", score=1.0)],
+ optimization_direction=OptimizationDirection.MAXIMIZE,
+ )
+
+
+@pytest.mark.parametrize(
+ ("lower_bound", "upper_bound"),
+ [
+ pytest.param(1.0, 0.0, id="lower-bound-greater-than-upper-bound"),
+ pytest.param(1.0, 1.0, id="lower-bound-equals-upper-bound"),
+ ],
+)
+def test_cannot_create_continuous_annotation_config_with_invalid_bounds(
+ lower_bound: float, upper_bound: float
+) -> None:
+ with pytest.raises(ValueError, match="Lower bound must be strictly less than upper bound"):
+ ContinuousAnnotationConfig(
+ type=AnnotationType.CONTINUOUS.value,
+ lower_bound=lower_bound,
+ upper_bound=upper_bound,
+ optimization_direction=OptimizationDirection.MAXIMIZE,
+ )
diff --git a/tests/unit/db/types/test_trace_retention.py b/tests/unit/db/types/test_trace_retention.py
new file mode 100644
index 0000000000..5ed8f664bf
--- /dev/null
+++ b/tests/unit/db/types/test_trace_retention.py
@@ -0,0 +1,440 @@
+from collections import defaultdict
+from contextlib import nullcontext
+from datetime import datetime, timedelta, timezone
+from secrets import token_hex
+from typing import Any, Dict, Type, Union
+
+import pytest
+import sqlalchemy as sa
+from freezegun import freeze_time
+from pydantic import ValidationError
+
+from phoenix.db import models
+from phoenix.db.types.trace_retention import (
+ MaxCountRule,
+ MaxDaysOrCountRule,
+ MaxDaysRule,
+ TraceRetentionRule,
+ _MaxCount,
+ _MaxDays,
+ _time_of_next_run,
+)
+from phoenix.server.types import DbSessionFactory
+
+
+class TestMaxDaysMixin:
+ @pytest.mark.parametrize(
+ "max_days,is_valid",
+ [
+ pytest.param(0, True, id="zero_days"),
+ pytest.param(0.5, True, id="half_days"),
+ pytest.param(-10, False, id="negative_days"),
+ ],
+ )
+ def test_init(self, max_days: float, is_valid: bool) -> None:
+        """Test that _MaxDays accepts valid max_days values and rejects invalid ones."""
+ with nullcontext() if is_valid else pytest.raises(ValidationError):
+ _MaxDays(max_days=max_days)
+
+ @pytest.mark.parametrize(
+ "max_days,expected",
+ [
+ pytest.param(0, "false", id="zero_days"),
+ pytest.param(0.5, "traces.start_time < '2023-01-15 00:00:00+00:00'", id="half_days"),
+ ],
+ )
+    def test_filter(self, max_days: float, expected: str) -> None:
+ """Test that max_days_filter generates correct SQL query."""
+ rule: _MaxDays = _MaxDays(max_days=max_days)
+ with freeze_time("2023-01-15 12:00:00", tz_offset=0):
+ actual = str(rule.max_days_filter.compile(compile_kwargs={"literal_binds": True}))
+ assert actual == expected
+
+
+class TestMaxCountMixin:
+ @pytest.mark.parametrize(
+ "max_count,is_valid",
+ [
+ pytest.param(0, True, id="zero_count"),
+ pytest.param(10, True, id="ten_count"),
+ pytest.param(0.5, False, id="float_count"),
+ pytest.param(-10, False, id="negative_count"),
+ ],
+ )
+ def test_init(self, max_count: int, is_valid: bool) -> None:
+        """Test that _MaxCount accepts valid max_count values and rejects invalid ones."""
+ with nullcontext() if is_valid else pytest.raises(ValidationError):
+ _MaxCount(max_count=max_count)
+
+ @pytest.mark.parametrize(
+ "max_count,expected",
+ [
+ pytest.param(0, "false", id="zero_count"),
+ pytest.param(
+ 10,
+ "traces.start_time < (SELECT traces.start_time FROM traces "
+ "ORDER BY traces.start_time DESC LIMIT 1 OFFSET 9)",
+ id="ten_count",
+ ),
+ ],
+ )
+ def test_filter(self, max_count: int, expected: str) -> None:
+ """Test that max_count_filter generates correct SQL query."""
+ rule: _MaxCount = _MaxCount(max_count=max_count)
+ actual = str(rule.max_count_filter.compile(compile_kwargs={"literal_binds": True}))
+ actual = " ".join(actual.split())
+ assert actual == expected
+
+
+class TestTraceRetentionRuleMaxDays:
+ async def test_delete_traces(self, db: DbSessionFactory) -> None:
+ projects: defaultdict[int, list[int]] = defaultdict(list)
+ start_time = datetime.now(timezone.utc)
+ async with db() as session:
+ for _ in range(5):
+ project = models.Project(name=token_hex(8))
+ session.add(project)
+ await session.flush()
+ for i in range(5):
+ trace = models.Trace(
+ project_rowid=project.id,
+ trace_id=token_hex(16),
+ start_time=start_time - timedelta(days=i),
+ end_time=datetime.now(timezone.utc),
+ )
+ session.add(trace)
+ await session.flush()
+ projects[project.id].append(trace.id)
+ rule = MaxDaysRule(max_days=1)
+ async with db() as session:
+ await rule.delete_traces(session, projects.keys())
+ async with db() as session:
+ remaining_traces = await session.scalars(
+ sa.select(models.Trace.id).where(models.Trace.project_rowid.in_(projects.keys()))
+ )
+ # only one trace remains per project
+ assert sorted(remaining_traces.all()) == sorted(traces[0] for traces in projects.values())
+
+
+class TestTraceRetentionRuleMaxCount:
+ async def test_delete_traces(self, db: DbSessionFactory) -> None:
+ projects: defaultdict[int, list[int]] = defaultdict(list)
+ start_time = datetime.now(timezone.utc)
+ async with db() as session:
+ for _ in range(5):
+ project = models.Project(name=token_hex(8))
+ session.add(project)
+ await session.flush()
+ for i in range(5):
+ trace = models.Trace(
+ project_rowid=project.id,
+ trace_id=token_hex(16),
+ start_time=start_time - timedelta(days=i),
+ end_time=datetime.now(timezone.utc),
+ )
+ session.add(trace)
+ await session.flush()
+ projects[project.id].append(trace.id)
+ rule = MaxCountRule(max_count=1)
+ async with db() as session:
+ await rule.delete_traces(session, projects.keys())
+ async with db() as session:
+ remaining_traces = await session.scalars(
+ sa.select(models.Trace.id).where(models.Trace.project_rowid.in_(projects.keys()))
+ )
+ # only one trace remains per project
+ assert sorted(remaining_traces.all()) == sorted(traces[0] for traces in projects.values())
+
+
+class TestTraceRetentionRule:
+ @pytest.mark.parametrize(
+ "rule_data,expected_type",
+ [
+ pytest.param(
+ {"type": "max_days", "max_days": 30}, MaxDaysRule, id="max_days_serialization"
+ ),
+ pytest.param(
+ {"type": "max_count", "max_count": 100}, MaxCountRule, id="max_count_serialization"
+ ),
+ pytest.param(
+ {"type": "max_days_or_count", "max_days": 30, "max_count": 100},
+ MaxDaysOrCountRule,
+ id="max_days_or_count_serialization",
+ ),
+ ],
+ )
+ def test_discriminated_union_serialization_deserialization(
+ self,
+ rule_data: Dict[str, Any],
+ expected_type: Type[Union[MaxDaysRule, MaxCountRule, MaxDaysOrCountRule]],
+ ) -> None:
+ """Test that rules can be serialized and deserialized correctly."""
+ rule: TraceRetentionRule = TraceRetentionRule.model_validate(rule_data)
+ assert isinstance(rule.root, expected_type)
+ assert rule.model_dump() == rule_data
+
+
+@pytest.mark.parametrize(
+ "cron_expression, frozen_time, expected_time, comment",
+ [
+ pytest.param(
+ "0 * * * *",
+ "2023-01-01 14:30:00+00:00",
+ "2023-01-01 15:00:00+00:00",
+ "Every hour - next hour when current time has minutes > 0",
+ id="every-hour-next",
+ ),
+ pytest.param(
+ "0 * * * *",
+ "2023-01-01 14:00:00+00:00",
+ "2023-01-01 15:00:00+00:00",
+ "Every hour - next hour when current time is exactly at the hour",
+ id="every-hour-exact",
+ ),
+ pytest.param(
+ "0 */2 * * *",
+ "2023-01-01 13:15:00+00:00",
+ "2023-01-01 14:00:00+00:00",
+ "Every even hour with current time at odd hour",
+ id="every-even-hour",
+ ),
+ pytest.param(
+ "0 */2 * * *",
+ "2023-01-01 14:15:00+00:00",
+ "2023-01-01 16:00:00+00:00",
+ "Every even hour with current time at even hour",
+ id="every-even-hour-2",
+ ),
+ pytest.param(
+ "0 9-17 * * 1-5",
+ "2023-01-01 17:30:00+00:00", # Sunday
+ "2023-01-02 09:00:00+00:00", # Monday
+ "Business hours (9-17) on weekdays, starting from weekend",
+ id="business-hours-weekend",
+ ),
+ pytest.param(
+ "0 9-17 * * 1-5",
+ "2023-01-02 08:30:00+00:00", # Monday
+ "2023-01-02 09:00:00+00:00", # Monday
+ "Business hours (9-17) on weekdays, before business hours",
+ id="business-hours-before",
+ ),
+ pytest.param(
+ "0 9-17 * * 1-5",
+ "2023-01-02 13:30:00+00:00", # Monday
+ "2023-01-02 14:00:00+00:00", # Monday
+ "Business hours (9-17) on weekdays, during business hours",
+ id="business-hours-during",
+ ),
+ pytest.param(
+ "0 9-17 * * 1-5",
+ "2023-01-02 17:30:00+00:00", # Monday
+ "2023-01-03 09:00:00+00:00", # Tuesday
+ "Business hours (9-17) on weekdays, after business hours",
+ id="business-hours-after",
+ ),
+ pytest.param(
+ "0 0,12 * * *",
+ "2023-01-01 08:30:00+00:00",
+ "2023-01-01 12:00:00+00:00",
+ "Twice a day at midnight and noon, before noon",
+ id="twice-daily-before-noon",
+ ),
+ pytest.param(
+ "0 0,12 * * *",
+ "2023-01-01 13:30:00+00:00",
+ "2023-01-02 00:00:00+00:00",
+ "Twice a day at midnight and noon, after noon",
+ id="twice-daily-after-noon",
+ ),
+ pytest.param(
+ "0 3 * * 0",
+ "2023-01-02 13:30:00+00:00", # Monday
+ "2023-01-08 03:00:00+00:00", # Sunday
+ "3 AM on Sundays only, starting from Monday",
+ id="sunday-3am",
+ ),
+ ],
+)
+def test_time_of_next_run(
+ cron_expression: str,
+ frozen_time: str,
+ expected_time: str,
+ comment: str,
+) -> None:
+ """
+    Test the _time_of_next_run function with various cron expressions.
+
+ Args:
+ cron_expression: The cron expression to test
+ frozen_time: The time to freeze at for testing
+ expected_time: The expected next run time
+ comment: Description of the test case
+ """
+ with freeze_time(frozen_time):
+ actual = _time_of_next_run(cron_expression)
+ expected = datetime.fromisoformat(expected_time)
+ assert actual == expected
+
+
+@pytest.mark.parametrize(
+ "cron_expression, expected_error_msg",
+ [
+ # Tests for invalid field count
+ pytest.param(
+ "0",
+ "Invalid cron expression. Expected 5 fields",
+ id="too-few-fields",
+ ),
+ pytest.param(
+ "0 * * *",
+ "Invalid cron expression. Expected 5 fields",
+ id="missing-one-field",
+ ),
+ pytest.param(
+ "0 * * * * *",
+ "Invalid cron expression. Expected 5 fields",
+ id="too-many-fields",
+ ),
+ # Tests for invalid minute field (must be 0)
+ pytest.param(
+ "1 * * * *",
+ "Invalid cron expression. Minute field must be '0'.",
+ id="non-zero-minute",
+ ),
+ pytest.param(
+ "*/5 * * * *",
+ "Invalid cron expression. Minute field must be '0'.",
+ id="minute-with-step",
+ ),
+ pytest.param(
+ "1-59 * * * *",
+ "Invalid cron expression. Minute field must be '0'.",
+ id="minute-range",
+ ),
+ # Tests for invalid day-of-month field (must be *)
+ pytest.param(
+ "0 * 1 * *",
+ "Invalid cron expression. Day-of-month and month fields must be '*'.",
+ id="specific-day-of-month",
+ ),
+ pytest.param(
+ "0 * 1-15 * *",
+ "Invalid cron expression. Day-of-month and month fields must be '*'.",
+ id="day-of-month-range",
+ ),
+ pytest.param(
+ "0 * */2 * *",
+ "Invalid cron expression. Day-of-month and month fields must be '*'.",
+ id="day-of-month-step",
+ ),
+ # Tests for invalid month field (must be *)
+ pytest.param(
+ "0 * * 1 *",
+ "Invalid cron expression. Day-of-month and month fields must be '*'.",
+ id="specific-month",
+ ),
+ pytest.param(
+ "0 * * 1-6 *",
+ "Invalid cron expression. Day-of-month and month fields must be '*'.",
+ id="month-range",
+ ),
+ pytest.param(
+ "0 * * */3 *",
+ "Invalid cron expression. Day-of-month and month fields must be '*'.",
+ id="month-step",
+ ),
+ # Tests for invalid value ranges
+ pytest.param(
+ "0 24 * * *",
+ "Value 24 out of range (0-23)",
+ id="hour-out-of-range-high",
+ ),
+ pytest.param(
+ "0 -1 * * *",
+ "Invalid range format: -1",
+ id="hour-negative",
+ ),
+ pytest.param(
+ "0 * * * 7",
+ "Value 7 out of range (0-6)",
+ id="day-of-week-out-of-range",
+ ),
+ pytest.param(
+ "0 * * * -1",
+ "Invalid range format: -1",
+ id="day-of-week-negative",
+ ),
+ # Tests for invalid range formats
+ pytest.param(
+ "0 5-2 * * *",
+ "Invalid range: 5-2 (start > end)",
+ id="invalid-hour-range",
+ ),
+ pytest.param(
+ "0 * * * 6-2",
+ "Invalid range: 6-2 (start > end)",
+ id="invalid-day-range",
+ ),
+ pytest.param(
+ "0 a-b * * *",
+ "Invalid range format: a-b",
+ id="non-numeric-range",
+ ),
+ # Tests for invalid step values
+ pytest.param(
+ "0 */0 * * *",
+ "Step value must be positive: 0",
+ id="zero-step",
+ ),
+ pytest.param(
+ "0 */-2 * * *",
+ "Step value must be positive: -2",
+ id="negative-step",
+ ),
+ pytest.param(
+ "0 */a * * *",
+ "Invalid step value: a",
+ id="non-numeric-step",
+ ),
+ # Tests for invalid single values
+ pytest.param(
+ "0 a * * *",
+ "Invalid value: a",
+ id="non-numeric-hour",
+ ),
+ pytest.param(
+ "0 * * * a",
+ "Invalid value: a",
+ id="non-numeric-day",
+ ),
+ # Tests for combined invalid formats
+ pytest.param(
+ "0 1-a * * *",
+ "Invalid range format: 1-a",
+ id="invalid-range-end",
+ ),
+ pytest.param(
+ "0 a-5 * * *",
+ "Invalid range format: a-5",
+ id="invalid-range-start",
+ ),
+ pytest.param(
+ "0 5-10/a * * *",
+ "Invalid step value: a",
+ id="invalid-step-in-range",
+ ),
+ ],
+)
+def test_invalid_cron_expressions(cron_expression: str, expected_error_msg: str) -> None:
+ """
+    Test that the _time_of_next_run function correctly raises ValueErrors
+ for invalid cron expressions.
+
+ Args:
+ cron_expression: An invalid cron expression
+ expected_error_msg: The expected error message prefix
+ """
+ with pytest.raises(ValueError) as exc_info:
+ _time_of_next_run(cron_expression)
+ assert str(exc_info.value).startswith(expected_error_msg)
diff --git a/tests/unit/graphql.py b/tests/unit/graphql.py
index a1a2330478..7fad2810ee 100644
--- a/tests/unit/graphql.py
+++ b/tests/unit/graphql.py
@@ -14,6 +14,9 @@ class GraphQLError(Exception):
def __init__(self, message: str) -> None:
self.message = message
+ def __repr__(self) -> str:
+ return f'GraphQLError(message="{self.message}")'
+
@dataclass
class GraphQLExecutionResult:
diff --git a/tests/unit/server/api/dataloaders/conftest.py b/tests/unit/server/api/dataloaders/conftest.py
index ed48674ffd..3fa2d65549 100644
--- a/tests/unit/server/api/dataloaders/conftest.py
+++ b/tests/unit/server/api/dataloaders/conftest.py
@@ -60,6 +60,9 @@ async def data_for_testing_dataloaders(
score=random(),
metadata_={},
annotator_kind="LLM",
+ identifier="",
+ source="APP",
+ user_id=None,
)
)
for k in range(num_spans_per_trace):
@@ -106,5 +109,333 @@ async def data_for_testing_dataloaders(
score=random(),
metadata_={},
annotator_kind="LLM",
+ identifier="",
+ source="APP",
+ user_id=None,
)
)
+
+
+@pytest.fixture
+async def data_with_multiple_annotations(db: DbSessionFactory) -> None:
+ """
+ Creates one project, one trace, and three spans for testing "quality" annotations.
+
+ Span 1: two "good" annotations (scores: 0.85, 0.95) and one "bad" (0.3).
+ Span 2: one "good" (0.85) and one "bad" (0.3).
+ Span 3: one "good" (0.85).
+
+ The fixture uses fixed values for span attributes so that non-null constraints are met.
+ """
+ orig_time = datetime.fromisoformat("2021-01-01T00:00:00.000+00:00")
+ async with db() as session:
+ project_id = await session.scalar(
+ insert(models.Project).values(name="simple_multiple").returning(models.Project.id)
+ )
+ trace_id = await session.scalar(
+ insert(models.Trace)
+ .values(
+ trace_id="trace1",
+ project_rowid=project_id,
+ start_time=orig_time,
+ end_time=orig_time + timedelta(minutes=1),
+ )
+ .returning(models.Trace.id)
+ )
+ span_ids = []
+ for i in range(3):
+ span_id_val = await session.scalar(
+ insert(models.Span)
+ .values(
+ trace_rowid=trace_id,
+ span_id=f"span{i+1}",
+ name=f"span{i+1}",
+ parent_id="",
+ span_kind="UNKNOWN",
+ start_time=orig_time + timedelta(seconds=10 * i),
+ end_time=orig_time + timedelta(seconds=10 * i + 5),
+ attributes={"llm": {"token_count": {"prompt": 100, "completion": 100}}},
+ events=[], # ensure non-null list
+ status_code="OK",
+ status_message="okay",
+ cumulative_error_count=0,
+ cumulative_llm_token_count_prompt=0,
+ cumulative_llm_token_count_completion=0,
+ llm_token_count_prompt=100,
+ llm_token_count_completion=100,
+ )
+ .returning(models.Span.id)
+ )
+ span_ids.append(span_id_val)
+ # Span 1 annotations
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="quality",
+ span_rowid=span_ids[0],
+ label="good",
+ score=0.85,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_1_annotation_1",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="quality",
+ span_rowid=span_ids[0],
+ label="good",
+ score=0.95,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_1_annotation_2",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="quality",
+ span_rowid=span_ids[0],
+ label="bad",
+ score=0.3,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_1_annotation_3",
+ source="APP",
+ user_id=None,
+ )
+ )
+ # Span 2 annotations
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="quality",
+ span_rowid=span_ids[1],
+ label="good",
+ score=0.85,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_2_annotation_1",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="quality",
+ span_rowid=span_ids[1],
+ label="bad",
+ score=0.3,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_2_annotation_2",
+ source="APP",
+ user_id=None,
+ )
+ )
+ # Span 3 annotations
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="quality",
+ span_rowid=span_ids[2],
+ label="good",
+ score=0.85,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_3_annotation_1",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.commit()
+
+
+@pytest.fixture
+async def data_with_missing_labels(db: DbSessionFactory) -> None:
+ """
+ Creates one project, one trace, and three spans for testing "distribution" annotations.
+
+ Span 1: two "X" annotations (score=0.8) and one "Y" annotation (score=0.6).
+ Span 2: one "X" annotation (score=0.8).
+ Span 3: one "X" annotation (score=0.8).
+
+ Non-null constraints are satisfied by providing fixed attributes and events.
+ """
+ orig_time = datetime.fromisoformat("2021-01-01T00:00:00.000+00:00")
+ async with db() as session:
+ project_id = await session.scalar(
+ insert(models.Project).values(name="simple_missing").returning(models.Project.id)
+ )
+ trace_id = await session.scalar(
+ insert(models.Trace)
+ .values(
+ trace_id="trace_missing",
+ project_rowid=project_id,
+ start_time=orig_time,
+ end_time=orig_time + timedelta(minutes=1),
+ )
+ .returning(models.Trace.id)
+ )
+
+ span_ids = []
+ for i in range(3):
+ span_id_val = await session.scalar(
+ insert(models.Span)
+ .values(
+ trace_rowid=trace_id,
+ span_id=f"missing_span{i+1}",
+ name=f"missing_span{i+1}",
+ parent_id="",
+ span_kind="UNKNOWN",
+ start_time=orig_time + timedelta(seconds=10 * i),
+ end_time=orig_time + timedelta(seconds=10 * i + 5),
+ attributes={"llm": {"token_count": {"prompt": 100, "completion": 100}}},
+ events=[],
+ status_code="OK",
+ status_message="okay",
+ cumulative_error_count=0,
+ cumulative_llm_token_count_prompt=0,
+ cumulative_llm_token_count_completion=0,
+ llm_token_count_prompt=100,
+ llm_token_count_completion=100,
+ )
+ .returning(models.Span.id)
+ )
+ span_ids.append(span_id_val)
+ # Span 1: two "X" and one "Y"
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="distribution",
+ span_rowid=span_ids[0],
+ label="X",
+ score=0.8,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_1_annotation_1",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="distribution",
+ span_rowid=span_ids[0],
+ label="X",
+ score=0.8,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_1_annotation_2",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="distribution",
+ span_rowid=span_ids[0],
+ label="Y",
+ score=0.6,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="span_1_annotation_3",
+ source="APP",
+ user_id=None,
+ )
+ )
+ # Span 2: only "X"
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="distribution",
+ span_rowid=span_ids[1],
+ label="X",
+ score=0.8,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="",
+ source="APP",
+ user_id=None,
+ )
+ )
+ # Span 3: only "X"
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="distribution",
+ span_rowid=span_ids[2],
+ label="X",
+ score=0.8,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier="",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.commit()
+
+
+@pytest.fixture
+async def data_with_null_labels(db: DbSessionFactory) -> None:
+ """
+ Creates one project, one trace, and three spans for testing annotations with NULL labels.
+
+ All annotations have `label=None` but valid score values, ensuring that the
+ loader can handle the case where `total_label_count == 0` (which previously
+ caused division-by-zero errors).
+ """
+ orig_time = datetime.fromisoformat("2021-01-01T00:00:00.000+00:00")
+ async with db() as session:
+ project_id = await session.scalar(
+ insert(models.Project).values(name="null_labels").returning(models.Project.id)
+ )
+ trace_id = await session.scalar(
+ insert(models.Trace)
+ .values(
+ trace_id="trace_null_labels",
+ project_rowid=project_id,
+ start_time=orig_time,
+ end_time=orig_time + timedelta(minutes=1),
+ )
+ .returning(models.Trace.id)
+ )
+
+ # Desired per-span average score is 0.7 for each span (0.5+0.9)/2 etc.
+ span_scores: list[list[float]] = [[0.5, 0.9], [0.6, 0.8], [0.4, 1.0]]
+ for ii, scores in enumerate(span_scores, start=1):
+ span_id_val = await session.scalar(
+ insert(models.Span)
+ .values(
+ trace_rowid=trace_id,
+ span_id=f"null_span{ii}",
+ name=f"null_span{ii}",
+ parent_id="",
+ span_kind="UNKNOWN",
+ start_time=orig_time + timedelta(seconds=10 * ii),
+ end_time=orig_time + timedelta(seconds=10 * ii + 5),
+ attributes={"llm": {"token_count": {"prompt": 100, "completion": 100}}},
+ events=[],
+ status_code="OK",
+ status_message="okay",
+ cumulative_error_count=0,
+ cumulative_llm_token_count_prompt=0,
+ cumulative_llm_token_count_completion=0,
+ llm_token_count_prompt=100,
+ llm_token_count_completion=100,
+ )
+ .returning(models.Span.id)
+ )
+ for jj, score in enumerate(scores):
+ await session.execute(
+ insert(models.SpanAnnotation).values(
+ name="unlabeled",
+ span_rowid=span_id_val,
+ label=None, # NULL label
+ score=score,
+ metadata_={},
+ annotator_kind="LLM",
+ identifier=f"span_{ii}_annotation_{jj+1}",
+ source="APP",
+ user_id=None,
+ )
+ )
+ await session.commit()
diff --git a/tests/unit/server/api/dataloaders/test_annotation_summaries.py b/tests/unit/server/api/dataloaders/test_annotation_summaries.py
index 9ca00d6c77..1234399c17 100644
--- a/tests/unit/server/api/dataloaders/test_annotation_summaries.py
+++ b/tests/unit/server/api/dataloaders/test_annotation_summaries.py
@@ -77,3 +77,130 @@ async def test_evaluation_summaries(
summary.mean_score(), # type: ignore[call-arg]
)
assert actual == pytest.approx(expected, 1e-7)
+
+
+async def test_multiple_annotations_score_weighting(
+ db: DbSessionFactory,
+ data_with_multiple_annotations: None,
+) -> None:
+ # Using the "quality" annotations fixture.
+ start_time = datetime.fromisoformat("2021-01-01T00:00:00.000+00:00")
+ end_time = datetime.fromisoformat("2021-01-01T01:00:00.000+00:00")
+ # Based on the fixture:
+ # Span 1: avg score = (0.85+0.95+0.3)/3 = 0.70
+ # Span 2: avg score = (0.85+0.3)/2 = 0.575
+ # Span 3: avg score = 0.85
+ # Overall average score = (0.70+0.575+0.85)/3 ≈ 0.70833.
+ expected_avg_score = 0.70833
+
+ async with db() as session:
+ project_id = await session.scalar(
+ select(models.Project.id).where(models.Project.name == "simple_multiple")
+ )
+ assert isinstance(project_id, int)
+
+ loader = AnnotationSummaryDataLoader(db)
+ result = await loader.load(
+ (
+ "span",
+ project_id,
+ TimeRange(start=start_time, end=end_time),
+ None,
+ "quality",
+ )
+ )
+ assert result is not None
+ assert result.mean_score() == pytest.approx(expected_avg_score, rel=1e-4) # type: ignore[call-arg]
+
+ # Expected fractions:
+ # "good": (2/3 + 1/2 + 1) / 3 ≈ 0.722
+ # "bad": (1/3 + 1/2 + 0) / 3 ≈ 0.277
+ label_fracs = {lf.label: lf.fraction for lf in result.label_fractions()} # type: ignore[call-arg, attr-defined]
+ assert label_fracs["good"] == pytest.approx(0.722, rel=1e-2)
+ assert label_fracs["bad"] == pytest.approx(0.277, rel=1e-2)
+ assert abs(label_fracs["good"] + label_fracs["bad"] - 1.0) < 1e-2
+
+
+async def test_missing_label_aggregation(
+ db: DbSessionFactory,
+ data_with_missing_labels: None,
+) -> None:
+ # Using the "distribution" annotations fixture.
+ start_time = datetime.fromisoformat("2021-01-01T00:00:00.000+00:00")
+ end_time = datetime.fromisoformat("2021-01-01T01:00:00.000+00:00")
+ # Based on the fixture:
+ # Span 1: For "distribution": "X" fraction = 2/3, "Y" fraction = 1/3.
+ # Span 2: "X" fraction = 1.
+ # Span 3: "X" fraction = 1.
+ # Overall label fractions for "distribution" annotation:
+ # "X": (0.667 + 1 + 1) / 3 ≈ 0.889,
+ # "Y": (0.333 + 0 + 0) / 3 ≈ 0.111.
+ loader = AnnotationSummaryDataLoader(db)
+
+ async with db() as session:
+ project_id = await session.scalar(
+ select(models.Project.id).where(models.Project.name == "simple_missing")
+ )
+ assert isinstance(project_id, int)
+ result = await loader.load(
+ (
+ "span",
+ project_id,
+ TimeRange(start=start_time, end=end_time),
+ None,
+ "distribution",
+ )
+ )
+ assert result is not None
+
+ label_fracs = {lf.label: lf.fraction for lf in result.label_fractions()} # type: ignore[call-arg, attr-defined]
+ assert label_fracs["X"] == pytest.approx(0.889, rel=1e-2)
+ assert label_fracs["Y"] == pytest.approx(0.111, rel=1e-2)
+ assert abs(sum(label_fracs.values()) - 1.0) < 1e-7
+
+ # The "distribution" annotation is grouped as follows:
+ # Span 1: .8, .8, .6
+ # Span 2: .8
+ # Span 3: .8
+ # Overall average = ((0.8 + 0.8 + 0.6) / 3 + 0.8 + 0.8) / 3 ≈ 0.777
+ assert result.mean_score() == pytest.approx(0.777, rel=1e-2) # type: ignore[call-arg]
+
+
+async def test_null_label_handling(
+ db: DbSessionFactory,
+ data_with_null_labels: None,
+) -> None:
+ """Ensure that the loader does not raise when all labels are NULL.
+
+ The expected behavior is:
+ * label_fractions() returns an empty list.
+ * mean_score() computes the per-entity average score correctly.
+ """
+ start_time = datetime.fromisoformat("2021-01-01T00:00:00.000+00:00")
+ end_time = datetime.fromisoformat("2021-01-01T01:00:00.000+00:00")
+
+ async with db() as session:
+ project_id = await session.scalar(
+ select(models.Project.id).where(models.Project.name == "null_labels")
+ )
+ assert isinstance(project_id, int)
+
+ loader = AnnotationSummaryDataLoader(db)
+ result = await loader.load(
+ (
+ "span",
+ project_id,
+ TimeRange(start=start_time, end=end_time),
+ None,
+ "unlabeled",
+ )
+ )
+
+ # Should not be None and should have no label fractions.
+ assert result is not None
+ assert result.label_fractions() == [] # type: ignore
+
+ # Each span has 2 scores. Compute expected overall average.
+ # Span averages: (0.5+0.9)/2 = 0.7, (0.6+0.8)/2 = 0.7, (0.4+1.0)/2 = 0.7.
+ expected_avg = 0.7
+ assert result.mean_score() == pytest.approx(expected_avg, rel=1e-4) # type: ignore[call-arg]
diff --git a/tests/unit/server/api/input_types/test_SpanAnnotationFilter.py b/tests/unit/server/api/input_types/test_SpanAnnotationFilter.py
new file mode 100644
index 0000000000..5dc1528a70
--- /dev/null
+++ b/tests/unit/server/api/input_types/test_SpanAnnotationFilter.py
@@ -0,0 +1,349 @@
+import pytest
+from strawberry.relay import GlobalID
+
+from phoenix.db.models import SpanAnnotation
+from phoenix.server.api.input_types.SpanAnnotationFilter import (
+ SpanAnnotationFilter,
+ SpanAnnotationFilterCondition,
+ satisfies_filter,
+)
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
+
+
+@pytest.mark.parametrize(
+ "span_annotation, filter, expected_satisfies",
+ [
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(include=SpanAnnotationFilterCondition(names=["test-name"])),
+ True,
+ id="matches-included-name",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="span-annotation-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(include=SpanAnnotationFilterCondition(names=["missing-name"])),
+ False,
+ id="does-not-match-included-name",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(exclude=SpanAnnotationFilterCondition(names=["test-name"])),
+ False,
+ id="matches-excluded-name",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(exclude=SpanAnnotationFilterCondition(names=["different-name"])),
+ True,
+ id="does-not-match-excluded-name",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(sources=[AnnotationSource.API])
+ ),
+ True,
+ id="matches-included-source",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(sources=[AnnotationSource.API])
+ ),
+ False,
+ id="does-not-match-included-source",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ ),
+ SpanAnnotationFilter(
+ exclude=SpanAnnotationFilterCondition(sources=[AnnotationSource.APP])
+ ),
+ False,
+ id="matches-excluded-source",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(
+ exclude=SpanAnnotationFilterCondition(sources=[AnnotationSource.APP])
+ ),
+ True,
+ id="does-not-match-excluded-source",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=1,
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(user_ids=[GlobalID("User", "1")])
+ ),
+ True,
+ id="matches-included-user-id",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=1,
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(user_ids=[GlobalID("User", "2")])
+ ),
+ False,
+ id="does-not-match-included-user-id",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=None,
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(user_ids=[GlobalID("User", "1")])
+ ),
+ False,
+ id="does-not-match-included-user-id-with-null-user-id",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=1,
+ ),
+ SpanAnnotationFilter(
+ exclude=SpanAnnotationFilterCondition(user_ids=[GlobalID("User", "1")])
+ ),
+ False,
+ id="matches-excluded-user-id",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=2,
+ ),
+ SpanAnnotationFilter(
+ exclude=SpanAnnotationFilterCondition(user_ids=[GlobalID("User", "1")])
+ ),
+ True,
+ id="does-not-match-excluded-user-id",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=None,
+ ),
+ SpanAnnotationFilter(
+ exclude=SpanAnnotationFilterCondition(user_ids=[GlobalID("User", "1")])
+ ),
+ True,
+ id="does-not-match-excluded-user-id-with-null-user-id",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=1,
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(
+ names=["test-name"],
+ sources=[AnnotationSource.API],
+ user_ids=[GlobalID("User", "1")],
+ ),
+ exclude=SpanAnnotationFilterCondition(
+ names=["other-name"],
+ sources=[AnnotationSource.APP],
+ user_ids=[GlobalID("User", "2")],
+ ),
+ ),
+ True,
+ id="matches-all-include-fields-and-no-exclude-fields",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ user_id=1,
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(
+ names=["test-name"],
+ sources=[AnnotationSource.API],
+ user_ids=[GlobalID("User", "1")],
+ ),
+ exclude=SpanAnnotationFilterCondition(names=["test-name"]),
+ ),
+ False,
+ id="matches-all-include-fields-but-fails-on-exclude-name",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(
+ include=SpanAnnotationFilterCondition(
+ names=["test-name", "other-name"],
+ sources=[AnnotationSource.API, AnnotationSource.APP],
+ ),
+ ),
+ True,
+ id="matches-multiple-included-names-and-sources",
+ ),
+ pytest.param(
+ SpanAnnotation(
+ span_rowid=1,
+ name="test-name",
+ label="label",
+ score=1.0,
+ explanation="explanation",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="API",
+ ),
+ SpanAnnotationFilter(
+ exclude=SpanAnnotationFilterCondition(
+ names=["test-name", "other-name"],
+ sources=[AnnotationSource.API, AnnotationSource.APP],
+ ),
+ ),
+ False,
+ id="matches-multiple-excluded-names-and-sources",
+ ),
+ ],
+)
+def test_satisfies_filter(
+ span_annotation: SpanAnnotation,
+ filter: SpanAnnotationFilter,
+ expected_satisfies: bool,
+) -> None:
+ assert satisfies_filter(span_annotation, filter) == expected_satisfies
diff --git a/tests/unit/server/api/mutations/test_annotation_config_mutations.py b/tests/unit/server/api/mutations/test_annotation_config_mutations.py
new file mode 100644
index 0000000000..a0f856c54c
--- /dev/null
+++ b/tests/unit/server/api/mutations/test_annotation_config_mutations.py
@@ -0,0 +1,1059 @@
+from copy import deepcopy
+from typing import Any
+
+import pytest
+from strawberry.relay import GlobalID
+
+from phoenix.config import DEFAULT_PROJECT_NAME
+from phoenix.db import models
+from phoenix.db.types.annotation_configs import (
+ AnnotationType,
+)
+from phoenix.server.types import DbSessionFactory
+from tests.unit.graphql import AsyncGraphQLClient
+
+
+@pytest.fixture
+async def project(db: DbSessionFactory) -> models.Project:
+ """Inserts a project into the database."""
+ async with db() as session:
+ project = models.Project(
+ name=DEFAULT_PROJECT_NAME,
+ )
+ session.add(project)
+ await session.flush()
+ return project
+
+
+class TestAnnotationConfigMutations:
+ QUERY = """
+ mutation CreateAnnotationConfig($input: CreateAnnotationConfigInput!) {
+ createAnnotationConfig(input: $input) {
+ annotationConfig {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+
+ mutation UpdateAnnotationConfig($input: UpdateAnnotationConfigInput!) {
+ updateAnnotationConfig(input: $input) {
+ annotationConfig {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+
+ query ListAnnotationConfigs {
+ annotationConfigs(first: 10) {
+ edges {
+ node {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+ }
+
+ mutation DeleteAnnotationConfigs($input: DeleteAnnotationConfigsInput!) {
+ deleteAnnotationConfigs(input: $input) {
+ annotationConfigs {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+
+ mutation AddAnnotationConfigToProject($input: [AddAnnotationConfigToProjectInput!]!) {
+ addAnnotationConfigToProject(input: $input) {
+ project {
+ annotationConfigs {
+ edges {
+ node {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ mutation RemoveAnnotationConfigFromProject($input: [RemoveAnnotationConfigFromProjectInput!]!) {
+ removeAnnotationConfigFromProject(input: $input) {
+ project {
+ annotationConfigs {
+ edges {
+ node {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ query GetProject($id: GlobalID!) {
+ project: node(id: $id) {
+ ... on Project {
+ annotationConfigs {
+ edges {
+ annotationConfig: node {
+ ... on CategoricalAnnotationConfig {
+ ...CategoricalAnnotationConfigFields
+ }
+ ... on ContinuousAnnotationConfig {
+ ...ContinuousAnnotationConfigFields
+ }
+ ... on FreeformAnnotationConfig {
+ ...FreeformAnnotationConfigFields
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fragment CategoricalAnnotationConfigFields on CategoricalAnnotationConfig {
+ id
+ name
+ annotationType
+ optimizationDirection
+ description
+ values {
+ label
+ score
+ }
+ }
+
+ fragment ContinuousAnnotationConfigFields on ContinuousAnnotationConfig {
+ id
+ name
+ annotationType
+ optimizationDirection
+ description
+ lowerBound
+ upperBound
+ }
+
+ fragment FreeformAnnotationConfigFields on FreeformAnnotationConfig {
+ id
+ name
+ annotationType
+ description
+ }
+ """
+
+ @pytest.mark.parametrize(
+ "create_config,update_config,annotation_type",
+ [
+ pytest.param(
+ {
+ "name": "Test Categorical Config",
+ "description": "Test description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ {
+ "name": "Updated Categorical Config",
+ "description": "Updated description",
+ "optimizationDirection": "MINIMIZE",
+ "values": [
+ {"label": "Excellent", "score": 1.0},
+ {"label": "Poor", "score": 0.0},
+ ],
+ },
+ AnnotationType.CATEGORICAL.value,
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "Test Continuous Config",
+ "description": "Test description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ },
+ {
+ "name": "Updated Continuous Config",
+ "description": "Updated description",
+ "optimizationDirection": "MINIMIZE",
+ "lowerBound": -1.0,
+ "upperBound": 2.0,
+ },
+ AnnotationType.CONTINUOUS.value,
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "Test Freeform Config",
+ "description": "Test description",
+ },
+ {
+ "name": "Updated Freeform Config",
+ "description": "Updated description",
+ },
+ AnnotationType.FREEFORM.value,
+ id="freeform",
+ ),
+ ],
+ )
+ async def test_annotation_config_crud_operations(
+ self,
+ gql_client: AsyncGraphQLClient,
+ project: models.Project,
+ create_config: dict[str, Any],
+ update_config: dict[str, Any],
+ annotation_type: str,
+ ) -> None:
+ # Create a categorical annotation config
+ annotation_type_key = annotation_type.lower()
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ annotation_type_key: create_config,
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert not create_response.errors
+ assert (data := create_response.data) is not None
+ created_config = data["createAnnotationConfig"]["annotationConfig"]
+ config_id = created_config["id"]
+ expected_config = deepcopy(create_config)
+ expected_config["id"] = config_id
+ expected_config["annotationType"] = annotation_type
+ assert created_config == expected_config
+
+ # List annotation configs
+ list_response = await gql_client.execute(
+ query=self.QUERY,
+ operation_name="ListAnnotationConfigs",
+ )
+ assert not list_response.errors
+ assert (data := list_response.data) is not None
+ configs = data["annotationConfigs"]["edges"]
+ assert len(configs) == 1
+ assert configs[0]["node"] == created_config
+
+ # Update the annotation config
+ update_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "id": config_id,
+ "annotationConfig": {
+ annotation_type_key: update_config,
+ },
+ }
+ },
+ operation_name="UpdateAnnotationConfig",
+ )
+ assert not update_response.errors
+ assert (data := update_response.data) is not None
+ updated_config = data["updateAnnotationConfig"]["annotationConfig"]
+ expected_config = deepcopy(update_config)
+ expected_config["id"] = config_id
+ expected_config["annotationType"] = annotation_type
+ assert updated_config == expected_config
+
+ # Add annotation config to project
+ project_id = str(GlobalID("Project", str(project.id)))
+ add_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": config_id,
+ }
+ ]
+ },
+ operation_name="AddAnnotationConfigToProject",
+ )
+ assert not add_response.errors
+ assert (data := add_response.data) is not None
+ project_configs = data["addAnnotationConfigToProject"]["project"]["annotationConfigs"][
+ "edges"
+ ]
+ assert len(project_configs) == 1
+ assert project_configs[0]["node"] == expected_config
+
+ # Remove annotation config from project
+ remove_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": config_id,
+ }
+ ]
+ },
+ operation_name="RemoveAnnotationConfigFromProject",
+ )
+ assert not remove_response.errors
+ assert (data := remove_response.data) is not None
+ project_configs = data["removeAnnotationConfigFromProject"]["project"]["annotationConfigs"][
+ "edges"
+ ]
+ assert len(project_configs) == 0
+
+ # Delete the annotation config
+ delete_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "ids": [config_id],
+ }
+ },
+ operation_name="DeleteAnnotationConfigs",
+ )
+ assert not delete_response.errors
+ assert (data := delete_response.data) is not None
+ deleted_configs = data["deleteAnnotationConfigs"]["annotationConfigs"]
+ assert len(deleted_configs) == 1
+ assert deleted_configs[0] == expected_config
+
+ # Verify the config is deleted by listing
+ list_response = await gql_client.execute(
+ query=self.QUERY,
+ operation_name="ListAnnotationConfigs",
+ )
+ assert not list_response.errors
+ assert (data := list_response.data) is not None
+ configs = data["annotationConfigs"]["edges"]
+ assert len(configs) == 0
+
+ @pytest.mark.parametrize(
+ "config,annotation_type",
+ [
+ pytest.param(
+ {
+ "name": "note",
+ "description": "Test description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ AnnotationType.CATEGORICAL.value,
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "note",
+ "description": "Test description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ },
+ AnnotationType.CONTINUOUS.value,
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "note",
+ "description": "Test description",
+ },
+ AnnotationType.FREEFORM.value,
+ id="freeform",
+ ),
+ ],
+ )
+ async def test_cannot_create_annotation_config_with_reserved_name_for_notes(
+ self,
+ gql_client: AsyncGraphQLClient,
+ config: dict[str, Any],
+ annotation_type: str,
+ ) -> None:
+ annotation_type_key = annotation_type.lower()
+ response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ annotation_type_key: config,
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert response.data is None
+ assert response.errors
+ assert len(response.errors) == 1
+ error = response.errors[0]
+ assert "The name 'note' is reserved for span notes" in error.message
+
+ @pytest.mark.parametrize(
+ ("update_config", "annotation_type"),
+ [
+ pytest.param(
+ {
+ "name": "note",
+ "description": "Test description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ AnnotationType.CATEGORICAL.value,
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "note",
+ "description": "Test description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ },
+ AnnotationType.CONTINUOUS.value,
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "note",
+ "description": "Test description",
+ },
+ AnnotationType.FREEFORM.value,
+ id="freeform",
+ ),
+ ],
+ )
+ async def test_cannot_update_annotation_config_with_reserved_name_for_notes(
+ self,
+ gql_client: AsyncGraphQLClient,
+ update_config: dict[str, Any],
+ annotation_type: str,
+ ) -> None:
+ annotation_type_key = annotation_type.lower()
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "freeform": {
+ "name": "config-name",
+ "description": "config-description",
+ },
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+ created_config = create_response.data["createAnnotationConfig"]["annotationConfig"]
+ config_id = created_config["id"]
+
+ # Try to update with reserved name
+ update_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "id": config_id,
+ "annotationConfig": {
+ annotation_type_key: update_config,
+ },
+ }
+ },
+ operation_name="UpdateAnnotationConfig",
+ )
+ assert update_response.data is None
+ assert update_response.errors
+ assert len(update_response.errors) == 1
+ error = update_response.errors[0]
+ assert "The name 'note' is reserved for span notes" in error.message
+
+ @pytest.mark.parametrize(
+ ("annotation_type", "config"),
+ [
+ pytest.param(
+ AnnotationType.CATEGORICAL.value,
+ {
+ "name": "duplicate-name",
+ "description": "config description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ AnnotationType.CONTINUOUS.value,
+ {
+ "name": "duplicate-name",
+ "description": "config description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ AnnotationType.FREEFORM.value,
+ {
+ "name": "duplicate-name",
+ "description": "config description",
+ },
+ id="freeform",
+ ),
+ ],
+ )
+ async def test_cannot_create_annotation_config_with_duplicate_name(
+ self,
+ gql_client: AsyncGraphQLClient,
+ annotation_type: str,
+ config: dict[str, Any],
+ ) -> None:
+ annotation_type_key = annotation_type.lower()
+
+ # Create first config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={"input": {"annotationConfig": {annotation_type_key: config}}},
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+
+ # Try to create duplicate config
+ duplicate_create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={"input": {"annotationConfig": {annotation_type_key: config}}},
+ operation_name="CreateAnnotationConfig",
+ )
+ assert duplicate_create_response.data is None
+ assert duplicate_create_response.errors
+ assert len(duplicate_create_response.errors) == 1
+ error = duplicate_create_response.errors[0]
+ assert "Annotation configuration with name 'duplicate-name' already exists" in error.message
+
+ async def test_create_categorical_config_with_empty_values_returns_expected_error(
+ self, gql_client: AsyncGraphQLClient
+ ) -> None:
+ response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "categorical": {
+ "name": "test_categorical",
+ "optimizationDirection": "NONE",
+ "values": [], # empty values are disallowed
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+
+ assert response.data is None
+ assert response.errors
+ assert len(response.errors) == 1
+ error = response.errors[0]
+ assert "Values must be non-empty" in error.message
+
+ @pytest.mark.parametrize(
+ ("annotation_type", "config"),
+ [
+ pytest.param(
+ AnnotationType.CATEGORICAL.value,
+ {
+ "name": "config-name",
+ "description": "config description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ AnnotationType.CONTINUOUS.value,
+ {
+ "name": "config-name",
+ "description": "config description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ AnnotationType.FREEFORM.value,
+ {
+ "name": "config-name",
+ "description": "config description",
+ },
+ id="freeform",
+ ),
+ ],
+ )
+ async def test_updated_annotation_config_name_cannot_collide_with_existing_config_name(
+ self,
+ gql_client: AsyncGraphQLClient,
+ annotation_type: str,
+ config: dict[str, Any],
+ ) -> None:
+ annotation_type_key = annotation_type.lower()
+
+ # Create first config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "freeform": {
+ "name": "collide-config-name",
+ "description": "config description",
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+
+ # Create second config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={"input": {"annotationConfig": {annotation_type_key: config}}},
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+ config_id = create_response.data["createAnnotationConfig"]["annotationConfig"]["id"]
+
+ # Try to update the name to collide with the existing config
+ update_config = deepcopy(config)
+ update_config["name"] = "collide-config-name"
+ update_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "id": config_id,
+ "annotationConfig": {annotation_type_key: update_config},
+ },
+ },
+ operation_name="UpdateAnnotationConfig",
+ )
+ assert update_response.data is None
+ assert update_response.errors
+ assert len(update_response.errors) == 1
+ error = update_response.errors[0]
+ assert (
+ "Annotation configuration with name 'collide-config-name' already exists"
+ in error.message
+ )
+
+ async def test_update_annotation_config_not_found_returns_error(
+ self,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "id": str(GlobalID(type_name="CategoricalAnnotationConfig", node_id="999999")),
+ "annotationConfig": {
+ "freeform": {
+ "name": "test-config",
+ "description": "test description",
+ }
+ },
+ }
+ },
+ operation_name="UpdateAnnotationConfig",
+ )
+ assert response.data is None
+ assert response.errors
+ assert len(response.errors) == 1
+ error = response.errors[0]
+ assert "Annotation config not found" in error.message
+
+ async def test_update_continuous_annotation_config_with_invalid_bounds_returns_expected_error(
+ self,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ # First create a valid continuous config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "continuous": {
+ "name": "test-config",
+ "description": "test description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ config_id = create_response.data["createAnnotationConfig"]["annotationConfig"]["id"]
+
+ # Try to update with invalid bounds
+ response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "id": config_id,
+ "annotationConfig": {
+ "continuous": {
+ "name": "test-config",
+ "description": "test description",
+ "optimizationDirection": "MAXIMIZE",
+ "lowerBound": 1.0,
+ "upperBound": 0.0,
+ }
+ },
+ }
+ },
+ operation_name="UpdateAnnotationConfig",
+ )
+ assert response.data is None
+ assert response.errors
+ assert len(response.errors) == 1
+ error = response.errors[0]
+ assert "Lower bound must be strictly less than upper bound" in error.message
+
+ async def test_delete_annotation_configs_aborts_if_some_configs_not_found(
+ self,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ # Create a categorical annotation config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "categorical": {
+ "name": "test-config",
+ "description": "test description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+ config_id = create_response.data["createAnnotationConfig"]["annotationConfig"]["id"]
+
+ # Try to delete the existing config along with a non-existent one
+ non_existent_id = str(GlobalID(type_name="CategoricalAnnotationConfig", node_id="999999"))
+ delete_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "ids": [config_id, non_existent_id],
+ }
+ },
+ operation_name="DeleteAnnotationConfigs",
+ )
+ assert delete_response.data is None
+ assert delete_response.errors
+ assert len(delete_response.errors) == 1
+ error = delete_response.errors[0]
+ assert (
+ "Could not find one or more annotation configs to delete, deletion aborted"
+ in error.message
+ )
+
+ # Verify the config still exists by listing
+ list_response = await gql_client.execute(
+ query=self.QUERY,
+ operation_name="ListAnnotationConfigs",
+ )
+ assert not list_response.errors
+ assert (data := list_response.data) is not None
+ configs = data["annotationConfigs"]["edges"]
+ assert len(configs) == 1
+ assert configs[0]["node"]["id"] == config_id
+
+ async def test_cannot_add_same_annotation_config_to_project_twice(
+ self,
+ gql_client: AsyncGraphQLClient,
+ project: models.Project,
+ ) -> None:
+ # Create a categorical annotation config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "categorical": {
+ "name": "test-config",
+ "description": "test description",
+ "optimizationDirection": "MAXIMIZE",
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ config_id = create_response.data["createAnnotationConfig"]["annotationConfig"]["id"]
+ project_id = str(GlobalID("Project", str(project.id)))
+
+ # Add the config to the project
+ add_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": config_id,
+ }
+ ]
+ },
+ operation_name="AddAnnotationConfigToProject",
+ )
+ assert not add_response.errors
+ assert (data := add_response.data) is not None
+ project_configs = data["addAnnotationConfigToProject"]["project"]["annotationConfigs"][
+ "edges"
+ ]
+ assert len(project_configs) == 1
+ project_config = project_configs[0]["node"]
+ assert project_config["id"] == config_id
+
+ # Try to add the same config again
+ duplicate_add_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": config_id,
+ }
+ ]
+ },
+ operation_name="AddAnnotationConfigToProject",
+ )
+ assert duplicate_add_response.data is None
+ assert duplicate_add_response.errors
+ assert len(duplicate_add_response.errors) == 1
+ error = duplicate_add_response.errors[0]
+ assert (
+ "One or more annotation configs have already been added to the project" in error.message
+ )
+
+ async def test_adding_annotation_config_to_nonexistent_project_fails_with_expected_error(
+ self,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ # First create an annotation config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "freeform": {
+ "name": "test-config",
+ "description": "test description",
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+ config_id = create_response.data["createAnnotationConfig"]["annotationConfig"]["id"]
+
+ # Try to add the config to a non-existent project
+ nonexistent_project_id = str(GlobalID("Project", "999"))
+ add_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": nonexistent_project_id,
+ "annotationConfigId": config_id,
+ }
+ ]
+ },
+ operation_name="AddAnnotationConfigToProject",
+ )
+ assert add_response.data is None
+ assert add_response.errors
+ assert len(add_response.errors) == 1
+ error = add_response.errors[0]
+ assert error.message == "One or more projects were not found"
+
+ async def test_adding_nonexistent_annotation_config_to_project_fails_with_expected_error(
+ self,
+ gql_client: AsyncGraphQLClient,
+ project: models.Project,
+ ) -> None:
+ project_id = str(GlobalID("Project", str(project.id)))
+ nonexistent_config_id = str(GlobalID("CategoricalAnnotationConfig", "999"))
+
+ add_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": nonexistent_config_id,
+ }
+ ]
+ },
+ operation_name="AddAnnotationConfigToProject",
+ )
+ assert add_response.data is None
+ assert add_response.errors
+ assert len(add_response.errors) == 1
+ error = add_response.errors[0]
+ assert error.message == "One or more annotation configs were not found"
+
+ async def test_removing_unknown_annotation_config_from_project_rolls_back(
+ self,
+ gql_client: AsyncGraphQLClient,
+ project: models.Project,
+ ) -> None:
+ # First create an annotation config
+ create_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": {
+ "annotationConfig": {
+ "freeform": {
+ "name": "test-config",
+ "description": "test description",
+ }
+ }
+ }
+ },
+ operation_name="CreateAnnotationConfig",
+ )
+ assert create_response.data is not None
+ assert not create_response.errors
+ config_id = create_response.data["createAnnotationConfig"]["annotationConfig"]["id"]
+ project_id = str(GlobalID("Project", str(project.id)))
+
+ # Add the config to the project
+ add_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": config_id,
+ }
+ ]
+ },
+ operation_name="AddAnnotationConfigToProject",
+ )
+ assert not add_response.errors
+
+ # Try to remove both the existing config and a non-existent one
+ fake_config_id = str(GlobalID("CategoricalAnnotationConfig", "999"))
+ remove_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={
+ "input": [
+ {
+ "projectId": project_id,
+ "annotationConfigId": config_id,
+ },
+ {
+ "projectId": project_id,
+ "annotationConfigId": fake_config_id,
+ },
+ ]
+ },
+ operation_name="RemoveAnnotationConfigFromProject",
+ )
+
+ # Verify the operation failed
+ assert remove_response.data is None
+ assert remove_response.errors
+ assert len(remove_response.errors) == 1
+ error = remove_response.errors[0]
+ assert "Could not find one or more input project annotation configs" in error.message
+
+ # Verify the original config is still associated with the project
+ query_response = await gql_client.execute(
+ query=self.QUERY,
+ variables={"id": project_id},
+ operation_name="GetProject",
+ )
+ assert not query_response.errors
+ assert (data := query_response.data) is not None
+ project_configs = data["project"]["annotationConfigs"]["edges"]
+ assert len(project_configs) == 1
+ assert project_configs[0]["annotationConfig"]["id"] == config_id
diff --git a/tests/unit/server/api/mutations/test_dataset_mutations.py b/tests/unit/server/api/mutations/test_dataset_mutations.py
index 84431ee312..7e6afd70f9 100644
--- a/tests/unit/server/api/mutations/test_dataset_mutations.py
+++ b/tests/unit/server/api/mutations/test_dataset_mutations.py
@@ -148,7 +148,7 @@ async def test_updating_a_single_field_leaves_remaining_fields_unchannged(
async def test_add_span_to_dataset(
gql_client: AsyncGraphQLClient,
empty_dataset: None,
- spans: None,
+ spans: list[models.Span],
span_annotation: None,
) -> None:
dataset_id = GlobalID(type_name="Dataset", node_id=str(1))
@@ -176,10 +176,7 @@ async def test_add_span_to_dataset(
query=mutation,
variables={
"datasetId": str(dataset_id),
- "spanIds": [
- str(GlobalID(type_name="Span", node_id=span_id))
- for span_id in map(str, range(1, 4))
- ],
+ "spanIds": [str(GlobalID(type_name="Span", node_id=str(span.id))) for span in spans],
},
)
assert not response.errors
@@ -199,6 +196,7 @@ async def test_add_span_to_dataset(
},
"metadata": {
"span_kind": "LLM",
+ "annotations": {},
},
"output": {
"messages": [
@@ -225,6 +223,7 @@ async def test_add_span_to_dataset(
},
"metadata": {
"span_kind": "RETRIEVER",
+ "annotations": {},
},
}
}
@@ -237,13 +236,18 @@ async def test_add_span_to_dataset(
"metadata": {
"span_kind": "CHAIN",
"annotations": {
- "test annotation": {
- "label": "ambiguous",
- "score": 0.5,
- "explanation": "meaningful words",
- "metadata": {},
- "annotator_kind": "HUMAN",
- }
+ "test annotation": [
+ {
+ "label": "ambiguous",
+ "score": 0.5,
+ "explanation": "meaningful words",
+ "metadata": {},
+ "annotator_kind": "HUMAN",
+ "user_id": None,
+ "username": None,
+ "email": None,
+ }
+ ]
},
},
}
@@ -524,11 +528,12 @@ async def empty_dataset(db: DbSessionFactory) -> None:
@pytest.fixture
-async def spans(db: DbSessionFactory) -> None:
+async def spans(db: DbSessionFactory) -> list[models.Span]:
"""
Inserts three spans from a single trace: a chain root span, a retriever
child span, and an llm child span.
"""
+ spans = []
async with db() as session:
project_row_id = await session.scalar(
insert(models.Project).values(name=DEFAULT_PROJECT_NAME).returning(models.Project.id)
@@ -543,7 +548,7 @@ async def spans(db: DbSessionFactory) -> None:
)
.returning(models.Trace.id)
)
- await session.execute(
+ span = await session.scalar(
insert(models.Span)
.values(
trace_rowid=trace_row_id,
@@ -564,10 +569,13 @@ async def spans(db: DbSessionFactory) -> None:
cumulative_llm_token_count_prompt=0,
cumulative_llm_token_count_completion=0,
)
- .returning(models.Span.id)
+ .returning(models.Span)
)
- await session.execute(
- insert(models.Span).values(
+ assert span is not None
+ spans.append(span)
+ span = await session.scalar(
+ insert(models.Span)
+ .values(
trace_rowid=trace_row_id,
span_id="2",
parent_id="1",
@@ -593,9 +601,13 @@ async def spans(db: DbSessionFactory) -> None:
cumulative_llm_token_count_prompt=0,
cumulative_llm_token_count_completion=0,
)
+ .returning(models.Span)
)
- await session.execute(
- insert(models.Span).values(
+ assert span is not None
+ spans.append(span)
+ span = await session.scalar(
+ insert(models.Span)
+ .values(
trace_rowid=trace_row_id,
span_id="3",
parent_id="1",
@@ -626,7 +638,11 @@ async def spans(db: DbSessionFactory) -> None:
cumulative_llm_token_count_prompt=0,
cumulative_llm_token_count_completion=0,
)
+ .returning(models.Span)
)
+ assert span is not None
+ spans.append(span)
+ return spans
@pytest.fixture
@@ -639,6 +655,9 @@ async def span_annotation(db: DbSessionFactory) -> None:
label="ambiguous",
score=0.5,
explanation="meaningful words",
+ identifier="",
+ source="APP",
+ user_id=None,
)
session.add(span_annotation)
await session.flush()
diff --git a/tests/unit/server/api/mutations/test_project_trace_retention_policy_mutations.py b/tests/unit/server/api/mutations/test_project_trace_retention_policy_mutations.py
new file mode 100644
index 0000000000..2f88c84e14
--- /dev/null
+++ b/tests/unit/server/api/mutations/test_project_trace_retention_policy_mutations.py
@@ -0,0 +1,452 @@
+from secrets import token_hex
+from typing import Any
+
+import pytest
+import sqlalchemy as sa
+from strawberry.relay import GlobalID
+
+from phoenix.db import models
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+from phoenix.db.types.trace_retention import MaxDaysRule
+from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.api.types.Project import Project
+from phoenix.server.api.types.ProjectTraceRetentionPolicy import ProjectTraceRetentionPolicy
+from phoenix.server.types import DbSessionFactory
+from tests.unit.graphql import AsyncGraphQLClient
+
+
+class TestProjectTraceRetentionPolicyMutations:
+ CRUD = """
+ mutation Create($input: CreateProjectTraceRetentionPolicyInput!) {
+ createProjectTraceRetentionPolicy(input: $input) {
+ node {
+ ... PolicyFragment
+ }
+ }
+ }
+ query Read {
+ projectTraceRetentionPolicies {
+ edges {
+ node {
+ ... PolicyFragment
+ }
+ }
+ }
+ }
+ mutation Update($input: PatchProjectTraceRetentionPolicyInput!) {
+ patchProjectTraceRetentionPolicy(input: $input) {
+ node {
+ ... PolicyFragment
+ }
+ }
+ }
+ mutation Delete($input: DeleteProjectTraceRetentionPolicyInput!) {
+ deleteProjectTraceRetentionPolicy(input: $input) {
+ node {
+ ... PolicyFragment
+ }
+ }
+ }
+ query GetNode($id: GlobalID!) {
+ node(id: $id) {
+ ... on Project {
+ traceRetentionPolicy {
+ ... PolicyFragment
+ }
+ }
+ ... on ProjectTraceRetentionPolicy {
+ ... PolicyFragment
+ }
+ }
+ }
+ fragment PolicyFragment on ProjectTraceRetentionPolicy {
+ id
+ name
+ cronExpression
+ rule {
+ ... on TraceRetentionRuleMaxDays {
+ maxDays
+ }
+ ... on TraceRetentionRuleMaxCount {
+ maxCount
+ }
+ ... on TraceRetentionRuleMaxDaysOrCount {
+ maxCount
+ maxDays
+ }
+ }
+ }
+ """ # noqa: E501
+
+ @pytest.mark.parametrize(
+ "initial_rule_input, initial_rule_output, update_rule_input, update_rule_output",
+ [
+ (
+ {"maxDays": {"maxDays": 1.5}},
+ {"maxDays": 1.5},
+ {"maxCount": {"maxCount": 5}},
+ {"maxCount": 5},
+ ),
+ (
+ {"maxCount": {"maxCount": 5}},
+ {"maxCount": 5},
+ {"maxDays": {"maxDays": 1.5}},
+ {"maxDays": 1.5},
+ ),
+ (
+ {"maxDaysOrCount": {"maxDays": 5.1, "maxCount": 1}},
+ {"maxDays": 5.1, "maxCount": 1},
+ {"maxDays": {"maxDays": 1.5}},
+ {"maxDays": 1.5},
+ ),
+ ],
+ )
+ async def test_crud(
+ self,
+ db: DbSessionFactory,
+ gql_client: AsyncGraphQLClient,
+ initial_rule_input: dict[str, Any],
+ initial_rule_output: dict[str, Any],
+ update_rule_input: dict[str, Any],
+ update_rule_output: dict[str, Any],
+ ) -> None:
+ """
+ Test the complete CRUD lifecycle for ProjectTraceRetentionPolicy.
+
+ This test verifies:
+ 1. Creating a policy with different rule types
+ 2. Associating policies with projects
+ 3. Updating policies (name, cron expression, rule type)
+ 4. Transferring policies between projects
+ 5. Deleting policies and verifying fallback to default
+
+ Each test case uses different rule types (maxDays, maxCount, maxDaysOrCount)
+ to ensure all rule types work correctly throughout the CRUD operations.
+ """
+ # Create two test projects with random names
+ project1 = models.Project(name=token_hex(8))
+ project2 = models.Project(name=token_hex(8))
+ async with db() as session:
+ session.add(project1)
+ session.add(project2)
+ await session.flush()
+
+ # Convert project IDs to GlobalIDs for GraphQL operations
+ project1_gid = str(GlobalID(Project.__name__, str(project1.id)))
+ project2_gid = str(GlobalID(Project.__name__, str(project2.id)))
+
+ # Create a new retention policy
+ name1 = token_hex(8) # Random policy name
+ cron_expression1 = "0 1 * * 1" # Weekly on Monday at 1:00 AM
+
+ # Execute the Create mutation
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Create",
+ variables={
+ "input": {
+ "name": name1,
+ "cronExpression": cron_expression1,
+ "rule": initial_rule_input,
+ "addProjects": [project1_gid],
+ }
+ },
+ )
+
+ # Verify the creation was successful
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["createProjectTraceRetentionPolicy"]["node"]
+ assert policy["name"] == name1
+ assert policy["cronExpression"] == cron_expression1
+ assert policy["rule"] == initial_rule_output
+
+ # Verify policy exists in the database
+ id_ = from_global_id_with_expected_type(
+ GlobalID.from_id(policy["id"]),
+ ProjectTraceRetentionPolicy.__name__,
+ )
+ async with db() as session:
+ stmt = sa.select(models.ProjectTraceRetentionPolicy).filter_by(id=id_)
+ assert await session.scalar(stmt)
+
+ # Verify policy can be read
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Read",
+ variables={"id": project1_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ policies = [
+ e["node"]
+ for e in resp.data["projectTraceRetentionPolicies"]["edges"]
+ if e["node"]["id"] == policy["id"]
+ ]
+ assert len(policies) == 1
+ assert policies[0]["name"] == name1
+ assert policies[0]["cronExpression"] == cron_expression1
+ assert policies[0]["rule"] == initial_rule_output
+
+ # Verify the policy is a Node
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": policy["id"]},
+ )
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["node"]
+ assert policy["name"] == name1
+ assert policy["cronExpression"] == cron_expression1
+ assert policy["rule"] == initial_rule_output
+
+ # Verify the policy is associated with project1
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": project1_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["node"]["traceRetentionPolicy"]
+ assert policy["name"] == name1
+ assert policy["cronExpression"] == cron_expression1
+ assert policy["rule"] == initial_rule_output
+
+ # Update the policy and transfer from project1 to project2
+ name2 = token_hex(8) # New random policy name
+ cron_expression2 = "0 2 * * 2" # Weekly on Tuesday at 2:00 AM
+
+ # Execute the Update mutation
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Update",
+ variables={
+ "input": {
+ "id": policy["id"],
+ "name": name2,
+ "cronExpression": cron_expression2,
+ "rule": update_rule_input,
+ "removeProjects": [project1_gid], # Remove from project1
+ "addProjects": [project2_gid], # Add to project2
+ }
+ },
+ )
+
+ # Verify the update was successful
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["patchProjectTraceRetentionPolicy"]["node"]
+ assert policy["name"] == name2
+ assert policy["cronExpression"] == cron_expression2
+ assert policy["rule"] == update_rule_output
+
+ # Verify project1 now uses the default policy
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": project1_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["node"]["traceRetentionPolicy"]
+ assert policy["name"] == "Default" # Project1 now uses default policy
+
+ # Verify project2 now uses the updated policy
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": project2_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["node"]["traceRetentionPolicy"]
+ assert policy["name"] == name2
+ assert policy["cronExpression"] == cron_expression2
+ assert policy["rule"] == update_rule_output
+
+ # Delete the policy
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Delete",
+ variables={"input": {"id": policy["id"]}},
+ )
+ assert not resp.errors
+
+ # Verify the policy is deleted from the database
+ async with db() as session:
+ stmt = sa.select(models.ProjectTraceRetentionPolicy).filter_by(id=id_)
+ assert not (await session.scalar(stmt))
+
+ # Verify both projects now use the default policy
+ # Check project1
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": project1_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["node"]["traceRetentionPolicy"]
+ assert policy["name"] == "Default"
+
+ # Check project2
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": project2_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ policy = resp.data["node"]["traceRetentionPolicy"]
+ assert policy["name"] == "Default" # Project2 now also uses default policy
+
+ async def test_default_policy_modification_restrictions(
+ self,
+ db: DbSessionFactory,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ """
+ Test the modification restrictions on the default project trace retention policy.
+
+ This test verifies that:
+ 1. The default policy cannot be deleted
+ 2. The default policy cannot be renamed
+ 3. The default policy's cron expression can be modified
+ 4. The default policy's rule can be modified
+
+ The default policy (with name "Default") is a special policy that must maintain its name
+ and cannot be deleted to ensure system stability and provide a fallback for projects.
+ However, its cron expression and rule can be modified as needed.
+ """ # noqa: E501
+
+ # Create a GlobalID for the default policy to use in GraphQL operations
+ default_policy_gid = str(
+ GlobalID(
+ ProjectTraceRetentionPolicy.__name__,
+ str(DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID),
+ )
+ )
+
+ # First, get the current state of the default policy
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="GetNode",
+ variables={"id": default_policy_gid},
+ )
+ assert not resp.errors
+ assert resp.data
+ initial_policy = resp.data["node"]
+ assert initial_policy["name"] == "Default"
+ initial_cron = initial_policy["cronExpression"]
+ initial_rule = initial_policy["rule"]
+
+ # Test 1: Attempt to delete the default policy
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Delete",
+ variables={"input": {"id": default_policy_gid}},
+ )
+
+ # Verify the deletion was rejected with the expected error message
+ assert resp.errors
+ assert len(resp.errors) == 1
+ assert "Cannot delete the default project trace retention policy" in resp.errors[0].message
+
+ # Verify the default policy still exists in the database with its original name
+ async with db() as session:
+ stmt = sa.select(models.ProjectTraceRetentionPolicy).filter_by(
+ id=DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+ )
+ default_policy = await session.scalar(stmt)
+ assert default_policy is not None
+ assert default_policy.name == "Default"
+
+ # Test 2: Attempt to update the default policy's name
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Update",
+ variables={
+ "input": {
+ "id": default_policy_gid,
+ "name": "New Default Name", # Try to change the name
+ }
+ },
+ )
+
+ # Verify the rename was rejected with the expected error message
+ assert resp.errors
+ assert len(resp.errors) == 1
+ assert (
+ "Cannot change the name of the default project trace retention policy"
+ in resp.errors[0].message
+ )
+
+ # Verify the default policy still exists in the database with its original name
+ async with db() as session:
+ stmt = sa.select(models.ProjectTraceRetentionPolicy).filter_by(
+ id=DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+ )
+ default_policy = await session.scalar(stmt)
+ assert default_policy is not None
+ assert default_policy.name == "Default"
+
+ # Test 3: Update the default policy's cron expression
+ new_cron = "0 2 * * *" # Daily at 2:00 AM
+ assert new_cron != initial_cron, "New cron expression should be different from initial"
+
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Update",
+ variables={
+ "input": {
+ "id": default_policy_gid,
+ "cronExpression": new_cron,
+ }
+ },
+ )
+
+ # Verify the cron expression update was successful
+ assert not resp.errors
+ assert resp.data
+ updated_policy = resp.data["patchProjectTraceRetentionPolicy"]["node"]
+ assert updated_policy["name"] == "Default" # Name should remain unchanged
+ assert updated_policy["cronExpression"] == new_cron # Cron should be updated
+ assert updated_policy["rule"] == initial_rule # Rule should remain unchanged
+
+ # Test 4: Update the default policy's rule
+ new_rule = {"maxDays": {"maxDays": 3.0}} # New rule with 3 days retention
+ assert new_rule != initial_rule, "New rule should be different from initial"
+
+ resp = await gql_client.execute(
+ self.CRUD,
+ operation_name="Update",
+ variables={
+ "input": {
+ "id": default_policy_gid,
+ "rule": new_rule,
+ }
+ },
+ )
+
+ # Verify the rule update was successful
+ assert not resp.errors
+ assert resp.data
+ updated_policy = resp.data["patchProjectTraceRetentionPolicy"]["node"]
+ assert updated_policy["name"] == "Default" # Name should remain unchanged
+ assert updated_policy["cronExpression"] == new_cron # Cron should remain updated
+ assert updated_policy["rule"] == {"maxDays": 3.0} # Rule should be updated
+
+ # Verify all changes are persisted in the database
+ del default_policy
+ async with db() as session:
+ stmt = sa.select(models.ProjectTraceRetentionPolicy).filter_by(
+ id=DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+ )
+ default_policy = await session.scalar(stmt)
+ assert default_policy is not None
+ assert default_policy.name == "Default"
+ assert default_policy.cron_expression.root == new_cron
+ assert isinstance(default_policy.rule.root, MaxDaysRule)
+ assert default_policy.rule.root.max_days == 3.0
diff --git a/tests/unit/server/api/mutations/test_span_annotation_mutations.py b/tests/unit/server/api/mutations/test_span_annotation_mutations.py
new file mode 100644
index 0000000000..1bb79c7e08
--- /dev/null
+++ b/tests/unit/server/api/mutations/test_span_annotation_mutations.py
@@ -0,0 +1,213 @@
+import datetime
+from typing import Any
+
+import pytest
+from strawberry.relay.types import GlobalID
+
+from phoenix.db import models
+from phoenix.server.api.types.AnnotationSource import AnnotationSource
+from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
+from phoenix.server.types import DbSessionFactory
+from tests.unit.graphql import AsyncGraphQLClient
+
+
+@pytest.fixture(autouse=True)
+async def span_data(db: DbSessionFactory) -> None:
+ async with db() as session:
+ project = models.Project(name="default")
+ session.add(project)
+ await session.flush()
+
+ trace = models.Trace(
+ project_rowid=project.id,
+ trace_id="trace-1",
+ start_time=datetime.datetime.now(),
+ end_time=datetime.datetime.now(),
+ )
+ session.add(trace)
+ await session.flush()
+
+ # create two spans for tests (rowid=1 and rowid=2)
+ span1 = models.Span(
+ trace_rowid=trace.id,
+ span_id="span1",
+ name="span1",
+ span_kind="internal",
+ start_time=datetime.datetime.now(),
+ end_time=datetime.datetime.now(),
+ attributes={},
+ events=[],
+ status_code="OK",
+ status_message="",
+ cumulative_error_count=0,
+ cumulative_llm_token_count_prompt=0,
+ cumulative_llm_token_count_completion=0,
+ )
+ session.add(span1)
+
+ span2 = models.Span(
+ trace_rowid=trace.id,
+ span_id="span2",
+ name="span2",
+ span_kind="internal",
+ start_time=datetime.datetime.now(),
+ end_time=datetime.datetime.now(),
+ attributes={},
+ events=[],
+ status_code="OK",
+ status_message="",
+ cumulative_error_count=0,
+ cumulative_llm_token_count_prompt=0,
+ cumulative_llm_token_count_completion=0,
+ )
+ session.add(span2)
+
+ await session.commit()
+
+
+class TestSpanAnnotationMutations:
+ CREATE_SPAN_ANNOTATIONS_MUTATION = """
+ mutation CreateSpanAnnotations($input: [CreateSpanAnnotationInput!]!) {
+ createSpanAnnotations(input: $input) {
+ spanAnnotations {
+ id
+ name
+ label
+ score
+ explanation
+ identifier
+ }
+ }
+ }
+ """
+
+ @pytest.mark.parametrize(
+ "variables",
+ [
+ pytest.param(
+ {
+ "input": [
+ {
+ "spanId": str(GlobalID("Span", "1")),
+ "name": "test_annotation",
+ "label": "LABEL1",
+ "score": 0.75,
+ "explanation": "Initial explanation",
+ "annotatorKind": AnnotatorKind.HUMAN.name,
+ "metadata": {},
+ "identifier": None,
+ "source": AnnotationSource.API.name,
+ }
+ ]
+ },
+ id="create-basic",
+ ),
+ ],
+ )
+ async def test_create_new_annotation_succeeds(
+ self,
+ db: DbSessionFactory,
+ gql_client: AsyncGraphQLClient,
+ variables: dict[str, Any],
+ ) -> None:
+ result = await gql_client.execute(self.CREATE_SPAN_ANNOTATIONS_MUTATION, variables)
+ assert not result.errors
+ assert result.data is not None
+ data = result.data["createSpanAnnotations"]["spanAnnotations"][0]
+ expected = variables["input"][0]
+ assert data["name"] == expected["name"]
+ assert data["label"] == expected["label"]
+ assert data["score"] == expected["score"]
+ assert data["explanation"] == expected["explanation"]
+ assert data["identifier"] is None
+ assert isinstance(data["id"], str)
+
+ async def test_upsert_on_conflict_updates_existing(
+ self,
+ db: DbSessionFactory,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ # Initial creation
+ span_gid = str(GlobalID("Span", "2"))
+ base_input = {
+ "spanId": span_gid,
+ "name": "conflict_test",
+ "label": "FIRST_LABEL",
+ "score": 1.0,
+ "explanation": "First",
+ "annotatorKind": AnnotatorKind.HUMAN.name,
+ "metadata": {},
+ "identifier": "conflict",
+ "source": AnnotationSource.APP.name,
+ }
+ variables1 = {"input": [base_input]}
+ res1 = await gql_client.execute(self.CREATE_SPAN_ANNOTATIONS_MUTATION, variables1)
+ assert not res1.errors
+ ann1 = res1.data["createSpanAnnotations"]["spanAnnotations"][0] # type: ignore
+ id1 = ann1["id"]
+
+ # Upsert with updated fields
+ updated_input = base_input.copy()
+ updated_input.update(
+ {
+ "label": "UPDATED_LABEL",
+ "score": 2.0,
+ "explanation": "Updated explanation",
+ }
+ )
+ variables2 = {"input": [updated_input]}
+ res2 = await gql_client.execute(self.CREATE_SPAN_ANNOTATIONS_MUTATION, variables2)
+ assert not res2.errors
+ ann2 = res2.data["createSpanAnnotations"]["spanAnnotations"][0] # type: ignore
+ id2 = ann2["id"]
+
+ # IDs should match and values updated
+ assert id1 == id2
+ assert ann2["label"] == "UPDATED_LABEL"
+ assert ann2["score"] == 2.0
+ assert ann2["explanation"] == "Updated explanation"
+
+ async def test_upsert_on_conflict_updates_existing_with_no_identifier(
+ self,
+ db: DbSessionFactory,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ # Initial creation
+ span_gid = str(GlobalID("Span", "2"))
+ base_input = {
+ "spanId": span_gid,
+ "name": "conflict_test",
+ "label": "FIRST_LABEL",
+ "score": 1.0,
+ "explanation": "First",
+ "annotatorKind": AnnotatorKind.HUMAN.name,
+ "metadata": {},
+ "identifier": None, # Missing identifiers will upsert for backwards compatibility
+ "source": AnnotationSource.APP.name,
+ }
+ variables1 = {"input": [base_input]}
+ res1 = await gql_client.execute(self.CREATE_SPAN_ANNOTATIONS_MUTATION, variables1)
+ assert not res1.errors
+ ann1 = res1.data["createSpanAnnotations"]["spanAnnotations"][0] # type: ignore
+ id1 = ann1["id"]
+
+ # Upsert with updated fields
+ updated_input = base_input.copy()
+ updated_input.update(
+ {
+ "label": "UPDATED_LABEL",
+ "score": 2.0,
+ "explanation": "Updated explanation",
+ }
+ )
+ variables2 = {"input": [updated_input]}
+ res2 = await gql_client.execute(self.CREATE_SPAN_ANNOTATIONS_MUTATION, variables2)
+ assert not res2.errors
+ ann2 = res2.data["createSpanAnnotations"]["spanAnnotations"][0] # type: ignore
+ id2 = ann2["id"]
+
+ # IDs should match and values updated
+ assert id1 == id2
+ assert ann2["label"] == "UPDATED_LABEL"
+ assert ann2["score"] == 2.0
+ assert ann2["explanation"] == "Updated explanation"
diff --git a/tests/unit/server/api/routers/v1/test_annotation_configs.py b/tests/unit/server/api/routers/v1/test_annotation_configs.py
new file mode 100644
index 0000000000..df4b6eee70
--- /dev/null
+++ b/tests/unit/server/api/routers/v1/test_annotation_configs.py
@@ -0,0 +1,505 @@
+from copy import deepcopy
+from typing import Any, Optional
+
+import pytest
+from httpx import AsyncClient
+from starlette.status import (
+ HTTP_200_OK,
+ HTTP_400_BAD_REQUEST,
+ HTTP_404_NOT_FOUND,
+ HTTP_409_CONFLICT,
+)
+from strawberry.relay.types import GlobalID
+
+from phoenix.db import models
+from phoenix.db.types.annotation_configs import (
+ AnnotationType,
+ CategoricalAnnotationConfig,
+ CategoricalAnnotationValue,
+ OptimizationDirection,
+)
+from phoenix.server.types import DbSessionFactory
+
+
+@pytest.mark.parametrize(
+ "create_config,update_config",
+ [
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.CATEGORICAL.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ {
+ "name": "updated-config-name",
+ "type": AnnotationType.CATEGORICAL.value,
+ "description": "Updated description",
+ "optimization_direction": OptimizationDirection.MINIMIZE.value,
+ "values": [
+ {"label": "Excellent", "score": 1.0},
+ {"label": "Poor", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "lower_bound": 0.0,
+ "upper_bound": 100.0,
+ },
+ {
+ "name": "updated-config-name",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "Updated description",
+ "optimization_direction": OptimizationDirection.MINIMIZE.value,
+ "lower_bound": -10.0,
+ "upper_bound": 10.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "Test description",
+ },
+ {
+ "name": "updated-config-name",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "Updated description",
+ },
+ id="freeform",
+ ),
+ ],
+)
+async def test_crud_operations(
+ httpx_client: AsyncClient,
+ create_config: dict[str, Any],
+ update_config: dict[str, Any],
+) -> None:
+ # Create an annotation config (parametrized: categorical, continuous, or freeform)
+ create_response = await httpx_client.post(
+ "/v1/annotation_configs",
+ json=create_config,
+ )
+ assert create_response.status_code == HTTP_200_OK
+ created_config = create_response.json()["data"]
+ config_id = created_config["id"]
+
+ expected_config = create_config
+ expected_config["id"] = config_id
+ assert created_config == expected_config
+
+ # List annotation configs
+ list_response = await httpx_client.get("/v1/annotation_configs")
+ assert list_response.status_code == HTTP_200_OK
+ configs = list_response.json()["data"]
+ assert len(configs) == 1
+ assert configs[0] == created_config
+
+ # Get config by ID
+ get_response = await httpx_client.get(f"/v1/annotation_configs/{config_id}")
+ assert get_response.status_code == HTTP_200_OK
+ assert get_response.json()["data"] == created_config
+
+ # Get config by name
+ get_by_name_response = await httpx_client.get("/v1/annotation_configs/config-name")
+ assert get_by_name_response.status_code == HTTP_200_OK
+ assert get_by_name_response.json()["data"] == created_config
+
+ # Update the annotation config
+ update_response = await httpx_client.put(
+ f"/v1/annotation_configs/{config_id}",
+ json=update_config,
+ )
+ assert update_response.status_code == HTTP_200_OK
+ updated_config = update_response.json()["data"]
+ expected_updated_config = update_config
+ expected_updated_config["id"] = config_id
+ assert updated_config == expected_updated_config
+
+ # Delete the annotation config
+ delete_response = await httpx_client.delete(f"/v1/annotation_configs/{config_id}")
+ assert delete_response.status_code == HTTP_200_OK
+ assert delete_response.json()["data"] == expected_updated_config
+
+ # Verify the config is deleted by listing
+ list_response = await httpx_client.get("/v1/annotation_configs")
+ assert list_response.status_code == HTTP_200_OK
+ configs = list_response.json()["data"]
+ assert len(configs) == 0
+
+ # Verify the config is deleted by getting
+ get_response = await httpx_client.get(f"/v1/annotation_configs/{config_id}")
+ assert get_response.status_code == HTTP_404_NOT_FOUND
+
+
+@pytest.mark.parametrize(
+ "annotation_config",
+ [
+ pytest.param(
+ {
+ "name": "note",
+ "type": AnnotationType.CATEGORICAL.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "note",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "lower_bound": 0.0,
+ "upper_bound": 1.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "note",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "Test description",
+ },
+ id="freeform",
+ ),
+ ],
+)
+async def test_cannot_create_annotation_config_with_reserved_name_for_notes(
+ httpx_client: AsyncClient,
+ annotation_config: dict[str, Any],
+) -> None:
+ response = await httpx_client.post("/v1/annotation_configs", json=annotation_config)
+ assert response.status_code == HTTP_409_CONFLICT
+ assert "The name 'note' is reserved" in response.text
+
+
+@pytest.mark.parametrize(
+ "annotation_config",
+ [
+ pytest.param(
+ {
+ "name": "test-config",
+ "type": AnnotationType.CATEGORICAL.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "test-config",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "lower_bound": 0.0,
+ "upper_bound": 1.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "test-config",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "Test description",
+ },
+ id="freeform",
+ ),
+ ],
+)
+async def test_cannot_update_annotation_config_name_to_reserved_name_for_notes(
+ httpx_client: AsyncClient,
+ annotation_config: dict[str, Any],
+) -> None:
+ # First create a config
+ response = await httpx_client.post("/v1/annotation_configs", json=annotation_config)
+ assert response.status_code == HTTP_200_OK
+ config_id = response.json()["data"]["id"]
+
+ # Try to update the name to "note"
+ update_config = deepcopy(annotation_config)
+ update_config["name"] = "note"
+ response = await httpx_client.put(f"/v1/annotation_configs/{config_id}", json=update_config)
+ assert response.status_code == HTTP_409_CONFLICT
+ assert "The name 'note' is reserved" in response.text
+
+
+@pytest.mark.parametrize(
+ "annotation_config",
+ [
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.CATEGORICAL.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "Test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "lower_bound": 0.0,
+ "upper_bound": 1.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "Test description",
+ },
+ id="freeform",
+ ),
+ ],
+)
+async def test_cannot_create_annotation_config_with_duplicate_name(
+ httpx_client: AsyncClient,
+ annotation_config: dict[str, Any],
+) -> None:
+ response = await httpx_client.post("/v1/annotation_configs", json=annotation_config)
+ assert response.status_code == HTTP_200_OK
+
+ # Try to create another config with same name
+ response = await httpx_client.post("/v1/annotation_configs", json=annotation_config)
+ assert response.status_code == HTTP_409_CONFLICT
+ assert "name of the annotation configuration is already taken" in response.text
+
+
+async def test_create_categorical_config_with_empty_values_returns_expected_error(
+ httpx_client: AsyncClient,
+) -> None:
+ config = {
+ "name": "test_categorical",
+ "type": AnnotationType.CATEGORICAL.value,
+ "optimization_direction": OptimizationDirection.NONE.value,
+ "values": [], # empty values are disallowed
+ }
+ response = await httpx_client.post("/v1/annotation_configs", json=config)
+ assert response.status_code == HTTP_400_BAD_REQUEST
+ assert "Values must be non-empty" in response.text
+
+
+@pytest.mark.parametrize(
+ "config",
+ [
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.CATEGORICAL.value,
+ "description": "config description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "values": [
+ {"label": "Good", "score": 1.0},
+ {"label": "Bad", "score": 0.0},
+ ],
+ },
+ id="categorical",
+ ),
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "config description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "lower_bound": 0.0,
+ "upper_bound": 1.0,
+ },
+ id="continuous",
+ ),
+ pytest.param(
+ {
+ "name": "config-name",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "config description",
+ },
+ id="freeform",
+ ),
+ ],
+)
+async def test_updated_annotation_config_name_cannot_collide_with_existing_config_name(
+ httpx_client: AsyncClient,
+ config: dict[str, Any],
+) -> None:
+ # Create the first config
+ first_config = {
+ "name": "collide-config-name",
+ "type": AnnotationType.FREEFORM.value,
+ "description": "config description",
+ }
+ response = await httpx_client.post("/v1/annotation_configs", json=first_config)
+ assert response.status_code == HTTP_200_OK
+
+ # Create second config
+ response = await httpx_client.post("/v1/annotation_configs", json=config)
+ assert response.status_code == HTTP_200_OK
+ config_id = response.json()["data"]["id"]
+
+ # Try to update second config name to collide with first
+ update_config = config.copy()
+ update_config["name"] = "collide-config-name"
+ response = await httpx_client.put(f"/v1/annotation_configs/{config_id}", json=update_config)
+ assert response.status_code == HTTP_409_CONFLICT
+ assert "name of the annotation configuration is already taken" in response.text
+
+
+async def test_update_continuous_annotation_config_with_invalid_bounds_returns_expected_error(
+ httpx_client: AsyncClient,
+) -> None:
+ # First create a valid continuous config
+ config = {
+ "name": "test-config",
+ "type": AnnotationType.CONTINUOUS.value,
+ "description": "test description",
+ "optimization_direction": OptimizationDirection.MAXIMIZE.value,
+ "lower_bound": 0.0,
+ "upper_bound": 1.0,
+ }
+ response = await httpx_client.post("/v1/annotation_configs", json=config)
+ assert response.status_code == HTTP_200_OK
+ config_id = response.json()["data"]["id"]
+
+ # Try to update with invalid bounds
+ update_config = config.copy()
+ update_config["lower_bound"] = 1.0
+ update_config["upper_bound"] = 0.0
+
+ response = await httpx_client.put(f"/v1/annotation_configs/{config_id}", json=update_config)
+ assert response.status_code == HTTP_400_BAD_REQUEST
+ assert "Lower bound must be strictly less than upper bound" in response.text
+
+
+@pytest.fixture
+async def annotation_configs(db: DbSessionFactory) -> list[models.AnnotationConfig]:
+ """
+ Creates five annotation configs.
+ """
+ configs = []
+ async with db() as session:
+ for index in range(5):
+ config = models.AnnotationConfig(
+ name=f"config-name-{index}",
+ config=CategoricalAnnotationConfig(
+ type=AnnotationType.CATEGORICAL.value,
+ description=f"config-description-{index}",
+ optimization_direction=OptimizationDirection.MAXIMIZE,
+ values=[
+ CategoricalAnnotationValue(label="Good", score=1.0),
+ CategoricalAnnotationValue(label="Bad", score=0.0),
+ ],
+ ),
+ )
+ session.add(config)
+ configs.append(config)
+ await session.flush()
+ return configs
+
+
+@pytest.mark.parametrize(
+ "limit,expected_page_size,expected_next_cursor",
+ [
+ pytest.param(
+ 4,
+ 4,
+ str(GlobalID("CategoricalAnnotationConfig", str(1))),
+ id="page_size_less_than_total_has_next_cursor",
+ ),
+ pytest.param(
+ 5,
+ 5,
+ None,
+ id="page_size_equals_total_no_next_cursor",
+ ),
+ pytest.param(
+ 6,
+ 5,
+ None,
+ id="page_size_greater_than_total_no_next_cursor",
+ ),
+ ],
+)
+async def test_list_annotation_configs_pagination_without_cursor(
+ httpx_client: AsyncClient,
+ annotation_configs: list[models.AnnotationConfig],
+ limit: int,
+ expected_page_size: int,
+ expected_next_cursor: Optional[str],
+) -> None:
+ response = await httpx_client.get(f"/v1/annotation_configs?limit={limit}")
+ assert response.status_code == HTTP_200_OK
+ data = response.json()
+ assert len(data["data"]) == expected_page_size
+ assert data["next_cursor"] == expected_next_cursor
+
+
+@pytest.mark.parametrize(
+ "limit,expected_page_size,expected_next_cursor",
+ [
+ pytest.param(
+ 2,
+ 2,
+ str(GlobalID("CategoricalAnnotationConfig", str(1))),
+ id="page_size_less_than_remaining_has_next_cursor",
+ ),
+ pytest.param(
+ 3,
+ 3,
+ None,
+ id="page_size_equals_remaining_no_next_cursor",
+ ),
+ pytest.param(
+ 4,
+ 3,
+ None,
+ id="page_size_greater_than_remaining_no_next_cursor",
+ ),
+ ],
+)
+async def test_list_annotation_configs_pagination_with_cursor(
+ httpx_client: AsyncClient,
+ annotation_configs: list[models.AnnotationConfig],
+ limit: int,
+ expected_page_size: int,
+ expected_next_cursor: Optional[str],
+) -> None:
+ # First, fetch the first page to obtain a pagination cursor
+ first_response = await httpx_client.get("/v1/annotation_configs?limit=2")
+ assert first_response.status_code == HTTP_200_OK
+ first_data = first_response.json()
+ assert len(first_data["data"]) == 2
+ cursor = first_data["next_cursor"]
+ assert cursor is not None
+
+ # Then get second page using cursor
+ response = await httpx_client.get(f"/v1/annotation_configs?limit={limit}&cursor={cursor}")
+ assert response.status_code == HTTP_200_OK
+ data = response.json()
+ assert len(data["data"]) == expected_page_size
+ assert data["next_cursor"] == expected_next_cursor
diff --git a/tests/unit/server/api/routers/v1/test_traces.py b/tests/unit/server/api/routers/v1/test_traces.py
index bf6e3bdc80..344a4cf870 100644
--- a/tests/unit/server/api/routers/v1/test_traces.py
+++ b/tests/unit/server/api/routers/v1/test_traces.py
@@ -78,6 +78,7 @@ async def test_rest_trace_annotation(
"explanation": "This is a test annotation.",
},
"metadata": {},
+ "identifier": "identifier-name",
}
]
}
@@ -98,3 +99,6 @@ async def test_rest_trace_annotation(
assert orm_annotation.score == 0.95
assert orm_annotation.explanation == "This is a test annotation."
assert orm_annotation.metadata_ == dict()
+ assert orm_annotation.identifier == "identifier-name"
+ assert orm_annotation.source == "APP"
+ assert orm_annotation.user_id is None
diff --git a/tests/unit/server/api/types/test_Project.py b/tests/unit/server/api/types/test_Project.py
index 5b593aa6af..58d27e1e05 100644
--- a/tests/unit/server/api/types/test_Project.py
+++ b/tests/unit/server/api/types/test_Project.py
@@ -1080,6 +1080,9 @@ async def llama_index_rag_spans(db: DbSessionFactory) -> None:
"annotator_kind": "LLM",
"created_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
"updated_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
+ "identifier": "",
+ "source": "APP",
+ "user_id": None,
},
{
"span_rowid": span_rowids[5],
@@ -1091,6 +1094,9 @@ async def llama_index_rag_spans(db: DbSessionFactory) -> None:
"annotator_kind": "LLM",
"created_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
"updated_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
+ "identifier": "",
+ "source": "APP",
+ "user_id": None,
},
{
"span_rowid": span_rowids[10],
@@ -1102,6 +1108,9 @@ async def llama_index_rag_spans(db: DbSessionFactory) -> None:
"annotator_kind": "LLM",
"created_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
"updated_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
+ "identifier": "",
+ "source": "APP",
+ "user_id": None,
},
{
"span_rowid": span_rowids[0],
@@ -1113,6 +1122,9 @@ async def llama_index_rag_spans(db: DbSessionFactory) -> None:
"annotator_kind": "LLM",
"created_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
"updated_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
+ "identifier": "",
+ "source": "APP",
+ "user_id": None,
},
{
"span_rowid": span_rowids[5],
@@ -1124,6 +1136,9 @@ async def llama_index_rag_spans(db: DbSessionFactory) -> None:
"annotator_kind": "LLM",
"created_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
"updated_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
+ "identifier": "",
+ "source": "APP",
+ "user_id": None,
},
{
"span_rowid": span_rowids[10],
@@ -1135,6 +1150,9 @@ async def llama_index_rag_spans(db: DbSessionFactory) -> None:
"annotator_kind": "LLM",
"created_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
"updated_at": datetime.fromisoformat("2024-05-20T01:42:11+00:00"),
+ "identifier": "",
+ "source": "APP",
+ "user_id": None,
},
],
)
diff --git a/tests/unit/server/api/types/test_ProjectTraceRetentionPolicy.py b/tests/unit/server/api/types/test_ProjectTraceRetentionPolicy.py
new file mode 100644
index 0000000000..5a0fbc2395
--- /dev/null
+++ b/tests/unit/server/api/types/test_ProjectTraceRetentionPolicy.py
@@ -0,0 +1,174 @@
+from dataclasses import dataclass, field
+from secrets import token_hex
+
+import pytest
+from strawberry.relay import GlobalID
+
+from phoenix.db import models
+from phoenix.db.constants import DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID
+from phoenix.db.types.trace_retention import (
+ MaxCountRule,
+ TraceRetentionCronExpression,
+ TraceRetentionRule,
+)
+from phoenix.server.api.types.Project import Project
+from phoenix.server.api.types.ProjectTraceRetentionPolicy import ProjectTraceRetentionPolicy
+from phoenix.server.types import DbSessionFactory
+from tests.unit.graphql import AsyncGraphQLClient
+
+
+@dataclass
+class _Data:
+ """Data class to hold test data for projects and their trace retention policies."""
+
+ projects: list[models.Project] = field(default_factory=list)
+ project_trace_retention_policies: list[models.ProjectTraceRetentionPolicy] = field(
+ default_factory=list
+ )
+
+
+class TestProjectTraceRetentionPolicy:
+ """Test suite for ProjectTraceRetentionPolicy GraphQL type and its relationships."""
+
+ @pytest.fixture
+ async def _data(
+ self,
+ db: DbSessionFactory,
+ ) -> _Data:
+ """
+ Fixture that creates test data including:
+ - One project with default trace retention policy
+ - One custom trace retention policy
+ - Two projects using the custom trace retention policy
+ """
+ projects: list[models.Project] = []
+ project_trace_retention_policies: list[models.ProjectTraceRetentionPolicy] = []
+ async with db() as session:
+ # Create a project with default trace retention policy
+ projects.append(models.Project(name=token_hex(8)))
+
+ # Create a custom trace retention policy
+ project_trace_retention_policies.append(
+ models.ProjectTraceRetentionPolicy(
+ name=token_hex(8),
+ cron_expression=TraceRetentionCronExpression(
+ root="0 1 * * 1"
+ ), # Run weekly on Monday at 1 AM
+ rule=TraceRetentionRule(root=MaxCountRule(max_count=1)), # Keep only 1 trace
+ )
+ )
+ session.add(project_trace_retention_policies[-1])
+ await session.flush()
+
+ # Create two projects using the custom trace retention policy
+ for _ in range(2):
+ projects.append(
+ models.Project(
+ name=token_hex(8),
+ trace_retention_policy_id=project_trace_retention_policies[-1].id,
+ )
+ )
+ session.add_all(projects)
+ await session.flush()
+ return _Data(
+ projects=projects,
+ project_trace_retention_policies=project_trace_retention_policies,
+ )
+
+ async def test_projects(
+ self,
+ _data: _Data,
+ gql_client: AsyncGraphQLClient,
+ ) -> None:
+ """
+ Test the GraphQL queries for:
+ 1. Project -> TraceRetentionPolicy relationship
+ 2. ProjectTraceRetentionPolicy -> Projects relationship
+ """
+ # GraphQL query to test both directions of the relationship
+ node_query = """query ($id: GlobalID!) {
+ node(id: $id) {
+ ... on Project {
+ traceRetentionPolicy {
+ id
+ }
+ }
+ ... on ProjectTraceRetentionPolicy {
+ projects {
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ }
+ }"""
+
+ # Test: Project with default trace retention policy
+ resp = await gql_client.execute(
+ node_query,
+ variables={
+ "id": str(GlobalID(Project.__name__, str(_data.projects[0].id))),
+ },
+ )
+ assert not resp.errors
+ assert resp.data
+ assert resp.data["node"]["traceRetentionPolicy"]["id"] == str(
+ GlobalID(
+ ProjectTraceRetentionPolicy.__name__,
+ str(DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID),
+ )
+ )
+
+ # Test: Projects associated with default trace retention policy
+ resp = await gql_client.execute(
+ node_query,
+ variables={
+ "id": str(
+ GlobalID(
+ ProjectTraceRetentionPolicy.__name__,
+ str(DEFAULT_PROJECT_TRACE_RETENTION_POLICY_ID),
+ )
+ )
+ },
+ )
+ assert not resp.errors
+ assert resp.data
+ assert sorted(e["node"]["id"] for e in resp.data["node"]["projects"]["edges"]) == sorted(
+ str(GlobalID(Project.__name__, str(p.id))) for p in _data.projects[:1]
+ )
+
+ # Test: Project with custom trace retention policy
+ resp = await gql_client.execute(
+ node_query,
+ variables={
+ "id": str(GlobalID(Project.__name__, str(_data.projects[1].id))),
+ },
+ )
+ assert not resp.errors
+ assert resp.data
+ assert resp.data["node"]["traceRetentionPolicy"]["id"] == str(
+ GlobalID(
+ ProjectTraceRetentionPolicy.__name__,
+ str(_data.project_trace_retention_policies[0].id),
+ )
+ )
+
+ # Test: Projects associated with a trace retention policy
+ resp = await gql_client.execute(
+ node_query,
+ variables={
+ "id": str(
+ GlobalID(
+ ProjectTraceRetentionPolicy.__name__,
+ str(_data.project_trace_retention_policies[0].id),
+ )
+ )
+ },
+ )
+ assert not resp.errors
+ assert resp.data
+ assert sorted(e["node"]["id"] for e in resp.data["node"]["projects"]["edges"]) == sorted(
+ str(GlobalID(Project.__name__, str(p.id))) for p in _data.projects[1:]
+ )
diff --git a/tests/unit/server/api/types/test_Span.py b/tests/unit/server/api/types/test_Span.py
index 667a77df0e..26edc77c1b 100644
--- a/tests/unit/server/api/types/test_Span.py
+++ b/tests/unit/server/api/types/test_Span.py
@@ -550,3 +550,343 @@ async def simple_dataset(
)
session.add(example_0_revision_0)
await session.flush()
+
+
+@pytest.mark.parametrize(
+ "filter_config,expected_summary_count,expected_summary_name,expected_mean_score,expected_label_fractions",
+ [
+ # Test case 1: No filter
+ pytest.param(
+ None, # No filter
+ 2, # Expect both Hallucination and Relevance summaries
+ "Hallucination", # Check the Hallucination summary
+ 0.55, # Mean score: (0.0 + 1.0 + 0.5 + 0.7) / 4 = 0.55
+ [
+ {"label": "factual", "fraction": 0.75}, # 3 out of 4 annotations with labels
+ {"label": "hallucinated", "fraction": 0.25}, # 1 out of 4 annotations with labels
+ ],
+ id="no-filter",
+ ),
+ # Test case 2: Filter by name (include)
+ pytest.param(
+ {"include": {"names": ["Hallucination"]}}, # Only include Hallucination annotations
+ 1, # Expect only Hallucination summary
+ "Hallucination", # Check the Hallucination summary
+ 0.55, # Mean score: (0.0 + 1.0 + 0.5 + 0.7) / 4 = 0.55
+ [
+ {"label": "factual", "fraction": 0.75}, # 3 out of 4 annotations with labels
+ {"label": "hallucinated", "fraction": 0.25}, # 1 out of 4 annotations with labels
+ ],
+ id="filter-by-name-include",
+ ),
+ # Test case 3: Filter by name (exclude)
+ pytest.param(
+ {"exclude": {"names": ["Relevance"]}}, # Exclude Relevance annotations
+ 1, # Expect only Hallucination summary
+ "Hallucination", # Check the Hallucination summary
+ 0.55, # Mean score: (0.0 + 1.0 + 0.5 + 0.7) / 4 = 0.55
+ [
+ {"label": "factual", "fraction": 0.75}, # 3 out of 4 annotations with labels
+ {"label": "hallucinated", "fraction": 0.25}, # 1 out of 4 annotations with labels
+ ],
+ id="filter-by-name-exclude",
+ ),
+ # Test case 4: Check Relevance summary
+ pytest.param(
+ None, # No filter
+ 2, # Expect both summaries
+ "Relevance", # Check the Relevance summary
+ 0.8, # Mean score: (0.8 + 0.7 + 0.9) / 3 = 0.8
+ [
+ {"label": "high", "fraction": 1 / 3}, # 1 out of 3 annotations with labels
+ {"label": "low", "fraction": 1 / 3}, # 1 out of 3 annotations with labels
+ {"label": "medium", "fraction": 1 / 3}, # 1 out of 3 annotations with labels
+ ],
+ id="check-relevance-summary",
+ ),
+ ],
+)
+async def test_span_annotation_summaries(
+ gql_client: AsyncGraphQLClient,
+ spans_with_annotations: None,
+ filter_config: Optional[dict[str, Any]],
+ expected_summary_count: int,
+ expected_summary_name: str,
+ expected_mean_score: float,
+ expected_label_fractions: list[dict[str, Any]],
+) -> None:
+ """
+ Test the span_annotation_summaries field with various filter configurations.
+
+ This test verifies that the span_annotation_summaries field correctly:
+ 1. Returns the expected number of summaries based on the filter
+ 2. Calculates mean scores correctly, handling null scores
+ 3. Calculates label fractions correctly, handling null labels
+
+ The test uses a fixture that creates a span with multiple annotations:
+ - 5 Hallucination annotations (3 factual, 1 hallucinated, 1 with null label)
+ - 4 Relevance annotations (1 high, 1 low, 1 medium, 1 with null label)
+
+ Args:
+ gql_client: The GraphQL client for making queries
+ spans_with_annotations: Fixture that creates test data
+ filter_config: Optional filter configuration for the query
+ expected_summary_count: Expected number of summaries returned
+ expected_summary_name: Name of the summary to check
+ expected_mean_score: Expected mean score for the summary
+ expected_label_fractions: Expected label fractions for the summary
+ """
+ # Build the filter part of the query if a filter config is provided
+ filter_arg = ""
+ if filter_config:
+ filter_parts = []
+ if "include" in filter_config:
+ include = filter_config["include"]
+ if "names" in include:
+ filter_parts.append(f'include: {{ names: {json.dumps(include["names"])} }}')
+ if "exclude" in filter_config:
+ exclude = filter_config["exclude"]
+ if "names" in exclude:
+ filter_parts.append(f'exclude: {{ names: {json.dumps(exclude["names"])} }}')
+ if filter_parts:
+ filter_arg = f'(filter: {{ {", ".join(filter_parts)} }})'
+
+ query = f"""
+ query ($spanId: GlobalID!) {{
+ span: node(id: $spanId) {{
+ ... on Span {{
+ spanAnnotationSummaries{filter_arg} {{
+ name
+ meanScore
+ labelFractions {{
+ label
+ fraction
+ }}
+ }}
+ }}
+ }}
+ }}
+ """ # noqa: E501
+ span_id = str(GlobalID(Span.__name__, str(1)))
+ response = await gql_client.execute(
+ query,
+ variables={"spanId": span_id},
+ )
+ assert not response.errors, f"GraphQL query returned errors: {response.errors}" # noqa: E501
+ data = response.data
+ assert data is not None, "GraphQL response data is None" # noqa: E501
+ span = data["span"]
+ assert span is not None, "GraphQL response span is None" # noqa: E501
+ summaries = span["spanAnnotationSummaries"]
+ assert (
+ len(summaries) == expected_summary_count
+ ), f"Expected {expected_summary_count} summaries, got {len(summaries)}" # noqa: E501
+
+ # Find the summary with the expected name
+ summary = next((s for s in summaries if s["name"] == expected_summary_name), None)
+ assert summary is not None, f"Summary with name {expected_summary_name} not found"
+
+ # Use a small tolerance for floating-point comparison
+ assert (
+ abs(summary["meanScore"] - expected_mean_score) < 1e-10
+ ), f"Expected mean score {expected_mean_score}, got {summary['meanScore']}" # noqa: E501
+
+ # Check label fractions
+ label_fractions = summary["labelFractions"]
+ assert len(label_fractions) == len(expected_label_fractions), (
+ f"Expected {len(expected_label_fractions)} label fractions, " f"got {len(label_fractions)}" # noqa: E501
+ )
+
+ # Sort both lists by label to ensure consistent comparison
+ label_fractions.sort(key=lambda x: x["label"])
+ expected_label_fractions.sort(key=lambda x: x["label"])
+
+ for actual, expected in zip(label_fractions, expected_label_fractions):
+ assert (
+ actual["label"] == expected["label"]
+ ), f"Expected label {expected['label']}, got {actual['label']}"
+ assert abs(actual["fraction"] - expected["fraction"]) < 1e-10, (
+ f"Expected fraction {expected['fraction']} for label {actual['label']}, "
+ f"got {actual['fraction']}"
+ )
+
+
+@pytest.fixture
+async def spans_with_annotations(
+ db: DbSessionFactory,
+) -> None:
+ """
+ Creates a project with a trace and a span, and adds annotations to the span.
+
+ This fixture sets up test data with the following structure:
+ 1. Creates a project named "test-project"
+ 2. Creates a trace with ID "test-trace-id"
+ 3. Creates a span with ID "test-span-id"
+ 4. Adds 5 Hallucination annotations to the span:
+ - 3 with label="factual" and scores 0.0, 1.0, and None
+ - 1 with label="hallucinated" and score 0.5
+ - 1 with label=None and score 0.7
+ 5. Adds 4 Relevance annotations to the span:
+ - 1 with label="high" and score 0.8
+ - 1 with label="low" and score 0.7
+ - 1 with label="medium" and score None
+ - 1 with label=None and score 0.9
+
+ This data is used to test various aspects of the span_annotation_summaries field,
+ including filtering, mean score calculation, and label fraction calculation.
+
+ Args:
+ db: Database session factory
+
+ Returns:
+ None
+ """
+ async with db() as session:
+ # Create project
+ project = models.Project(name="test-project")
+ session.add(project)
+ await session.flush()
+
+ # Create trace
+ trace = models.Trace(
+ trace_id="test-trace-id",
+ project_rowid=project.id,
+ start_time=datetime.now(timezone.utc),
+ end_time=datetime.now(timezone.utc),
+ )
+ session.add(trace)
+ await session.flush()
+
+ # Create span
+ span = models.Span(
+ trace_rowid=trace.id,
+ span_id="test-span-id",
+ name="test-span",
+ span_kind="LLM",
+ start_time=datetime.now(timezone.utc),
+ end_time=datetime.now(timezone.utc),
+ attributes={},
+ events=[],
+ status_code="OK",
+ status_message="OK",
+ cumulative_error_count=0,
+ cumulative_llm_token_count_prompt=0,
+ cumulative_llm_token_count_completion=0,
+ )
+ session.add(span)
+ await session.flush()
+
+ # Create annotations for the span
+ # Hallucination annotations
+ hallucination_annotations = [
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Hallucination",
+ label="factual",
+ score=0.0,
+ explanation="This is factual",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Hallucination",
+ label="factual",
+ score=1.0,
+ explanation="This is factual",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Hallucination",
+ label="hallucinated",
+ score=0.5,
+ explanation="This is hallucinated",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ # Add an annotation with score=None
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Hallucination",
+ label="factual",
+ score=None,
+ explanation="This is factual but no score provided",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ # Add an annotation with label=None
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Hallucination",
+ label=None,
+ score=0.7,
+ explanation="This has a score but no label",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ ]
+
+ # Relevance annotations
+ relevance_annotations = [
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Relevance",
+ label="high",
+ score=0.8,
+ explanation="This is highly relevant",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Relevance",
+ label="low",
+ score=0.7,
+ explanation="This is less relevant",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ # Add an annotation with label=None
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Relevance",
+ label=None,
+ score=0.9,
+ explanation="This is relevant but no label provided",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ # Add an annotation with score=None
+ models.SpanAnnotation(
+ span_rowid=span.id,
+ name="Relevance",
+ label="medium",
+ score=None,
+ explanation="This is relevant but no score provided",
+ metadata_={},
+ annotator_kind="HUMAN",
+ source="APP",
+ identifier=token_hex(8),
+ ),
+ ]
+
+ # Add all annotations to the session
+ session.add_all(hallucination_annotations + relevance_annotations)
diff --git a/tests/unit/server/api/types/test_SpanAnnotation.py b/tests/unit/server/api/types/test_SpanAnnotation.py
index 5186f218f1..9815fbf074 100644
--- a/tests/unit/server/api/types/test_SpanAnnotation.py
+++ b/tests/unit/server/api/types/test_SpanAnnotation.py
@@ -76,6 +76,8 @@ async def test_annotating_a_span(
score
explanation
metadata
+ identifier
+ source
}
}
}
@@ -90,6 +92,7 @@ async def test_annotating_a_span(
"score": 0.95,
"explanation": "This is a test annotation.",
"metadata": {},
+ "source": "API",
}
]
},
@@ -109,6 +112,9 @@ async def test_annotating_a_span(
assert orm_annotation.score == 0.95
assert orm_annotation.explanation == "This is a test annotation."
assert orm_annotation.metadata_ == dict()
+ assert orm_annotation.identifier == ""
+ assert orm_annotation.source == "API"
+ assert orm_annotation.user_id is None
response = await gql_client.execute(
query="""
@@ -122,6 +128,8 @@ async def test_annotating_a_span(
score
explanation
metadata
+ identifier
+ source
}
}
}
@@ -136,6 +144,7 @@ async def test_annotating_a_span(
"score": 0.95,
"explanation": "Updated explanation",
"metadata": {"updated": True},
+ "identifier": "updated-identifier",
}
]
},
@@ -149,6 +158,9 @@ async def test_annotating_a_span(
assert orm_annotation.label == "Positive"
assert orm_annotation.explanation == "Updated explanation"
assert orm_annotation.metadata_ == {"updated": True}
+ assert orm_annotation.identifier == "updated-identifier"
+ assert orm_annotation.source == "API"
+ assert orm_annotation.user_id is None
response = await gql_client.execute(
query="""
diff --git a/tests/unit/server/api/types/test_TraceAnnotation.py b/tests/unit/server/api/types/test_TraceAnnotation.py
index adaec738ab..623b94d23c 100644
--- a/tests/unit/server/api/types/test_TraceAnnotation.py
+++ b/tests/unit/server/api/types/test_TraceAnnotation.py
@@ -76,6 +76,8 @@ async def test_annotating_a_trace(
score
explanation
metadata
+ identifier
+ source
}
}
}
@@ -90,6 +92,8 @@ async def test_annotating_a_trace(
"score": 0.95,
"explanation": "This is a test annotation.",
"metadata": {},
+ "identifier": None,
+ "source": "API",
}
]
},
@@ -109,6 +113,9 @@ async def test_annotating_a_trace(
assert orm_annotation.score == 0.95
assert orm_annotation.explanation == "This is a test annotation."
assert orm_annotation.metadata_ == dict()
+ assert orm_annotation.identifier == ""
+ assert orm_annotation.source == "API"
+ assert orm_annotation.user_id is None
response = await gql_client.execute(
query="""
@@ -122,6 +129,8 @@ async def test_annotating_a_trace(
score
explanation
metadata
+ identifier
+ source
}
}
}
@@ -136,6 +145,7 @@ async def test_annotating_a_trace(
"score": 0.95,
"explanation": "Updated explanation",
"metadata": {"updated": True},
+ "identifier": "updated-identifier",
}
]
},
@@ -150,6 +160,9 @@ async def test_annotating_a_trace(
assert orm_annotation.label == "Positive"
assert orm_annotation.explanation == "Updated explanation"
assert orm_annotation.metadata_ == {"updated": True}
+ assert orm_annotation.identifier == "updated-identifier"
+ assert orm_annotation.source == "API"
+ assert orm_annotation.user_id is None
response = await gql_client.execute(
query="""
diff --git a/tests/unit/trace/dsl/conftest.py b/tests/unit/trace/dsl/conftest.py
index 3d9442c8bc..0d7b07c341 100644
--- a/tests/unit/trace/dsl/conftest.py
+++ b/tests/unit/trace/dsl/conftest.py
@@ -227,6 +227,9 @@ async def abc_project(db: DbSessionFactory) -> None:
name="0",
score=0,
metadata_={},
+ identifier="",
+ source="APP",
+ user_id=None,
)
)
span_rowid = await session.scalar(
@@ -268,6 +271,9 @@ async def abc_project(db: DbSessionFactory) -> None:
name="0",
score=1,
metadata_={},
+ identifier="",
+ source="APP",
+ user_id=None,
)
)
await session.execute(
@@ -277,6 +283,9 @@ async def abc_project(db: DbSessionFactory) -> None:
name="1",
label="1",
metadata_={},
+ identifier="",
+ source="APP",
+ user_id=None,
)
)
span_rowid = await session.scalar(
@@ -314,5 +323,8 @@ async def abc_project(db: DbSessionFactory) -> None:
name="1",
label="0",
metadata_={},
+ identifier="",
+ source="APP",
+ user_id=None,
)
)
diff --git a/tests/unit/utilities/test_datetime.py b/tests/unit/utilities/test_datetime.py
new file mode 100644
index 0000000000..14f96ac90d
--- /dev/null
+++ b/tests/unit/utilities/test_datetime.py
@@ -0,0 +1,18 @@
+from datetime import datetime
+
+import pytest
+
+from phoenix.utilities import hour_of_week
+
+
+@pytest.mark.parametrize(
+ "dt,expected",
+ [
+ pytest.param(datetime(2023, 1, 1, 0, 0, 0), 0, id="sunday_midnight"),
+ pytest.param(datetime(2023, 1, 1, 12, 0, 0), 12, id="sunday_noon"),
+ pytest.param(datetime(2023, 1, 2, 0, 0, 0), 24, id="monday_midnight"),
+ pytest.param(datetime(2023, 1, 7, 23, 0, 0), 167, id="saturday_last_hour"),
+ ],
+)
+def test_get_hour_of_week(dt: datetime, expected: int) -> None:
+ assert hour_of_week(dt) == expected
diff --git a/tox.ini b/tox.ini
index f488235d4e..a0c13f226d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,6 +16,7 @@ commands =
uv pip list -v
pyright .
mypy --strict .
+ pytest {posargs} tests/client
[testenv:phoenix_client_canary_tests_sdk_openai]
description = Run phoenix-client canary tests for third-party SDK: anthropic
@@ -130,7 +131,7 @@ commands_pre =
uv pip install --strict --reinstall-package arize-phoenix-client {toxinidir}/packages/phoenix-client
commands =
uv pip list -v
- pyright -p pyrightconfig.json prompts/
+ pyright -p pyrightconfig.json client/
mypy --strict .
[testenv:integration_tests]
@@ -318,7 +319,7 @@ commands =
uv tool run --from datamodel-code-generator datamodel-codegen \
--input {toxinidir}/schemas/openapi.json \
--input-file-type openapi \
- --output v1/.dataclass.txt \
+ --output v1/.dataclass.py \
--output-model-type dataclasses.dataclass \
--collapse-root-models \
--enum-field-as-literal all \
@@ -328,7 +329,7 @@ commands =
--use-generic-container-types \
--wrap-string-literal \
--disable-timestamp
- python -c "import re; file = 'v1/.dataclass.txt'; lines = [re.sub(r'\\bSequence]', 'Sequence[Any]]', line) for line in open(file).readlines()]; open(file, 'w').writelines(lines)"
+ python -c "import re; file = 'v1/.dataclass.py'; lines = [re.sub(r'\\bSequence]', 'Sequence[Any]]', line) for line in open(file).readlines()]; open(file, 'w').writelines(lines)"
python {toxinidir}/packages/phoenix-client/scripts/codegen/transform.py v1
uv pip install --strict --reinstall-package arize-phoenix-client {toxinidir}/packages/phoenix-client
uv pip list -v
diff --git a/tutorials/demos/demo_llama_index_rag.ipynb b/tutorials/demos/demo_llama_index_rag.ipynb
deleted file mode 100644
index 44c36193c3..0000000000
--- a/tutorials/demos/demo_llama_index_rag.ipynb
+++ /dev/null
@@ -1,679 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Phoenix Notebook: Demo Llama-Index Tracing\n",
- "\n",
- "All tracing fixtures generated from this notebook can be found on GCS under the `arize-phoenix-assets/traces` as `demo_llama_index_rag_(name).parquet`: [link here](https://console.cloud.google.com/storage/browser/arize-phoenix-assets/traces)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "wYET8LC-3koX"
- },
- "source": [
- "## Setup"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Install libraries"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "!pip install -qq arize-phoenix llama-index \"openai>=1\" gcsfs nest_asyncio langchain langchain-community cohere llama-index-postprocessor-cohere-rerank 'httpx<0.28'"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "EejzP1ov3wgw"
- },
- "source": [
- "Set up environment variables and enter API keys\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import os\n",
- "from getpass import getpass\n",
- "\n",
- "if not (openai_api_key := os.getenv(\"OPENAI_API_KEY\")):\n",
- " openai_api_key = getpass(\"🔑 Enter your OpenAI API key: \")\n",
- "os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n",
- "\n",
- "if not (cohere_api_key := os.getenv(\"COHERE_API_KEY\")):\n",
- " cohere_api_key = getpass(\"🔑 Enter your Cohere API key: \")\n",
- "os.environ[\"COHERE_API_KEY\"] = cohere_api_key"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "nrRN-Tur32Qd"
- },
- "source": [
- "## Launch Phoenix and Instrumentation"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import phoenix as px\n",
- "\n",
- "session = px.launch_app()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from openinference.instrumentation.llama_index import LlamaIndexInstrumentor\n",
- "\n",
- "from phoenix.otel import register\n",
- "\n",
- "tracer_provider = register(endpoint=\"http://127.0.0.1:6006/v1/traces\")\n",
- "LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "7OP5ELmg4LeS"
- },
- "source": [
- "## Parse Phoenix Documentation into Llama-Index Documents"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Imports"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import json\n",
- "import logging\n",
- "import sys"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# The nest_asyncio module enables the nesting of asynchronous functions within an already running async loop.\n",
- "# This is necessary because Jupyter notebooks inherently operate in an asynchronous loop.\n",
- "# By applying nest_asyncio, we can run additional async functions within this existing loop without conflicts.\n",
- "import nest_asyncio\n",
- "import pandas as pd\n",
- "from langchain.document_loaders import GitbookLoader\n",
- "from llama_index.core import Document, VectorStoreIndex\n",
- "\n",
- "nest_asyncio.apply()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "PWDCQJN1co7-"
- },
- "source": [
- "Enable Phoenix tracing via `LlamaIndexInstrumentor`. \n",
- "\n",
- "Phoenix uses OpenInference traces - an open-source standard for capturing and storing LLM application traces that enables LLM applications to seamlessly integrate with LLM observability solutions such as Phoenix."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "\"\"\"\n",
- "Fetches the Arize documentation from Gitbook and serializes it into LangChain format.\n",
- "\"\"\"\n",
- "\n",
- "\n",
- "def load_gitbook_docs(docs_url: str):\n",
- " \"\"\"Loads documents from a Gitbook URL.\n",
- "\n",
- " Args:\n",
- " docs_url (str): URL to Gitbook docs.\n",
- "\n",
- " Returns:\n",
- " List[LangChainDocument]: List of documents in LangChain format.\n",
- " \"\"\"\n",
- " loader = GitbookLoader(\n",
- " docs_url,\n",
- " load_all_paths=True,\n",
- " )\n",
- " return loader.load()\n",
- "\n",
- "\n",
- "logging.basicConfig(level=logging.INFO, stream=sys.stdout)\n",
- "\n",
- "# Fetch documentation\n",
- "docs_url = \"https://docs.arize.com/phoenix\"\n",
- "embedding_model_name = \"text-embedding-ada-002\"\n",
- "docs = load_gitbook_docs(docs_url)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "documents = []\n",
- "for doc in docs:\n",
- " documents.append(Document(metadata=doc.metadata, text=doc.page_content))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "documents[0].metadata"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Convert documents to a JSON serializable format (if needed)\n",
- "documents_json = [doc.to_dict() for doc in documents]\n",
- "\n",
- "# Save documents to a JSON file\n",
- "with open(\"demo_llama_index_documents.json\", \"w\") as file:\n",
- " json.dump(documents_json, file, indent=4)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "a3pYhJwq4VwX"
- },
- "source": [
- "## Set Up VectorStore and Query Engine"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from llama_index.core.node_parser import SentenceSplitter\n",
- "\n",
- "# Build index with a chunk_size of 1024\n",
- "splitter = SentenceSplitter(chunk_size=1024, chunk_overlap=250)\n",
- "nodes = splitter.get_nodes_from_documents(documents)\n",
- "vector_index = VectorStoreIndex(nodes)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "cOmyBVoSco7-"
- },
- "source": [
- "Build a QueryEngine and set up a Cohere reranker"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from llama_index.postprocessor.cohere_rerank import CohereRerank\n",
- "\n",
- "cohere_api_key = os.environ[\"COHERE_API_KEY\"]\n",
- "cohere_rerank = CohereRerank(api_key=cohere_api_key, top_n=2)\n",
- "\n",
- "query_engine = vector_index.as_query_engine(\n",
- " similarity_top_k=5,\n",
- " node_postprocessors=[cohere_rerank],\n",
- ")"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "wmlw_irPco7_"
- },
- "source": [
- "## Import Questions"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "host = \"https://storage.googleapis.com/\"\n",
- "bucket = \"arize-phoenix-assets\"\n",
- "prefix = \"traces\"\n",
- "url = f\"{host}{bucket}/{prefix}\""
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "questions_file = \"demo_llama_index_rag_questions.parquet\"\n",
- "questions_df = pd.read_parquet(f\"{url}/{questions_file}\")\n",
- "questions_df"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "jc7hbkyu5zr3"
- },
- "source": [
- "## Generate Answers for All Questions"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Start querying"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Loop over the questions and generate the answers\n",
- "for i, row in questions_df.iterrows():\n",
- " question = row[\"Prompt/ Question\"]\n",
- " response_vector = query_engine.query(question)\n",
- " print(f\"Question: {question}\\nAnswer: {response_vector.response}\\n\")"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## OPTIONAL: Remove index spans\n",
- "\n",
- "Indexing traces, such as document embeddings and document node parsing, might be instrumented. Remove by:\n",
- "\n",
- "1. Query spans from Phoenix without indexing spans and save as `demo_traces.parquet`\n",
- "2. Clear all spans generated from this notebook (manually)\n",
- "3. Log the same traces back to Phoenix without the indexing spans"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Step 1: Query spans from Phoenix without indexing spans and save as `demo_traces.parquet`"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix.trace.dsl import SpanQuery\n",
- "\n",
- "traces = px.Client().query_spans(\n",
- " SpanQuery().where(\n",
- " \"name != 'BaseEmbedding.get_text_embedding_batch' and name != 'MetadataAwareTextSplitter._parse_nodes' and name != 'SentenceSplitter.split_text_metadata_aware'\"\n",
- " ),\n",
- " limit=5000,\n",
- " timeout=100,\n",
- ")\n",
- "traces.to_parquet(\"demo_traces.parquet\")"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Step 2: Clear all spans manually on Phoenix"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "session.view()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Step 3: Log the same traces back to Phoenix without the indexing spans"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix import TraceDataset\n",
- "\n",
- "px.Client().log_traces(TraceDataset(pd.read_parquet(\"demo_traces.parquet\")))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Now indexing spans are removed and we're left with the traces we want"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "Jd6tXZk-59VT"
- },
- "source": [
- "## Phoenix Evals"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix.session.evaluation import get_retrieved_documents\n",
- "\n",
- "retrieved_documents_df = get_retrieved_documents(px.Client())\n",
- "retrieved_documents_df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix.session.evaluation import get_qa_with_reference\n",
- "\n",
- "queries_df = get_qa_with_reference(px.active_session())\n",
- "queries_df"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "AFECnz04co8H"
- },
- "source": [
- "Let's now use Phoenix's LLM Evals to evaluate the relevance of the retrieved documents with regards to the query. \n",
- "\n",
- "Note, we've turned on `explanations` which prompts the LLM to explain it's reasoning. This can be useful for debugging and for figuring out potential corrective actions."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix.evals import (\n",
- " HallucinationEvaluator,\n",
- " OpenAIModel,\n",
- " QAEvaluator,\n",
- " RelevanceEvaluator,\n",
- " run_evals,\n",
- ")\n",
- "\n",
- "eval_model = OpenAIModel(model=\"gpt-4\")\n",
- "relevance_evaluator = RelevanceEvaluator(eval_model)\n",
- "hallucination_evaluator = HallucinationEvaluator(eval_model)\n",
- "qa_evaluator = QAEvaluator(eval_model)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Document relevance evaluations"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "retrieved_documents_relevance_df = run_evals(\n",
- " evaluators=[relevance_evaluator],\n",
- " dataframe=retrieved_documents_df,\n",
- " provide_explanation=True,\n",
- " concurrency=20,\n",
- ")[0]\n",
- "retrieved_documents_relevance_df"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Hallucination and QA-correctness evaluations"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "hallucination_eval_df, qa_eval_df = run_evals(\n",
- " dataframe=queries_df,\n",
- " evaluators=[hallucination_evaluator, qa_evaluator],\n",
- " provide_explanation=True,\n",
- " concurrency=20,\n",
- ")\n",
- "hallucination_eval_df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "qa_eval_df"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "zbH5A6cS6fd-"
- },
- "source": [
- "## Log the Evaluations into Phoenix"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix.trace import DocumentEvaluations, SpanEvaluations\n",
- "\n",
- "px.Client().log_evaluations(\n",
- " SpanEvaluations(eval_name=\"Hallucination\", dataframe=hallucination_eval_df),\n",
- " SpanEvaluations(eval_name=\"QA Correctness\", dataframe=qa_eval_df),\n",
- " DocumentEvaluations(\n",
- " eval_name=\"Retrieval Relevance\", dataframe=retrieved_documents_relevance_df\n",
- " ),\n",
- ")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "session.view()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "I5IS5dvf6lkD"
- },
- "source": [
- "## Save the Traces and Evaluations"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Save all spans and evals as parquet fixtures"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import os\n",
- "\n",
- "# Specify and Create the Directory for Trace Dataset\n",
- "directory = \"fixtures\"\n",
- "os.makedirs(directory, exist_ok=True)\n",
- "\n",
- "# Save the Trace Dataset (set limit to above 2000)\n",
- "trace_id = px.Client().get_trace_dataset(limit=5000, timeout=60).save(directory=directory)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Save LLM spans as fixtures for dataset usage"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix.trace.dsl import SpanQuery\n",
- "\n",
- "llm_open_ai = px.Client().query_spans(\n",
- " SpanQuery().where(\"span_kind == 'LLM' and name == 'OpenAI.chat'\")\n",
- ")\n",
- "\n",
- "llm_predict = px.Client().query_spans(\n",
- " SpanQuery().where(\"span_kind == 'LLM' and name == 'LLM.predict'\")\n",
- ")\n",
- "\n",
- "all_llm = px.Client().query_spans(SpanQuery().where(\"span_kind == 'LLM'\"))\n",
- "\n",
- "llm_open_ai.to_parquet(\"fixtures/demo_llama_index_llm_open_ai.parquet\")\n",
- "llm_predict.to_parquet(\"fixtures/demo_llama_index_llm_predict.parquet\")\n",
- "all_llm.to_parquet(\"fixtures/demo_llama_index_llm_all_spans.parquet\")"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "OPTIONAL: Delete on Phoenix and import again to check for validity if necessary"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from phoenix import TraceDataset\n",
- "from phoenix.trace import DocumentEvaluations, SpanEvaluations\n",
- "\n",
- "px.Client().log_traces(\n",
- " TraceDataset(pd.read_parquet(\"fixtures/demo_llama_index_rag_traces.parquet\"))\n",
- ")\n",
- "\n",
- "retrieved_documents_relevance_df = pd.read_parquet(\n",
- " \"fixtures/demo_llama_index_rag_doc_relevance_eval.parquet\"\n",
- ")\n",
- "qa_eval_df = dataframe = pd.read_parquet(\n",
- " \"fixtures/demo_llama_index_rag_qa_correctness_eval.parquet\"\n",
- ")\n",
- "hallucination_eval_df = dataframe = pd.read_parquet(\n",
- " \"fixtures/demo_llama_index_rag_hallucination_eval.parquet\"\n",
- ")\n",
- "\n",
- "px.Client().log_evaluations(\n",
- " SpanEvaluations(eval_name=\"Hallucination\", dataframe=hallucination_eval_df),\n",
- " SpanEvaluations(eval_name=\"QA Correctness\", dataframe=qa_eval_df),\n",
- " DocumentEvaluations(\n",
- " eval_name=\"Retrieval Relevance\", dataframe=retrieved_documents_relevance_df\n",
- " ),\n",
- ")"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Now we have finished generating llama-index RAG QA traces and evals and have them saved as fixtures!"
- ]
- }
- ],
- "metadata": {
- "language_info": {
- "name": "python"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
diff --git a/tutorials/human_feedback/chatbot_with_human_feedback.ipynb b/tutorials/human_feedback/chatbot_with_human_feedback.ipynb
index 4025e05c68..07c1d75c51 100644
--- a/tutorials/human_feedback/chatbot_with_human_feedback.ipynb
+++ b/tutorials/human_feedback/chatbot_with_human_feedback.ipynb
@@ -40,7 +40,6 @@
"source": [
"import json\n",
"import os\n",
- "import warnings\n",
"from getpass import getpass\n",
"from typing import Any, Dict\n",
"from uuid import uuid4\n",
@@ -56,6 +55,7 @@
"from opentelemetry import trace as trace_api\n",
"\n",
"import phoenix as px\n",
+ "from phoenix.client import Client\n",
"from phoenix.otel import register\n",
"\n",
"if not (openai_api_key := os.getenv(\"OPENAI_API_KEY\")):\n",
@@ -118,11 +118,12 @@
"metadata": {},
"outputs": [],
"source": [
+ "client = Client()\n",
"http_client = httpx.Client()\n",
"\n",
"\n",
"def generate_response(\n",
- " input_text: str, model: str = \"gpt-4o\", temperature: float = 0.1\n",
+ " input_text: str, model: str = \"gpt-4o-mini\", temperature: float = 0.1\n",
") -> Dict[str, Any]:\n",
" user_message = {\"role\": \"user\", \"content\": input_text, \"uuid\": str(uuid4())}\n",
" invocation_parameters = {\"temperature\": temperature}\n",
@@ -161,25 +162,13 @@
"\n",
"def send_feedback(span_id: str, feedback: int) -> None:\n",
" label = \"👍\" if feedback == 1 else \"👎\"\n",
- " request_body = {\n",
- " \"data\": [\n",
- " {\n",
- " \"span_id\": span_id,\n",
- " \"name\": \"user_feedback\",\n",
- " \"annotator_kind\": \"HUMAN\",\n",
- " \"result\": {\"label\": label, \"score\": feedback},\n",
- " \"metadata\": {},\n",
- " }\n",
- " ]\n",
- " }\n",
- "\n",
- " try:\n",
- " response = http_client.post(FEEDBACK_ENDPOINT, json=request_body)\n",
- " if not (200 <= response.status_code < 300):\n",
- " raise Exception(f\"Failed to send feedback: {response.text}\")\n",
- " print(f\"Feedback sent for span_id {span_id}: {label}\")\n",
- " except httpx.ConnectError:\n",
- " warnings.warn(\"Could not connect to feedback server.\")"
+ " client.annotations.add_span_annotation(\n",
+ " span_id=span_id,\n",
+ " annotation_name=\"user_feedback\",\n",
+ " label=label,\n",
+ " score=feedback,\n",
+ " )\n",
+ " print(f\"Feedback sent for span_id {span_id}: {label}\")"
]
},
{
@@ -233,6 +222,38 @@
"# Display the chat interface\n",
"display(chat_history, input_box, send_button)"
]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Analyze feedback using the Phoenix Client\n",
+ "\n",
+ "We can use the Phoenix client to pull the annotated spans. By combining `get_spans_dataframe`\n",
+ "and `get_span_annotations_dataframe` we can create a dataframe of all annotations alongside\n",
+ "span data for analysis!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "spans_df = client.spans.get_spans_dataframe(project_name=\"default\")\n",
+ "annotations_df = client.spans.get_span_annotations_dataframe(\n",
+ " spans_dataframe=spans_df, project=\"default\"\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "annotations_df.join(spans_df, how=\"inner\", lsuffix=\"_annotation\", rsuffix=\"_span\")"
+ ]
}
],
"metadata": {
diff --git a/tutorials/log_traces_to_phoenix.ipynb b/tutorials/log_traces_to_phoenix.ipynb
deleted file mode 100644
index cb1e1fff5e..0000000000
--- a/tutorials/log_traces_to_phoenix.ipynb
+++ /dev/null
@@ -1,102 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n",
- " \n",
- " \n",
- " \n",
- " Docs\n",
- " |\n",
- " GitHub\n",
- " |\n",
- " Community\n",
- " \n",
- "\n",
- "Logging traces to Phoenix\n",
- "\n",
- "In this tutorial we will learn how to launch Phoenix and upload traces using the client.\n",
- "\n",
- "As of Phoenix version `3.22.0`, the client has a `log_traces` method that allows you to upload a `TraceDataset` directly."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "First, let's download an example `TraceDataset`."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from urllib.request import urlopen\n",
- "\n",
- "from phoenix.trace.trace_dataset import TraceDataset\n",
- "from phoenix.trace.utils import json_lines_to_df\n",
- "\n",
- "traces_url = \"https://storage.googleapis.com/arize-phoenix-assets/datasets/unstructured/llm/context-retrieval/trace.jsonl\"\n",
- "with urlopen(traces_url) as response:\n",
- " lines = [line.decode(\"utf-8\") for line in response.readlines()]\n",
- "trace_ds = TraceDataset(json_lines_to_df(lines))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Launch Phoenix. You can open use Phoenix within your notebook or in a separate browser window by opening the URL."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import phoenix as px\n",
- "\n",
- "(session := px.launch_app()).view()\n",
- "session_url = session.url"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Create a client and use `log_traces` to upload the `TraceDataset`. We can optionally add these traces to a specific project."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "client = px.Client(endpoint=session_url)\n",
- "client.log_traces(trace_ds, project_name=\"old-traces\")"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "You should now see a view like this.\n",
- "\n",
- ""
- ]
- }
- ],
- "metadata": {
- "language_info": {
- "name": "python"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
| | |