diff --git a/dev-packages/node-integration-tests/package.json b/dev-packages/node-integration-tests/package.json index 40acc7510fda..30a8f784b1e9 100644 --- a/dev-packages/node-integration-tests/package.json +++ b/dev-packages/node-integration-tests/package.json @@ -61,6 +61,7 @@ "node-cron": "^3.0.3", "node-schedule": "^2.1.1", "pg": "8.16.0", + "postgres": "^3.4.7", "proxy": "^2.1.1", "redis-4": "npm:redis@^4.6.14", "reflect-metadata": "0.2.1", diff --git a/dev-packages/node-integration-tests/suites/tracing/postgresjs/docker-compose.yml b/dev-packages/node-integration-tests/suites/tracing/postgresjs/docker-compose.yml new file mode 100644 index 000000000000..301280106faa --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/postgresjs/docker-compose.yml @@ -0,0 +1,13 @@ +version: '3.9' + +services: + db: + image: postgres:13 + restart: always + container_name: integration-tests-postgresjs + ports: + - '5444:5432' + environment: + POSTGRES_USER: test + POSTGRES_PASSWORD: test + POSTGRES_DB: test_db diff --git a/dev-packages/node-integration-tests/suites/tracing/postgresjs/scenario.js b/dev-packages/node-integration-tests/suites/tracing/postgresjs/scenario.js new file mode 100644 index 000000000000..e7cb92aabf27 --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/postgresjs/scenario.js @@ -0,0 +1,62 @@ +const { loggingTransport } = require('@sentry-internal/node-integration-tests'); +const Sentry = require('@sentry/node'); + +Sentry.init({ + dsn: 'https://public@dsn.ingest.sentry.io/1337', + release: '1.0', + tracesSampleRate: 1.0, + transport: loggingTransport, +}); + +// Stop the process from exiting before the transaction is sent +setInterval(() => {}, 1000); + +const postgres = require('postgres'); + +const sql = postgres({ port: 5444, user: 'test', password: 'test', database: 'test_db' }); + +async function run() { + await Sentry.startSpan( + { + name: 'Test Transaction', + op: 'transaction', + }, + async () => { + try { + await sql` + CREATE TABLE "User" ("id" SERIAL NOT NULL,"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,"email" TEXT NOT NULL,"name" TEXT,CONSTRAINT "User_pkey" PRIMARY KEY ("id")); + `; + + await sql` + INSERT INTO "User" ("email", "name") VALUES ('Foo', 'bar@baz.com'); + `; + + await sql` + UPDATE "User" SET "name" = 'Foo' WHERE "email" = 'bar@baz.com'; + `; + + await sql` + SELECT * FROM "User" WHERE "email" = 'bar@baz.com'; + `; + + await sql`SELECT * from generate_series(1,1000) as x `.cursor(10, async rows => { + await Promise.all(rows); + }); + + await sql` + DROP TABLE "User"; + `; + + // This will be captured as an error as the table no longer exists + await sql` + SELECT * FROM "User" WHERE "email" = 'foo@baz.com'; + `; + } finally { + await sql.end(); + } + }, + ); +} + +// eslint-disable-next-line @typescript-eslint/no-floating-promises +run(); diff --git a/dev-packages/node-integration-tests/suites/tracing/postgresjs/test.ts b/dev-packages/node-integration-tests/suites/tracing/postgresjs/test.ts new file mode 100644 index 000000000000..68b1a82703a0 --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/postgresjs/test.ts @@ -0,0 +1,225 @@ +import { describe, expect, test } from 'vitest'; +import { createRunner } from '../../../utils/runner'; + +const EXISTING_TEST_EMAIL = 'bar@baz.com'; +const NON_EXISTING_TEST_EMAIL = 'foo@baz.com'; + +describe('postgresjs auto instrumentation', () => { + test('should auto-instrument `postgres` package', { timeout: 60_000 }, async () => { + const 
EXPECTED_TRANSACTION = { + transaction: 'Test Transaction', + spans: expect.arrayContaining([ + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'CREATE TABLE', + 'db.query.text': + 'CREATE TABLE "User" ("id" SERIAL NOT NULL,"createdAt" TIMESTAMP(?) NOT NULL DEFAULT CURRENT_TIMESTAMP,"email" TEXT NOT NULL,"name" TEXT,CONSTRAINT "User_pkey" PRIMARY KEY ("id"))', + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: + 'CREATE TABLE "User" ("id" SERIAL NOT NULL,"createdAt" TIMESTAMP(?) NOT NULL DEFAULT CURRENT_TIMESTAMP,"email" TEXT NOT NULL,"name" TEXT,CONSTRAINT "User_pkey" PRIMARY KEY ("id"))', + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'SELECT', + 'db.query.text': + "select b.oid, b.typarray from pg_catalog.pg_type a left join pg_catalog.pg_type b on b.oid = a.typelem where a.typcategory = 'A' group by b.oid, b.typarray order by b.oid", + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: + "select b.oid, b.typarray from pg_catalog.pg_type a left join pg_catalog.pg_type b on b.oid = a.typelem where a.typcategory = 'A' group by b.oid, b.typarray order by b.oid", + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'INSERT', + 'db.query.text': `INSERT INTO "User" ("email", "name") VALUES ('Foo', '${EXISTING_TEST_EMAIL}')`, + 'sentry.origin': 'auto.db.otel.postgres', + 'sentry.op': 'db', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: `INSERT INTO "User" ("email", "name") VALUES ('Foo', '${EXISTING_TEST_EMAIL}')`, + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'UPDATE', + 'db.query.text': `UPDATE "User" SET "name" = 'Foo' WHERE "email" = '${EXISTING_TEST_EMAIL}'`, + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: `UPDATE "User" SET "name" = 'Foo' WHERE "email" = '${EXISTING_TEST_EMAIL}'`, + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'SELECT', + 'db.query.text': `SELECT 
* FROM "User" WHERE "email" = '${EXISTING_TEST_EMAIL}'`, + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: `SELECT * FROM "User" WHERE "email" = '${EXISTING_TEST_EMAIL}'`, + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'SELECT', + 'db.query.text': 'SELECT * from generate_series(?,?) as x', + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: 'SELECT * from generate_series(?,?) as x', + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + 'db.operation.name': 'DROP TABLE', + 'db.query.text': 'DROP TABLE "User"', + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: 'DROP TABLE "User"', + op: 'db', + status: 'ok', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + expect.objectContaining({ + data: expect.objectContaining({ + 'db.namespace': 'test_db', + 'db.system.name': 'postgres', + // No db.operation.name here, as this is an errored span + 'db.response.status_code': '42P01', + 'error.type': 'PostgresError', + 'db.query.text': `SELECT * FROM "User" WHERE "email" = '${NON_EXISTING_TEST_EMAIL}'`, + 'sentry.op': 'db', + 'sentry.origin': 'auto.db.otel.postgres', + 'server.address': 'localhost', + 'server.port': 5444, + }), + description: `SELECT * FROM "User" WHERE "email" = '${NON_EXISTING_TEST_EMAIL}'`, + op: 'db', + status: 'unknown_error', + origin: 'auto.db.otel.postgres', + parent_span_id: expect.any(String), + span_id: expect.any(String), + start_timestamp: expect.any(Number), + timestamp: expect.any(Number), + trace_id: expect.any(String), + }), + ]), + }; + + const EXPECTED_ERROR_EVENT = { + event_id: expect.any(String), + contexts: { + trace: { + trace_id: expect.any(String), + span_id: expect.any(String), + }, + }, + exception: { + values: [ + { + type: 'PostgresError', + value: 'relation "User" does not exist', + stacktrace: expect.objectContaining({ + frames: expect.arrayContaining([ + expect.objectContaining({ + function: 'handle', + module: 'postgres.cjs.src:connection', + filename: expect.any(String), + lineno: expect.any(Number), + colno: expect.any(Number), + }), + ]), + }), + }, + ], + }, + }; + + await createRunner(__dirname, 'scenario.js') + .withDockerCompose({ workingDirectory: [__dirname], readyMatches: ['port 5432'] }) + .expect({ transaction: EXPECTED_TRANSACTION }) + .expect({ event: EXPECTED_ERROR_EVENT }) + .start() + .completed(); + }); +}); diff --git a/packages/astro/src/index.server.ts b/packages/astro/src/index.server.ts index 750eb05d8b10..83a135e71f21 100644 --- 
a/packages/astro/src/index.server.ts +++ b/packages/astro/src/index.server.ts @@ -84,6 +84,7 @@ export { onUnhandledRejectionIntegration, parameterize, postgresIntegration, + postgresJsIntegration, prismaIntegration, childProcessIntegration, createSentryWinstonTransport, diff --git a/packages/aws-serverless/src/index.ts b/packages/aws-serverless/src/index.ts index b13f69a9b6ce..f64ee53dc47c 100644 --- a/packages/aws-serverless/src/index.ts +++ b/packages/aws-serverless/src/index.ts @@ -99,6 +99,7 @@ export { redisIntegration, tediousIntegration, postgresIntegration, + postgresJsIntegration, prismaIntegration, childProcessIntegration, createSentryWinstonTransport, diff --git a/packages/bun/src/index.ts b/packages/bun/src/index.ts index 14a44e2d38fc..4a9d7fd9d71c 100644 --- a/packages/bun/src/index.ts +++ b/packages/bun/src/index.ts @@ -120,6 +120,7 @@ export { redisIntegration, tediousIntegration, postgresIntegration, + postgresJsIntegration, prismaIntegration, hapiIntegration, setupHapiErrorHandler, diff --git a/packages/google-cloud-serverless/src/index.ts b/packages/google-cloud-serverless/src/index.ts index e9586a9bd820..f0bed369acee 100644 --- a/packages/google-cloud-serverless/src/index.ts +++ b/packages/google-cloud-serverless/src/index.ts @@ -99,6 +99,7 @@ export { redisIntegration, tediousIntegration, postgresIntegration, + postgresJsIntegration, prismaIntegration, hapiIntegration, setupHapiErrorHandler, diff --git a/packages/node/src/index.ts b/packages/node/src/index.ts index cf951c3db8b6..1c02da9fff2e 100644 --- a/packages/node/src/index.ts +++ b/packages/node/src/index.ts @@ -23,6 +23,7 @@ export { mysqlIntegration } from './integrations/tracing/mysql'; export { mysql2Integration } from './integrations/tracing/mysql2'; export { redisIntegration } from './integrations/tracing/redis'; export { postgresIntegration } from './integrations/tracing/postgres'; +export { postgresJsIntegration } from './integrations/tracing/postgresjs'; export { prismaIntegration } from './integrations/tracing/prisma'; export { hapiIntegration, setupHapiErrorHandler } from './integrations/tracing/hapi'; export { koaIntegration, setupKoaErrorHandler } from './integrations/tracing/koa'; diff --git a/packages/node/src/integrations/tracing/index.ts b/packages/node/src/integrations/tracing/index.ts index 425710cae0ce..e7122562d619 100644 --- a/packages/node/src/integrations/tracing/index.ts +++ b/packages/node/src/integrations/tracing/index.ts @@ -15,6 +15,7 @@ import { instrumentMongoose, mongooseIntegration } from './mongoose'; import { instrumentMysql, mysqlIntegration } from './mysql'; import { instrumentMysql2, mysql2Integration } from './mysql2'; import { instrumentPostgres, postgresIntegration } from './postgres'; +import { instrumentPostgresJs, postgresJsIntegration } from './postgresjs'; import { prismaIntegration } from './prisma'; import { instrumentRedis, redisIntegration } from './redis'; import { instrumentTedious, tediousIntegration } from './tedious'; @@ -44,6 +45,7 @@ export function getAutoPerformanceIntegrations(): Integration[] { amqplibIntegration(), lruMemoizerIntegration(), vercelAIIntegration(), + postgresJsIntegration(), ]; } @@ -75,5 +77,6 @@ export function getOpenTelemetryInstrumentationToPreload(): (((options?: any) => instrumentGenericPool, instrumentAmqplib, instrumentVercelAi, + instrumentPostgresJs, ]; } diff --git a/packages/node/src/integrations/tracing/postgresjs.ts b/packages/node/src/integrations/tracing/postgresjs.ts new file mode 100644 index 000000000000..c5efb7f6bef7 
--- /dev/null +++ b/packages/node/src/integrations/tracing/postgresjs.ts @@ -0,0 +1,327 @@ +// Instrumentation for https://github.com/porsager/postgres +import { context, trace } from '@opentelemetry/api'; +import type { InstrumentationConfig } from '@opentelemetry/instrumentation'; +import { + InstrumentationBase, + InstrumentationNodeModuleDefinition, + InstrumentationNodeModuleFile, + safeExecuteInTheMiddle, +} from '@opentelemetry/instrumentation'; +import { + ATTR_DB_NAMESPACE, + ATTR_DB_OPERATION_NAME, + ATTR_DB_QUERY_TEXT, + ATTR_DB_RESPONSE_STATUS_CODE, + ATTR_DB_SYSTEM_NAME, + ATTR_ERROR_TYPE, + ATTR_SERVER_ADDRESS, + ATTR_SERVER_PORT, +} from '@opentelemetry/semantic-conventions'; +import type { IntegrationFn, Span } from '@sentry/core'; +import { + defineIntegration, + getCurrentScope, + logger, + SDK_VERSION, + SPAN_STATUS_ERROR, + startSpanManual, +} from '@sentry/core'; +import { generateInstrumentOnce } from '../../otel/instrument'; +import { addOriginToSpan } from '../../utils/addOriginToSpan'; + +const INTEGRATION_NAME = 'PostgresJs'; +const SUPPORTED_VERSIONS = ['>=3.0.0 <4']; + +type PostgresConnectionContext = { + ATTR_DB_NAMESPACE?: string; // Database name + ATTR_SERVER_ADDRESS?: string; // Hostname or IP address of the database server + ATTR_SERVER_PORT?: string; // Port number of the database server +}; + +type PostgresJsInstrumentationConfig = InstrumentationConfig & { + /** + * Whether to require a parent span for the instrumentation. + * If set to true, the instrumentation will only create spans if there is a parent span + * available in the current scope. + * @default true + */ + requireParentSpan?: boolean; + /** + * Hook to modify the span before it is started. + * This can be used to set additional attributes or modify the span in any way. + */ + requestHook?: (span: Span, sanitizedSqlQuery: string, postgresConnectionContext?: PostgresConnectionContext) => void; +}; + +export const instrumentPostgresJs = generateInstrumentOnce( + INTEGRATION_NAME, + (options?: PostgresJsInstrumentationConfig) => + new PostgresJsInstrumentation({ + requireParentSpan: options?.requireParentSpan ?? true, + requestHook: options?.requestHook, + }), +); + +/** + * Instrumentation for the [postgres](https://www.npmjs.com/package/postgres) library. + * This instrumentation captures postgresjs queries and their attributes, + */ +export class PostgresJsInstrumentation extends InstrumentationBase { + public constructor(config: PostgresJsInstrumentationConfig) { + super('sentry-postgres-js', SDK_VERSION, config); + } + + /** + * Initializes the instrumentation. + */ + public init(): InstrumentationNodeModuleDefinition[] { + const instrumentationModule = new InstrumentationNodeModuleDefinition('postgres', SUPPORTED_VERSIONS); + + ['src', 'cf/src', 'cjs/src'].forEach(path => { + instrumentationModule.files.push( + new InstrumentationNodeModuleFile( + `postgres/${path}/connection.js`, + ['*'], + this._patchConnection.bind(this), + this._unwrap.bind(this), + ), + ); + + instrumentationModule.files.push( + new InstrumentationNodeModuleFile( + `postgres/${path}/query.js`, + SUPPORTED_VERSIONS, + this._patchQuery.bind(this), + this._unwrap.bind(this), + ), + ); + }); + + return [instrumentationModule]; + } + + /** + * Determines whether a span should be created based on the current context. + * If `requireParentSpan` is set to true in the configuration, a span will + * only be created if there is a parent span available. 
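+   * The default for `requireParentSpan` is `true` (applied in `instrumentPostgresJs` above).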
+ */ + private _shouldCreateSpans(): boolean { + const config = this.getConfig(); + const hasParentSpan = trace.getSpan(context.active()) !== undefined; + return hasParentSpan || !config.requireParentSpan; + } + + /** + * Patches the reject method of the Query class to set the span status and end it + */ + private _patchReject(rejectTarget: any, span: Span): any { + return new Proxy(rejectTarget, { + apply: ( + rejectTarget, + rejectThisArg, + rejectArgs: { + message?: string; + code?: string; + name?: string; + }[], + ) => { + span.setStatus({ + code: SPAN_STATUS_ERROR, + // This message is the error message from the rejectArgs, when available + // e.g "relation 'User' does not exist" + message: rejectArgs?.[0]?.message || 'unknown_error', + }); + + const result = Reflect.apply(rejectTarget, rejectThisArg, rejectArgs); + + // This status code is PG error code, e.g. '42P01' for "relation does not exist" + // https://www.postgresql.org/docs/current/errcodes-appendix.html + span.setAttribute(ATTR_DB_RESPONSE_STATUS_CODE, rejectArgs?.[0]?.code || 'Unknown error'); + // This is the error type, e.g. 'PostgresError' for a Postgres error + span.setAttribute(ATTR_ERROR_TYPE, rejectArgs?.[0]?.name || 'Unknown error'); + + span.end(); + return result; + }, + }); + } + + /** + * Patches the resolve method of the Query class to end the span when the query is resolved. + */ + private _patchResolve(resolveTarget: any, span: Span): any { + return new Proxy(resolveTarget, { + apply: (resolveTarget, resolveThisArg, resolveArgs: [{ command?: string }]) => { + const result = Reflect.apply(resolveTarget, resolveThisArg, resolveArgs); + const sqlCommand = resolveArgs?.[0]?.command; + + if (sqlCommand) { + // SQL command is only available when the query is resolved successfully + span.setAttribute(ATTR_DB_OPERATION_NAME, sqlCommand); + } + span.end(); + return result; + }, + }); + } + + /** + * Patches the Query class to instrument the handle method. 
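+   * The `handle` method is wrapped in a Proxy that starts a `db` span named after the sanitized query text
+   * (e.g. `SELECT * from generate_series(?,?) as x`) and patches the query's `resolve` and `reject` callbacks
+   * so the span is ended, with the operation name or an error status, once the query settles.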
+ */ + private _patchQuery(moduleExports: { + Query: { + prototype: { + handle: any; + }; + }; + }): any { + moduleExports.Query.prototype.handle = new Proxy(moduleExports.Query.prototype.handle, { + apply: async ( + handleTarget, + handleThisArg: { + resolve: any; + reject: any; + strings?: string[]; + }, + handleArgs, + ) => { + if (!this._shouldCreateSpans()) { + // If we don't need to create spans, just call the original method + return Reflect.apply(handleTarget, handleThisArg, handleArgs); + } + + const sanitizedSqlQuery = this._sanitizeSqlQuery(handleThisArg.strings?.[0]); + + return startSpanManual( + { + name: sanitizedSqlQuery || 'postgresjs.query', + op: 'db', + }, + (span: Span) => { + const scope = getCurrentScope(); + const postgresConnectionContext = scope.getScopeData().contexts['postgresjsConnection'] as + | PostgresConnectionContext + | undefined; + + addOriginToSpan(span, 'auto.db.otel.postgres'); + + const { requestHook } = this.getConfig(); + + if (requestHook) { + safeExecuteInTheMiddle( + () => requestHook(span, sanitizedSqlQuery, postgresConnectionContext), + error => { + if (error) { + logger.error(`Error in requestHook for ${INTEGRATION_NAME} integration:`, error); + } + }, + ); + } + + // ATTR_DB_NAMESPACE is used to indicate the database name and the schema name + // It's only the database name as we don't have the schema information + const databaseName = postgresConnectionContext?.ATTR_DB_NAMESPACE || ''; + const databaseHost = postgresConnectionContext?.ATTR_SERVER_ADDRESS || ''; + const databasePort = postgresConnectionContext?.ATTR_SERVER_PORT || ''; + + span.setAttribute(ATTR_DB_SYSTEM_NAME, 'postgres'); + span.setAttribute(ATTR_DB_NAMESPACE, databaseName); + span.setAttribute(ATTR_SERVER_ADDRESS, databaseHost); + span.setAttribute(ATTR_SERVER_PORT, databasePort); + span.setAttribute(ATTR_DB_QUERY_TEXT, sanitizedSqlQuery); + + handleThisArg.resolve = this._patchResolve(handleThisArg.resolve, span); + handleThisArg.reject = this._patchReject(handleThisArg.reject, span); + + try { + return Reflect.apply(handleTarget, handleThisArg, handleArgs); + } catch (error) { + span.setStatus({ + code: SPAN_STATUS_ERROR, + }); + span.end(); + throw error; // Re-throw the error to propagate it + } + }, + ); + }, + }); + + return moduleExports; + } + + /** + * Patches the Connection class to set the database, host, and port attributes + * when a new connection is created. 
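+   * The values are stored on the current scope as the `postgresjsConnection` context, which `_patchQuery`
+   * later reads to set `db.namespace`, `server.address` and `server.port` on query spans.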
+ */ + private _patchConnection(Connection: any): any { + return new Proxy(Connection, { + apply: (connectionTarget, thisArg, connectionArgs: { database: string; host: string[]; port: number[] }[]) => { + const databaseName = connectionArgs[0]?.database || ''; + const databaseHost = connectionArgs[0]?.host?.[0] || ''; + const databasePort = connectionArgs[0]?.port?.[0] || ''; + + const scope = getCurrentScope(); + scope.setContext('postgresjsConnection', { + ATTR_DB_NAMESPACE: databaseName, + ATTR_SERVER_ADDRESS: databaseHost, + ATTR_SERVER_PORT: databasePort, + }); + + return Reflect.apply(connectionTarget, thisArg, connectionArgs); + }, + }); + } + + /** + * Sanitize SQL query as per the OTEL semantic conventions + * https://opentelemetry.io/docs/specs/semconv/database/database-spans/#sanitization-of-dbquerytext + */ + private _sanitizeSqlQuery(sqlQuery: string | undefined): string { + if (!sqlQuery) { + return 'Unknown SQL Query'; + } + + return ( + sqlQuery + .replace(/\s+/g, ' ') + .trim() // Remove extra spaces including newlines and trim + .substring(0, 1024) // Truncate to 1024 characters + .replace(/--.*?(\r?\n|$)/g, '') // Single line comments + .replace(/\/\*[\s\S]*?\*\//g, '') // Multi-line comments + .replace(/;\s*$/, '') // Remove trailing semicolons + .replace(/\b\d+\b/g, '?') // Replace standalone numbers + // Collapse whitespace to a single space + .replace(/\s+/g, ' ') + // Collapse IN and in clauses + // eg. IN (?, ?, ?, ?) to IN (?) + .replace(/\bIN\b\s*\(\s*\?(?:\s*,\s*\?)*\s*\)/g, 'IN (?)') + ); + } +} + +const _postgresJsIntegration = (() => { + return { + name: INTEGRATION_NAME, + setupOnce() { + instrumentPostgresJs(); + }, + }; +}) satisfies IntegrationFn; + +/** + * Adds Sentry tracing instrumentation for the [postgres](https://www.npmjs.com/package/postgres) library. + * + * For more information, see the [`postgresIntegration` documentation](https://docs.sentry.io/platforms/javascript/guides/node/configuration/integrations/postgres/). 
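+ * It is enabled by default when tracing is enabled, as part of the Node SDK's auto performance integrations
+ * (see `getAutoPerformanceIntegrations`).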
+ * + * @example + * ```javascript + * const Sentry = require('@sentry/node'); + * + * Sentry.init({ + * integrations: [Sentry.postgresJsIntegration()], + * }); + * ``` + */ + +export const postgresJsIntegration = defineIntegration(_postgresJsIntegration); diff --git a/packages/solidstart/src/server/index.ts b/packages/solidstart/src/server/index.ts index f11b9bb51077..9574b38f1e47 100644 --- a/packages/solidstart/src/server/index.ts +++ b/packages/solidstart/src/server/index.ts @@ -75,6 +75,7 @@ export { onUnhandledRejectionIntegration, parameterize, postgresIntegration, + postgresJsIntegration, prismaIntegration, redisIntegration, requestDataIntegration, diff --git a/packages/sveltekit/src/server/index.ts b/packages/sveltekit/src/server/index.ts index 717dd7387c98..07d92d03c8ce 100644 --- a/packages/sveltekit/src/server/index.ts +++ b/packages/sveltekit/src/server/index.ts @@ -77,6 +77,7 @@ export { onUnhandledRejectionIntegration, parameterize, postgresIntegration, + postgresJsIntegration, prismaIntegration, redisIntegration, requestDataIntegration, diff --git a/yarn.lock b/yarn.lock index bd6c7af98fde..ac3970cfb153 100644 --- a/yarn.lock +++ b/yarn.lock @@ -24165,6 +24165,11 @@ postgres-range@^1.1.1: resolved "https://registry.yarnpkg.com/postgres-range/-/postgres-range-1.1.3.tgz#9ccd7b01ca2789eb3c2e0888b3184225fa859f76" integrity sha512-VdlZoocy5lCP0c/t66xAfclglEapXPCIVhqqJRncYpvbCgImF0w67aPKfbqUMr72tO2k5q0TdTZwCLjPTI6C9g== +postgres@^3.4.7: + version "3.4.7" + resolved "https://registry.yarnpkg.com/postgres/-/postgres-3.4.7.tgz#122f460a808fe300cae53f592108b9906e625345" + integrity sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw== + preact@^10.19.4: version "10.19.4" resolved "https://registry.yarnpkg.com/preact/-/preact-10.19.4.tgz#735d331d5b1bd2182cc36f2ba481fd6f0da3fe3b"