Skip to content
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import { instrumentOpenAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockOpenAI {
constructor(config) {
this.apiKey = config.apiKey;

this.embeddings = {
create: async params => {
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
error.headers = { 'x-request-id': 'mock-request-123' };
throw error;
}

return {
object: 'list',
data: [
{
object: 'embedding',
embedding: [0.1, 0.2, 0.3],
index: 0,
},
],
model: params.model,
usage: {
prompt_tokens: 10,
total_tokens: 10,
},
};
},
};
}
}

// Drives the mock client through a success and a failure path inside a
// wrapping 'main' span, so the integration test can assert on the emitted
// embeddings spans.
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    // Wrap the mock so the OpenAI integration records spans for its calls.
    const client = instrumentOpenAiClient(new MockOpenAI({ apiKey: 'mock-api-key' }));

    // Case 1: successful embeddings request.
    await client.embeddings.create({
      input: 'Embedding test!',
      model: 'text-embedding-3-small',
      dimensions: 1536,
      encoding_format: 'float',
    });

    // Case 2: request against the failing model; the mock throws a 404-style
    // error, which we swallow because only the error span matters here.
    try {
      await client.embeddings.create({
        input: 'Error embedding test!',
        model: 'error-model',
      });
    } catch {
      // Error is expected and handled
    }
  });
}

run();
147 changes: 138 additions & 9 deletions dev-packages/node-integration-tests/suites/tracing/openai/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ describe('OpenAI integration', () => {
cleanupChildProcesses();
});

const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE = {
const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_CHAT = {
transaction: 'main',
spans: expect.arrayContaining([
// First span - basic chat completion without PII
Expand Down Expand Up @@ -147,7 +147,7 @@ describe('OpenAI integration', () => {
]),
};

const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE = {
const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_CHAT = {
transaction: 'main',
spans: expect.arrayContaining([
// First span - basic chat completion with PII
Expand Down Expand Up @@ -321,27 +321,27 @@ describe('OpenAI integration', () => {
]),
};

createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
createEsmAndCjsTests(__dirname, 'scenario-chat.mjs', 'instrument.mjs', (createRunner, test) => {
test('creates openai related spans with sendDefaultPii: false', async () => {
await createRunner()
.ignore('event')
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE })
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_CHAT })
.start()
.completed();
});
});

createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
createEsmAndCjsTests(__dirname, 'scenario-chat.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
test('creates openai related spans with sendDefaultPii: true', async () => {
await createRunner()
.ignore('event')
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE })
.expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_CHAT })
.start()
.completed();
});
});

createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-options.mjs', (createRunner, test) => {
createEsmAndCjsTests(__dirname, 'scenario-chat.mjs', 'instrument-with-options.mjs', (createRunner, test) => {
test('creates openai related spans with custom options', async () => {
await createRunner()
.ignore('event')
Expand All @@ -351,6 +351,109 @@ describe('OpenAI integration', () => {
});
});

// Expected 'main' transaction when sendDefaultPii is false: embeddings spans
// carry request/usage metadata but never the raw input text.
const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_EMBEDDINGS = {
transaction: 'main',
spans: expect.arrayContaining([
// First span - successful embeddings request. Note there is no
// 'gen_ai.request.messages' attribute because PII is off.
expect.objectContaining({
data: {
'gen_ai.operation.name': 'embeddings',
'sentry.op': 'gen_ai.embeddings',
'sentry.origin': 'auto.ai.openai',
'gen_ai.system': 'openai',
'gen_ai.request.model': 'text-embedding-3-small',
'gen_ai.request.encoding_format': 'float',
'gen_ai.request.dimensions': 1536,
'gen_ai.response.model': 'text-embedding-3-small',
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.total_tokens': 10,
'openai.response.model': 'text-embedding-3-small',
'openai.usage.prompt_tokens': 10,
},
description: 'embeddings text-embedding-3-small',
op: 'gen_ai.embeddings',
origin: 'auto.ai.openai',
status: 'ok',
}),
// Second span - the 'error-model' request fails, so no response/usage
// attributes are recorded and the span status reflects the error.
expect.objectContaining({
data: {
'gen_ai.operation.name': 'embeddings',
'sentry.op': 'gen_ai.embeddings',
'sentry.origin': 'auto.ai.openai',
'gen_ai.system': 'openai',
'gen_ai.request.model': 'error-model',
},
description: 'embeddings error-model',
op: 'gen_ai.embeddings',
origin: 'auto.ai.openai',
status: 'internal_error',
}),
]),
};

// Expected 'main' transaction when sendDefaultPii is true: same spans as the
// PII-off case, plus 'gen_ai.request.messages' capturing the raw input text.
const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_EMBEDDINGS = {
transaction: 'main',
spans: expect.arrayContaining([
// First span - successful embeddings request; with PII enabled the input
// string is recorded under 'gen_ai.request.messages'.
expect.objectContaining({
data: {
'gen_ai.operation.name': 'embeddings',
'sentry.op': 'gen_ai.embeddings',
'sentry.origin': 'auto.ai.openai',
'gen_ai.system': 'openai',
'gen_ai.request.model': 'text-embedding-3-small',
'gen_ai.request.encoding_format': 'float',
'gen_ai.request.dimensions': 1536,
'gen_ai.request.messages': 'Embedding test!',
'gen_ai.response.model': 'text-embedding-3-small',
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.total_tokens': 10,
'openai.response.model': 'text-embedding-3-small',
'openai.usage.prompt_tokens': 10,
},
description: 'embeddings text-embedding-3-small',
op: 'gen_ai.embeddings',
origin: 'auto.ai.openai',
status: 'ok',
}),
// Second span - failing 'error-model' request; the input is still captured
// because PII is on, but no response/usage attributes are present.
expect.objectContaining({
data: {
'gen_ai.operation.name': 'embeddings',
'sentry.op': 'gen_ai.embeddings',
'sentry.origin': 'auto.ai.openai',
'gen_ai.system': 'openai',
'gen_ai.request.model': 'error-model',
'gen_ai.request.messages': 'Error embedding test!',
},
description: 'embeddings error-model',
op: 'gen_ai.embeddings',
origin: 'auto.ai.openai',
status: 'internal_error',
}),
]),
};
createEsmAndCjsTests(__dirname, 'scenario-embeddings.mjs', 'instrument.mjs', (createRunner, test) => {
  test('creates openai related spans with sendDefaultPii: false', async () => {
    // Run the embeddings scenario and assert only on the emitted transaction.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_EMBEDDINGS });
    await runner.start().completed();
  });
});

createEsmAndCjsTests(__dirname, 'scenario-embeddings.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
  test('creates openai related spans with sendDefaultPii: true', async () => {
    // Same scenario, but instrumented with sendDefaultPii enabled, so the
    // expected spans include the raw embedding inputs.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_EMBEDDINGS });
    await runner.start().completed();
  });
});

createEsmAndCjsTests(__dirname, 'scenario-root-span.mjs', 'instrument.mjs', (createRunner, test) => {
test('it works without a wrapping span', async () => {
await createRunner()
Expand Down Expand Up @@ -400,7 +503,7 @@ describe('OpenAI integration', () => {

createEsmAndCjsTests(
__dirname,
'scenario-message-truncation-completions.mjs',
'truncation/scenario-message-truncation-completions.mjs',
'instrument-with-pii.mjs',
(createRunner, test) => {
test('truncates messages when they exceed byte limit - keeps only last message and crops it', async () => {
Expand Down Expand Up @@ -436,7 +539,7 @@ describe('OpenAI integration', () => {

createEsmAndCjsTests(
__dirname,
'scenario-message-truncation-responses.mjs',
'truncation/scenario-message-truncation-responses.mjs',
'instrument-with-pii.mjs',
(createRunner, test) => {
test('truncates string inputs when they exceed byte limit', async () => {
Expand Down Expand Up @@ -469,4 +572,30 @@ describe('OpenAI integration', () => {
});
},
);

createEsmAndCjsTests(
  __dirname,
  'truncation/scenario-message-truncation-embeddings.mjs',
  'instrument-with-pii.mjs',
  (createRunner, test) => {
    test('truncates messages when they exceed byte limit - keeps only last message and crops it', async () => {
      // Only the operation name matters here: the scenario exercises input
      // truncation, and we just check an embeddings span was produced.
      const expectedTransaction = {
        transaction: 'main',
        spans: expect.arrayContaining([
          expect.objectContaining({
            data: expect.objectContaining({
              'gen_ai.operation.name': 'embeddings',
            }),
          }),
        ]),
      };
      await createRunner().ignore('event').expect({ transaction: expectedTransaction }).start().completed();
    });
  },
);
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import { instrumentOpenAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockOpenAI {
constructor(config) {
this.apiKey = config.apiKey;

this.embeddings = {
create: async params => {
await new Promise(resolve => setTimeout(resolve, 10));

return {
object: 'list',
data: [
{
object: 'embedding',
embedding: [0.1, 0.2, 0.3],
index: 0,
},
],
model: params.model,
usage: {
prompt_tokens: 10,
total_tokens: 10,
},
};
},
};
}
}

// Exercises message truncation for embeddings inputs inside a wrapping
// 'main' span: one oversized string input, then an array of inputs where
// earlier entries must be dropped and the last one cropped.
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const client = instrumentOpenAiClient(new MockOpenAI({ apiKey: 'mock-api-key' }));

    // Create 1 large input that gets truncated to fit within the 20KB limit
    const singleOversizedInput = 'A'.repeat(25000) + 'B'.repeat(25000); // ~50KB gets truncated to include only As
    await client.embeddings.create({
      input: singleOversizedInput,
      model: 'text-embedding-3-small',
      dimensions: 1536,
      encoding_format: 'float',
    });

    // Create 3 large inputs where:
    // - First 2 inputs are very large (will be dropped)
    // - Last input is large but will be truncated to fit within the 20KB limit
    const droppedInputA = 'A'.repeat(15000); // ~15KB
    const droppedInputB = 'B'.repeat(15000); // ~15KB
    const croppedInput = 'C'.repeat(25000); // ~25KB (will be truncated)
    await client.embeddings.create({
      input: [droppedInputA, droppedInputB, croppedInput],
      model: 'text-embedding-3-small',
      dimensions: 1536,
      encoding_format: 'float',
    });
  });
}

run();
11 changes: 11 additions & 0 deletions packages/core/src/tracing/ai/gen-ai-attributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,16 @@ export const GEN_AI_REQUEST_TOP_K_ATTRIBUTE = 'gen_ai.request.top_k';
*/
export const GEN_AI_REQUEST_STOP_SEQUENCES_ATTRIBUTE = 'gen_ai.request.stop_sequences';

/**
 * Encoding format requested for the embeddings response (e.g. 'float').
 */
export const GEN_AI_REQUEST_ENCODING_FORMAT_ATTRIBUTE = 'gen_ai.request.encoding_format';

/**
 * Number of dimensions requested for the output embedding vectors.
 */
export const GEN_AI_REQUEST_DIMENSIONS_ATTRIBUTE = 'gen_ai.request.dimensions';

/**
* Array of reasons why the model stopped generating tokens
*/
Expand Down Expand Up @@ -208,6 +218,7 @@ export const OPENAI_USAGE_PROMPT_TOKENS_ATTRIBUTE = 'openai.usage.prompt_tokens'
/**
 * OpenAI API operation names used to label instrumented gen_ai spans
 * (chat completions, responses API, and embeddings).
 */
export const OPENAI_OPERATIONS = {
CHAT: 'chat',
RESPONSES: 'responses',
EMBEDDINGS: 'embeddings',
} as const;

// =============================================================================
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/tracing/openai/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ export const OPENAI_INTEGRATION_NAME = 'OpenAI';

// https://platform.openai.com/docs/quickstart?api-mode=responses
// https://platform.openai.com/docs/quickstart?api-mode=chat
export const INSTRUMENTED_METHODS = ['responses.create', 'chat.completions.create'] as const;
export const INSTRUMENTED_METHODS = ['responses.create', 'chat.completions.create', 'embeddings.create'] as const;
export const RESPONSES_TOOL_CALL_EVENT_TYPES = [
'response.output_item.added',
'response.function_call_arguments.delta',
Expand Down
Loading
Loading