Skip to content

Commit c7ccf26

Browse files
authored
fix: Updated openai instrumentation to properly return APIPromise to avoid crashing when using completions.parse or responses.parse (#3382)
1 parent b976c37 commit c7ccf26

File tree

4 files changed

+76
-5
lines changed

4 files changed

+76
-5
lines changed

lib/subscribers/openai/chat.js

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ const MIN_STREAM_VERSION = '4.12.2'
2323
class OpenAIChatCompletions extends OpenAISubscriber {
2424
constructor({ agent, logger, channelName = 'nr_completionsCreate' }) {
2525
super({ agent, logger, channelName })
26+
this.events = ['asyncEnd', 'end']
2627
}
2728

2829
handler(data, ctx) {
@@ -42,6 +43,32 @@ class OpenAIChatCompletions extends OpenAISubscriber {
4243
return newCtx
4344
}
4445

46+
/**
47+
* Temporary fix as `tracePromise` wraps the promise in a native one.
48+
* We are now wrapping `openai.chat.completions.parse` in a traceSync call
49+
* and then wrapping the promise here so it returns the custom promise.
50+
* OpenAI has a [custom promise](https://github.com/openai/openai-node/blob/master/src/core/api-promise.ts) that crashes applications using `openai.chat.completions.parse`
51+
* see: https://github.com/newrelic/node-newrelic/issues/3379
52+
* see: https://github.com/nodejs/node/issues/59936
53+
* @param data
54+
*/
55+
end(data) {
56+
const promise = data?.result
57+
if (!promise.then) {
58+
return promise
59+
}
60+
61+
return promise.then((result) => {
62+
data.result = result
63+
this.channel.asyncEnd.publish(data)
64+
return result
65+
}).catch((err) => {
66+
data.error = err
67+
this.channel.asyncEnd.publish(data)
68+
return err
69+
})
70+
}
71+
4572
asyncEnd(data) {
4673
const ctx = this.agent.tracer.getContext()
4774
if (!ctx?.segment || !ctx?.transaction) {

lib/subscribers/openai/config.js

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ module.exports = {
1616
functionQuery: {
1717
className: 'Completions',
1818
methodName: 'create',
19-
kind: 'Async'
19+
kind: 'Sync'
2020
}
2121
},
2222
{
@@ -25,7 +25,7 @@ module.exports = {
2525
functionQuery: {
2626
className: 'Completions',
2727
methodName: 'create',
28-
kind: 'Async'
28+
kind: 'Sync'
2929
}
3030
}
3131
]
@@ -39,7 +39,7 @@ module.exports = {
3939
functionQuery: {
4040
className: 'Responses',
4141
methodName: 'create',
42-
kind: 'Async'
42+
kind: 'Sync'
4343
}
4444
}
4545
]

test/versioned/openai/chat-completions-res-api.test.js

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ const test = require('node:test')
99
const assert = require('node:assert')
1010
const fs = require('node:fs')
1111
const path = require('node:path')
12+
const { tspl } = require('@matteo.collina/tspl')
1213

1314
const { removeModules } = require('../../lib/cache-buster')
1415
const { assertSegments, assertSpanKind, match } = require('../../lib/custom-assertions')
@@ -55,6 +56,26 @@ test('responses.create', async (t) => {
5556
removeModules('openai')
5657
})
5758

59+
// Note: the mock server cannot be made to return a valid parse payload,
// but before the fix this call failed with a different (crash) error.
await t.test('should not crash when you call `responses.parse`', async (t) => {
  const { agent, client } = t.nr
  const plan = tspl(t, { plan: 1 })

  await helper.runInTransaction(agent, async (tx) => {
    try {
      const payload = {
        input: [{ role: 'user', content: 'You are a mathematician.' }]
      }
      await client.responses.parse(payload)
    } catch (error) {
      // The mock server response is unusable, so only assert the error is
      // the expected body-consumption failure rather than a crash.
      plan.match(error.message, /Body is unusable|body used already for/)
    } finally {
      tx.end()
    }
  })

  await plan.completed
})
78+
5879
await t.test('should create span on successful chat completion create', (t, end) => {
5980
const { client, agent, host, port } = t.nr
6081
helper.runInTransaction(agent, async (tx) => {

test/versioned/openai/chat-completions.test.js

Lines changed: 25 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ const TRACKING_METRIC = `Supportability/Nodejs/ML/OpenAI/${pkgVersion}`
2828

2929
const responses = require('./mock-chat-api-responses')
3030
const { assertChatCompletionMessages, assertChatCompletionSummary } = require('./common-chat-api')
31+
const { tspl } = require('@matteo.collina/tspl')
3132

3233
test('chat.completions.create', async (t) => {
3334
t.beforeEach(async (ctx) => {
@@ -57,13 +58,35 @@ test('chat.completions.create', async (t) => {
5758
removeModules('openai')
5859
})
5960

61+
// Note: I cannot figure out how to get the mock server to do the right thing,
62+
// but this was failing with a different issue before
63+
await t.test('should not crash when you call `completions.parse`', { skip: semver.lt(pkgVersion, '5.0.0') }, async (t) => {
64+
const plan = tspl(t, { plan: 1 })
65+
const { client, agent } = t.nr
66+
await helper.runInTransaction(agent, async (tx) => {
67+
try {
68+
await client.chat.completions.parse({
69+
messages: [{ role: 'user', content: 'You are a mathematician.' }]
70+
})
71+
} catch (err) {
72+
plan.match(err.message, /.*Body is unusable.*/)
73+
} finally {
74+
tx.end()
75+
}
76+
})
77+
78+
await plan.completed
79+
})
80+
6081
await t.test('should create span on successful chat completion create', (t, end) => {
6182
const { client, agent, host, port } = t.nr
6283
helper.runInTransaction(agent, async (tx) => {
63-
const results = await client.chat.completions.create({
84+
const prom = client.chat.completions.create({
6485
messages: [{ role: 'user', content: 'You are a mathematician.' }]
6586
})
6687

88+
const results = await prom
89+
6790
assert.equal(results.headers, undefined, 'should remove response headers from user result')
6891
assert.equal(results.choices[0].message.content, '1 plus 2 is 3.')
6992

@@ -138,7 +161,7 @@ test('chat.completions.create', async (t) => {
138161
})
139162

140163
if (semver.gte(pkgVersion, '4.12.2')) {
141-
await t.test('should create span on successful chat completion stream create', (t, end) => {
164+
await t.test('should create span on successful chat completion stream create', { skip: semver.lt(pkgVersion, '4.12.2') }, (t, end) => {
142165
const { client, agent, host, port } = t.nr
143166
helper.runInTransaction(agent, async (tx) => {
144167
const content = 'Streamed response'

0 commit comments

Comments
 (0)