From 5039a4003af5701b63bd769944f5e6cfed7f0b07 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 19 Dec 2025 09:37:20 +0100
Subject: [PATCH 1/6] fix(vercelai): input token count

---
 packages/core/src/tracing/vercel-ai/index.ts | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/packages/core/src/tracing/vercel-ai/index.ts b/packages/core/src/tracing/vercel-ai/index.ts
index 93be1ca33423..7d5552eb9517 100644
--- a/packages/core/src/tracing/vercel-ai/index.ts
+++ b/packages/core/src/tracing/vercel-ai/index.ts
@@ -119,6 +119,13 @@ function processEndedVercelAiSpan(span: SpanJSON): void {
   renameAttributeKey(attributes, AI_USAGE_PROMPT_TOKENS_ATTRIBUTE, GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE);
   renameAttributeKey(attributes, AI_USAGE_CACHED_INPUT_TOKENS_ATTRIBUTE, GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE);
 
+  // Input tokens is the sum of prompt tokens and cached input tokens
+  if (typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] === 'number' &&
+    typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE] === 'number'
+  ) {
+    attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] = attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] + attributes[GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE];
+  }
+
   if (
     typeof attributes[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE] === 'number' &&
     typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] === 'number'

From 18730de160e01eced0f85370c4d5cc92e20df186 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 19 Dec 2025 09:43:27 +0100
Subject: [PATCH 2/6] lint

---
 packages/core/src/tracing/vercel-ai/index.ts | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/packages/core/src/tracing/vercel-ai/index.ts b/packages/core/src/tracing/vercel-ai/index.ts
index 7d5552eb9517..e64b4b1a9cbf 100644
--- a/packages/core/src/tracing/vercel-ai/index.ts
+++ b/packages/core/src/tracing/vercel-ai/index.ts
@@ -120,10 +120,12 @@ function processEndedVercelAiSpan(span: SpanJSON): void {
   renameAttributeKey(attributes, AI_USAGE_CACHED_INPUT_TOKENS_ATTRIBUTE, GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE);
 
   // Input tokens is the sum of prompt tokens and cached input tokens
-  if (typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] === 'number' &&
-    typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE] === 'number'
+  if (
+    typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] === 'number' &&
+    typeof attributes[GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE] === 'number'
   ) {
-    attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] = attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] + attributes[GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE];
+    attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] =
+      attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] + attributes[GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE];
   }
 
   if (

From 9c15e4fcb3f7d77372c011bfc69085a34dffde52 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 19 Dec 2025 09:53:13 +0100
Subject: [PATCH 3/6] test

---
 .../vercelai/scenario-cached-tokens.mjs       | 25 +++++++++++++++++++
 .../suites/tracing/vercelai/test.ts           | 21 ++++++++++++++++
 2 files changed, 46 insertions(+)
 create mode 100644 dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs

diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs b/dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs
new file mode 100644
index 000000000000..30ce5d0ef0ad
--- /dev/null
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs
@@ -0,0 +1,25 @@
+import * as Sentry from '@sentry/node';
+import { generateText } from 'ai';
+import { MockLanguageModelV1 } from 'ai/test';
+
+async function run() {
+  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
+    await generateText({
+      model: new MockLanguageModelV1({
+        doGenerate: async () => ({
+          rawCall: { rawPrompt: null, rawSettings: {} },
+          finishReason: 'stop',
+          usage: { promptTokens: 100, completionTokens: 20 },
+          providerMetadata: {
+            openai: {
+              cachedPromptTokens: 50,
+            },
+          },
+        }),
+      }),
+      prompt: 'Test prompt',
+    });
+  });
+}
+
+run();
diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
index de228303ab0e..fd0986289c05 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
@@ -699,4 +699,25 @@ describe('Vercel AI integration', () => {
       expect(errorEvent!.contexts!.trace!.span_id).toBe(transactionEvent!.contexts!.trace!.span_id);
     });
   });
+
+  createEsmAndCjsTests(__dirname, 'scenario-cached-tokens.mjs', 'instrument.mjs', (createRunner, test) => {
+    test('adds cached input tokens to total input tokens', async () => {
+      await createRunner()
+        .expect({
+          transaction: {
+            transaction: 'main',
+            spans: expect.arrayContaining([
+              expect.objectContaining({
+                data: expect.objectContaining({
+                  'gen_ai.usage.input_tokens': 150,
+                  'gen_ai.usage.input_tokens.cached': 50,
+                }),
+              }),
+            ]),
+          },
+        })
+        .start()
+        .completed();
+    });
+  });
 });

From a5bc19c60c82a79fe3cc1a9d92ce11c36a9212c0 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 19 Dec 2025 10:14:39 +0100
Subject: [PATCH 4/6] unit test

---
 .../vercelai/scenario-cached-tokens.mjs       | 25 ------------
 .../suites/tracing/vercelai/test.ts           | 21 ----------
 .../tracing/vercel-ai-cached-tokens.test.ts   | 40 +++++++++++++++++++
 3 files changed, 40 insertions(+), 46 deletions(-)
 delete mode 100644 dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs
 create mode 100644 packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts

diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs b/dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs
deleted file mode 100644
index 30ce5d0ef0ad..000000000000
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/scenario-cached-tokens.mjs
+++ /dev/null
@@ -1,25 +0,0 @@
-import * as Sentry from '@sentry/node';
-import { generateText } from 'ai';
-import { MockLanguageModelV1 } from 'ai/test';
-
-async function run() {
-  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
-    await generateText({
-      model: new MockLanguageModelV1({
-        doGenerate: async () => ({
-          rawCall: { rawPrompt: null, rawSettings: {} },
-          finishReason: 'stop',
-          usage: { promptTokens: 100, completionTokens: 20 },
-          providerMetadata: {
-            openai: {
-              cachedPromptTokens: 50,
-            },
-          },
-        }),
-      }),
-      prompt: 'Test prompt',
-    });
-  });
-}
-
-run();
diff --git a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
index fd0986289c05..de228303ab0e 100644
--- a/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts
@@ -699,25 +699,4 @@ describe('Vercel AI integration', () => {
       expect(errorEvent!.contexts!.trace!.span_id).toBe(transactionEvent!.contexts!.trace!.span_id);
     });
   });
-
-  createEsmAndCjsTests(__dirname, 'scenario-cached-tokens.mjs', 'instrument.mjs', (createRunner, test) => {
-    test('adds cached input tokens to total input tokens', async () => {
-      await createRunner()
-        .expect({
-          transaction: {
-            transaction: 'main',
-            spans: expect.arrayContaining([
-              expect.objectContaining({
-                data: expect.objectContaining({
-                  'gen_ai.usage.input_tokens': 150,
-                  'gen_ai.usage.input_tokens.cached': 50,
-                }),
-              }),
-            ]),
-          },
-        })
-        .start()
-        .completed();
-    });
-  });
 });
diff --git a/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts b/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
new file mode 100644
index 000000000000..4909c19eb249
--- /dev/null
+++ b/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
@@ -0,0 +1,40 @@
+import { describe, expect, it } from 'vitest';
+
+import type { SpanJSON } from '../../../src/types-hoist/span';
+import { addVercelAiProcessors } from '../../../src/tracing/vercel-ai';
+import { TestClient, getDefaultTestClientOptions } from '../../mocks/client';
+
+describe('vercel-ai cached tokens', () => {
+  it('should add cached input tokens to total input tokens', () => {
+    const options = getDefaultTestClientOptions({ tracesSampleRate: 1.0 });
+    const client = new TestClient(options);
+    client.init();
+    addVercelAiProcessors(client);
+
+    const mockSpan: SpanJSON = {
+      description: 'test',
+      span_id: 'test-span-id',
+      trace_id: 'test-trace-id',
+      start_timestamp: 1000,
+      timestamp: 2000,
+      origin: 'auto.vercelai.otel',
+      data: {
+        'ai.usage.promptTokens': 100,
+        'ai.usage.cachedInputTokens': 50,
+      },
+    };
+
+    const event = {
+      type: 'transaction' as const,
+      spans: [mockSpan],
+    };
+
+    const eventProcessor = client['_eventProcessors'].find(processor => processor.id === 'VercelAiEventProcessor');
+    expect(eventProcessor).toBeDefined();
+
+    const processedEvent = eventProcessor!(event, {});
+
+    expect(processedEvent?.spans?.[0]?.data?.['gen_ai.usage.input_tokens']).toBe(150);
+    expect(processedEvent?.spans?.[0]?.data?.['gen_ai.usage.input_tokens.cached']).toBe(50);
+  });
+});

From 64a9751490c819d159e2fe91a1ebb1c570f659dc Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 19 Dec 2025 10:19:36 +0100
Subject: [PATCH 5/6] lint

---
 .../core/test/lib/tracing/vercel-ai-cached-tokens.test.ts | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts b/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
index 4909c19eb249..ee354139b2f6 100644
--- a/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
+++ b/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
@@ -1,8 +1,7 @@
 import { describe, expect, it } from 'vitest';
-
-import type { SpanJSON } from '../../../src/types-hoist/span';
 import { addVercelAiProcessors } from '../../../src/tracing/vercel-ai';
-import { TestClient, getDefaultTestClientOptions } from '../../mocks/client';
+import type { SpanJSON } from '../../../src/types-hoist/span';
+import { getDefaultTestClientOptions,TestClient } from '../../mocks/client';
 
 describe('vercel-ai cached tokens', () => {
   it('should add cached input tokens to total input tokens', () => {

From 4c668abcf7e3223492504577f5720d45ea4ca676 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 19 Dec 2025 10:35:25 +0100
Subject: [PATCH 6/6] fix lint, again

---
 packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts b/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
index ee354139b2f6..7e85121b9e92 100644
--- a/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
+++ b/packages/core/test/lib/tracing/vercel-ai-cached-tokens.test.ts
@@ -1,7 +1,7 @@
 import { describe, expect, it } from 'vitest';
 import { addVercelAiProcessors } from '../../../src/tracing/vercel-ai';
 import type { SpanJSON } from '../../../src/types-hoist/span';
-import { getDefaultTestClientOptions,TestClient } from '../../mocks/client';
+import { getDefaultTestClientOptions, TestClient } from '../../mocks/client';
 
 describe('vercel-ai cached tokens', () => {
   it('should add cached input tokens to total input tokens', () => {