Skip to content

feat(node): Add Anthropic AI integration #17348

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// SDK setup for the "custom options" scenario: PII capture is globally off,
// but the Anthropic integration is explicitly told to record both prompt
// inputs and completion outputs.
const anthropicIntegration = Sentry.anthropicAIIntegration({
  recordInputs: true,
  recordOutputs: true,
});

Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  integrations: [anthropicIntegration, nodeContextIntegration()],
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// SDK setup for the PII scenario: sendDefaultPii is enabled, so the Anthropic
// integration records inputs/outputs without any explicit options.
const initOptions = {
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()],
};

Sentry.init(initOptions);
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import * as Sentry from '@sentry/node';
import { nodeContextIntegration } from '@sentry/node-core';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// SDK setup for the default scenario: PII capture is off and the Anthropic
// integration runs with its default options (no inputs/outputs recorded).
// Force include the integration
const integrations = [Sentry.anthropicAIIntegration(), nodeContextIntegration()];

Sentry.init({
  dsn: 'https://[email protected]/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: false,
  transport: loggingTransport,
  integrations,
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockAnthropic {
constructor(config) {
this.apiKey = config.apiKey;

// Create messages object with create and countTokens methods
this.messages = {
create: this._messagesCreate.bind(this),
countTokens: this._messagesCountTokens.bind(this)
};

this.models = {
retrieve: this._modelsRetrieve.bind(this),
};
}

/**
* Create a mock message
*/
async _messagesCreate(params) {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
error.headers = { 'x-request-id': 'mock-request-123' };
throw error;
}

return {
id: 'msg_mock123',
type: 'message',
model: params.model,
role: 'assistant',
content: [
{
type: 'text',
text: 'Hello from Anthropic mock!',
},
],
stop_reason: 'end_turn',
stop_sequence: null,
usage: {
input_tokens: 10,
output_tokens: 15,
},
};
}

async _messagesCountTokens() {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

// For countTokens, just return input_tokens
return {
input_tokens: 15
}
}

async _modelsRetrieve(modelId) {
// Simulate processing time
await new Promise(resolve => setTimeout(resolve, 10));

// Match what the actual implementation would return
return {
id: modelId,
name: modelId,
created_at: 1715145600,
model: modelId, // Add model field to match the check in addResponseAttributes
};
}
}

/**
 * Drives four instrumented Anthropic calls inside a root span so the
 * integration test can assert on the resulting child spans. The call order
 * must stay fixed — test.ts matches spans in this sequence.
 */
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const instrumentedClient = instrumentAnthropicAiClient(
      new MockAnthropic({ apiKey: 'mock-api-key' }),
    );

    // 1) Basic message completion.
    await instrumentedClient.messages.create({
      model: 'claude-3-haiku-20240307',
      system: 'You are a helpful assistant.',
      messages: [{ role: 'user', content: 'What is the capital of France?' }],
      temperature: 0.7,
      max_tokens: 100,
    });

    // 2) Error handling — the sentinel 'error-model' rejects with a 404.
    try {
      await instrumentedClient.messages.create({
        model: 'error-model',
        messages: [{ role: 'user', content: 'This will fail' }],
      });
    } catch {
      // Error is expected and handled
    }

    // 3) Token counting.
    await instrumentedClient.messages.countTokens({
      model: 'claude-3-haiku-20240307',
      messages: [{ role: 'user', content: 'What is the capital of France?' }],
    });

    // 4) Model metadata retrieval.
    await instrumentedClient.models.retrieve('claude-3-haiku-20240307');
  });
}

run();
222 changes: 222 additions & 0 deletions dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,222 @@
import { afterAll, describe, expect } from 'vitest';
import { cleanupChildProcesses, createEsmAndCjsTests } from '../../../utils/runner';

// Integration suite for the Sentry Anthropic AI instrumentation. The same
// scenario.mjs is run under three SDK setups (default, sendDefaultPii: true,
// and explicit recordInputs/recordOutputs options) and the emitted transaction
// is matched against the expected span shapes below.
describe('Anthropic integration', () => {
  afterAll(() => {
    cleanupChildProcesses();
  });

  // Expected spans when sendDefaultPii is false: no request messages and no
  // response text may appear in span data.
  const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // First span - basic message completion without PII
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.request.temperature': 0.7,
          'gen_ai.request.max_tokens': 100,
          'gen_ai.response.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'msg_mock123',
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
        },
        description: 'messages.create claude-3-haiku-20240307',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
      // Second span - error handling (the mock throws for 'error-model')
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'error-model',
        },
        description: 'messages.create error-model',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'unknown_error',
      }),
      // Third span - token counting (no response.text because recordOutputs=false by default)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.countTokens',
          'sentry.op': 'gen_ai.messages.countTokens',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
        },
        description: 'messages.countTokens claude-3-haiku-20240307',
        op: 'gen_ai.messages.countTokens',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
      // Fourth span - models.retrieve
      // NOTE: the timestamp is the mock's created_at (1715145600) rendered as ISO-8601.
      expect.objectContaining({
        data: {
          'anthropic.response.timestamp': '2024-05-08T05:20:00.000Z',
          'gen_ai.operation.name': 'retrieve',
          'sentry.op': 'gen_ai.retrieve',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'claude-3-haiku-20240307',
          'gen_ai.response.model': 'claude-3-haiku-20240307',
        },
        description: 'retrieve claude-3-haiku-20240307',
        op: 'gen_ai.retrieve',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
    ]),
  };

  // Expected spans when sendDefaultPii is true: request messages and response
  // text are recorded without any explicit integration options.
  const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // First span - basic message completion with PII
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.request.temperature': 0.7,
          'gen_ai.request.max_tokens': 100,
          'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
          'gen_ai.response.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'msg_mock123',
          'gen_ai.response.text': 'Hello from Anthropic mock!',
          'gen_ai.usage.input_tokens': 10,
          'gen_ai.usage.output_tokens': 15,
          'gen_ai.usage.total_tokens': 25,
        },
        description: 'messages.create claude-3-haiku-20240307',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
      // Second span - error handling with PII (request messages still captured)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.create',
          'sentry.op': 'gen_ai.messages.create',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'error-model',
          'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
        },
        description: 'messages.create error-model',
        op: 'gen_ai.messages.create',
        origin: 'auto.ai.anthropic',
        status: 'unknown_error',
      }),
      // Third span - token counting with PII (response.text is present because sendDefaultPii=true enables recordOutputs)
      expect.objectContaining({
        data: {
          'gen_ai.operation.name': 'messages.countTokens',
          'sentry.op': 'gen_ai.messages.countTokens',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
          'gen_ai.response.text': '15', // Only present because recordOutputs=true when sendDefaultPii=true
        },
        description: 'messages.countTokens claude-3-haiku-20240307',
        op: 'gen_ai.messages.countTokens',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
      // Fourth span - models.retrieve with PII
      expect.objectContaining({
        data: {
          'anthropic.response.timestamp': '2024-05-08T05:20:00.000Z',
          'gen_ai.operation.name': 'retrieve',
          'sentry.op': 'gen_ai.retrieve',
          'sentry.origin': 'auto.ai.anthropic',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'claude-3-haiku-20240307',
          'gen_ai.response.model': 'claude-3-haiku-20240307',
        },
        description: 'retrieve claude-3-haiku-20240307',
        op: 'gen_ai.retrieve',
        origin: 'auto.ai.anthropic',
        status: 'ok',
      }),
    ]),
  };

  // Expected spans when recordInputs/recordOutputs are set explicitly even
  // though sendDefaultPii is false — options take precedence over the default.
  const EXPECTED_TRANSACTION_WITH_OPTIONS = {
    transaction: 'main',
    spans: expect.arrayContaining([
      // Check that custom options are respected
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
          'gen_ai.response.text': expect.any(String), // Should include response text when recordOutputs: true
        }),
      }),
      // Check token counting with options
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'messages.countTokens',
          'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
          'gen_ai.response.text': '15', // Present because recordOutputs=true is set in options
        }),
        op: 'gen_ai.messages.countTokens',
      }),
      // Check models.retrieve with options
      expect.objectContaining({
        data: expect.objectContaining({
          'gen_ai.operation.name': 'retrieve',
          'gen_ai.system': 'anthropic',
          'gen_ai.request.model': 'claude-3-haiku-20240307',
          'gen_ai.response.id': 'claude-3-haiku-20240307',
          'gen_ai.response.model': 'claude-3-haiku-20240307',
        }),
        op: 'gen_ai.retrieve',
        description: 'retrieve claude-3-haiku-20240307',
      }),
    ]),
  };

  // Each scenario is executed under both ESM and CJS; error events are ignored
  // so only the transaction envelope is matched.
  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument.mjs', (createRunner, test) => {
    test('creates anthropic related spans with sendDefaultPii: false', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE })
        .start()
        .completed();
    });
  });

  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
    test('creates anthropic related spans with sendDefaultPii: true', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE })
        .start()
        .completed();
    });
  });

  createEsmAndCjsTests(__dirname, 'scenario.mjs', 'instrument-with-options.mjs', (createRunner, test) => {
    test('creates anthropic related spans with custom options', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_WITH_OPTIONS })
        .start()
        .completed();
    });
  });
});
Loading
Loading