diff --git a/.github/workflows/llmobs.yml b/.github/workflows/llmobs.yml
index b8b38fdb6d0..846eb133f7f 100644
--- a/.github/workflows/llmobs.yml
+++ b/.github/workflows/llmobs.yml
@@ -141,3 +141,25 @@ jobs:
uses: ./.github/actions/testagent/logs
with:
suffix: llmobs-${{ github.job }}
+
+ anthropic:
+ runs-on: ubuntu-latest
+ env:
+ PLUGINS: anthropic
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ - uses: ./.github/actions/testagent/start
+ - uses: ./.github/actions/node/oldest-maintenance-lts
+ - uses: ./.github/actions/install
+ - run: yarn test:plugins:ci
+ - run: yarn test:llmobs:plugins:ci
+ shell: bash
+ - uses: ./.github/actions/node/latest
+ - run: yarn test:plugins:ci
+ - run: yarn test:llmobs:plugins:ci
+ shell: bash
+ - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
+ - if: always()
+ uses: ./.github/actions/testagent/logs
+ with:
+ suffix: llmobs-${{ github.job }}
diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml
index 587df5a9d3f..41834faad72 100644
--- a/.github/workflows/project.yml
+++ b/.github/workflows/project.yml
@@ -67,7 +67,7 @@ jobs:
static-analysis:
runs-on: ubuntu-latest
name: Datadog Static Analyzer
- if: github.actor != 'dependabot[bot]'
+ if: github.actor != 'dependabot[bot]' && github.event_name != 'pull_request'
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
diff --git a/.gitlab/benchmarks.yml b/.gitlab/benchmarks.yml
index 970ba997364..88acd56c19c 100644
--- a/.gitlab/benchmarks.yml
+++ b/.gitlab/benchmarks.yml
@@ -13,7 +13,7 @@ variables:
tags: ["runner:apm-k8s-tweaked-metal"]
image: $MICROBENCHMARKS_CI_IMAGE
interruptible: true
- timeout: 20m
+ timeout: 15m # TODO: Fix worker queueing and reduce this.
script:
- git clone --branch dd-trace-js https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
- bp-runner bp-runner.yml --debug
diff --git a/.gitlab/macrobenchmarks.yml b/.gitlab/macrobenchmarks.yml
index 9f439c5a821..4623e737ee2 100644
--- a/.gitlab/macrobenchmarks.yml
+++ b/.gitlab/macrobenchmarks.yml
@@ -7,17 +7,15 @@ include:
.macrobenchmarks:
stage: macrobenchmarks
rules:
- - if: ($NIGHTLY_BENCHMARKS || $CI_PIPELINE_SOURCE != "schedule") && $CI_COMMIT_REF_NAME == "master"
- when: always
+ - when: always
- when: manual
tags: ["runner:apm-k8s-same-cpu"]
needs: []
interruptible: true
- timeout: 1h
+ timeout: 15m # TODO: Fix worker queueing and reduce this.
image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:js-hapi
script:
- # TODO: Revert to js/hapi after https://github.com/DataDog/benchmarking-platform/pull/199 is merged
- - git clone --branch rochdev/parallel-experiments https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
+ - git clone --branch js/hapi https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
- bp-runner bp-runner.$EXPERIMENT.yml --debug -t
artifacts:
name: "artifacts"
@@ -27,13 +25,13 @@ include:
expire_in: 3 months
variables:
K6_OPTIONS_WARMUP_RATE: 500
- K6_OPTIONS_WARMUP_DURATION: 1m
+ K6_OPTIONS_WARMUP_DURATION: 30s
K6_OPTIONS_WARMUP_GRACEFUL_STOP: 10s
K6_OPTIONS_WARMUP_PRE_ALLOCATED_VUS: 4
K6_OPTIONS_WARMUP_MAX_VUS: 4
K6_OPTIONS_NORMAL_OPERATION_RATE: 300
- K6_OPTIONS_NORMAL_OPERATION_DURATION: 10m
+ K6_OPTIONS_NORMAL_OPERATION_DURATION: 3m
K6_OPTIONS_NORMAL_OPERATION_GRACEFUL_STOP: 10s
K6_OPTIONS_NORMAL_OPERATION_PRE_ALLOCATED_VUS: 4
K6_OPTIONS_NORMAL_OPERATION_MAX_VUS: 4
diff --git a/docs/add-redirects.sh b/docs/add-redirects.sh
index a16e102a675..700097beef8 100755
--- a/docs/add-redirects.sh
+++ b/docs/add-redirects.sh
@@ -1,28 +1,36 @@
#!/usr/bin/env bash
# Previously, URLs to plugin pages looked like this:
-# interfaces/plugins.amqp10.html
+# interfaces/export_.plugins.connect.html
#
# Now, with an updated typedoc and updated types, they look like this:
-# interfaces/export_.plugins.connect.html
-#
+# interfaces/plugins.amqp10.html
+#
# This script automatically generates basic HTML files to redirect users who
# visit the old URLs to the new URL.
echo "writing redirects..."
+# TODO(2026-10-07): Delete this file and remove from docs/package.json
+# NOTE: Do not add any new entries to this list
declare -a plugins=(
+ "aerospike"
"amqp10"
"amqplib"
+ "apollo"
"avsc"
"aws_sdk"
- "bluebird"
- "couchbase"
- "cucumber"
+ "axios"
+ "azure_functions"
+ "azure_service_bus"
"bunyan"
"cassandra_driver"
+ "child_process"
"confluentinc_kafka_javascript"
"connect"
+ "couchbase"
+ "cucumber"
+ "cypress"
"dns"
"elasticsearch"
"express"
@@ -30,9 +38,11 @@ declare -a plugins=(
"fetch"
"generic_pool"
"google_cloud_pubsub"
+ "google_cloud_vertexai"
"graphql"
"grpc"
"hapi"
+ "hono"
"http"
"http2"
"ioredis"
@@ -41,38 +51,41 @@ declare -a plugins=(
"kafkajs"
"knex"
"koa"
+ "langchain"
"ldapjs"
"mariadb"
+ "memcached"
"microgateway_core"
"mocha"
"mongodb_core"
+ "mongoose"
"mysql"
"mysql2"
"net"
"next"
- "opensearch"
"openai"
+ "opensearch"
"oracledb"
- "pino"
"pg"
+ "pino"
+ "playwright"
"prisma"
- "promise"
- "promise_js"
"protobufjs"
- "q"
"redis"
"restify"
+ "rhea"
"router"
+ "selenium"
+ "sharedb"
"tedious"
"undici"
- "when"
+ "vitest"
"winston"
- "ws"
)
for i in "${plugins[@]}"
do
- echo "" > out/interfaces/plugins.$i.html
+ echo "" > out/interfaces/export_.plugins.$i.html
done
echo "done."
diff --git a/docs/test.ts b/docs/test.ts
index ac3f56bb856..dd7bcff0d35 100644
--- a/docs/test.ts
+++ b/docs/test.ts
@@ -305,6 +305,7 @@ const openSearchOptions: plugins.opensearch = {
tracer.use('amqp10');
tracer.use('amqplib');
+tracer.use('anthropic');
tracer.use('avsc');
tracer.use('aws-sdk');
tracer.use('aws-sdk', awsSdkOptions);
diff --git a/index.d.ts b/index.d.ts
index d7f390cf7d7..19bb065654f 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -168,6 +168,7 @@ interface Plugins {
"aerospike": tracer.plugins.aerospike;
"amqp10": tracer.plugins.amqp10;
"amqplib": tracer.plugins.amqplib;
+ "anthropic": tracer.plugins.anthropic;
"apollo": tracer.plugins.apollo;
"avsc": tracer.plugins.avsc;
"aws-sdk": tracer.plugins.aws_sdk;
@@ -785,6 +786,8 @@ declare namespace tracer {
/** Whether to enable request body collection on RASP event
* @default false
+ *
+ * @deprecated Use UI and Remote Configuration to enable extended data collection
*/
bodyCollection?: boolean
},
@@ -809,20 +812,28 @@ declare namespace tracer {
},
/**
* Configuration for extended headers collection tied to security events
+ *
+ * @deprecated Use UI and Remote Configuration to enable extended data collection
*/
extendedHeadersCollection?: {
/** Whether to enable extended headers collection
* @default false
+ *
+ * @deprecated Use UI and Remote Configuration to enable extended data collection
*/
enabled: boolean,
/** Whether to redact collected headers
* @default true
+ *
+ * @deprecated Use UI and Remote Configuration to enable extended data collection
*/
redaction: boolean,
/** Specifies the maximum number of headers collected.
* @default 50
+ *
+ * @deprecated Use UI and Remote Configuration to enable extended data collection
*/
maxHeaders: number,
}
@@ -1530,6 +1541,12 @@ declare namespace tracer {
*/
interface amqplib extends Instrumentation {}
+ /**
+ * This plugin automatically instruments the
+ * [anthropic](https://www.npmjs.com/package/@anthropic-ai/sdk) module.
+ */
+ interface anthropic extends Instrumentation {}
+
/**
* Currently this plugin automatically instruments
* [@apollo/gateway](https://github.com/apollographql/federation) for module versions >= v2.3.0.
diff --git a/initialize.mjs b/initialize.mjs
index 2b681ec1943..7f2109f1cdb 100644
--- a/initialize.mjs
+++ b/initialize.mjs
@@ -36,7 +36,13 @@ ${result.source}`
const [NODE_MAJOR, NODE_MINOR] = process.versions.node.split('.').map(Number)
const brokenLoaders = NODE_MAJOR === 18 && NODE_MINOR === 0
-const iitmExclusions = [/langsmith/, /openai\/_shims/, /openai\/resources\/chat\/completions\/messages/, /openai\/agents-core\/dist\/shims/]
+const iitmExclusions = [
+ /langsmith/,
+ /openai\/_shims/,
+ /openai\/resources\/chat\/completions\/messages/,
+ /openai\/agents-core\/dist\/shims/,
+ /@anthropic-ai\/sdk\/_shims/
+]
export async function load (url, context, nextLoad) {
const iitmExclusionsMatch = iitmExclusions.some((exclusion) => exclusion.test(url))
diff --git a/integration-tests/esbuild/package.json b/integration-tests/esbuild/package.json
index f6554d14ece..eab25fc6e72 100644
--- a/integration-tests/esbuild/package.json
+++ b/integration-tests/esbuild/package.json
@@ -28,6 +28,6 @@
"express": "4.21.2",
"knex": "3.1.0",
"koa": "3.0.1",
- "openai": "6.1.0"
+ "openai": "6.2.0"
}
}
diff --git a/package.json b/package.json
index 529d0d594e5..0b6d2dae046 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "dd-trace",
- "version": "5.70.0",
+ "version": "5.71.0",
"description": "Datadog APM tracing client for JavaScript",
"main": "index.js",
"typings": "index.d.ts",
diff --git a/packages/datadog-instrumentations/src/anthropic.js b/packages/datadog-instrumentations/src/anthropic.js
new file mode 100644
index 00000000000..a0efcf105b7
--- /dev/null
+++ b/packages/datadog-instrumentations/src/anthropic.js
@@ -0,0 +1,115 @@
+'use strict'
+
+const { addHook } = require('./helpers/instrument')
+const shimmer = require('../../datadog-shimmer')
+const { channel, tracingChannel } = require('dc-polyfill')
+
+const anthropicTracingChannel = tracingChannel('apm:anthropic:request')
+const onStreamedChunkCh = channel('apm:anthropic:request:chunk')
+
+function wrapStreamIterator (iterator, ctx) {
+ return function () {
+ const itr = iterator.apply(this, arguments)
+ shimmer.wrap(itr, 'next', next => function () {
+ return next.apply(this, arguments)
+ .then(res => {
+ const { done, value: chunk } = res
+ onStreamedChunkCh.publish({ ctx, chunk, done })
+
+ if (done) {
+ finish(ctx)
+ }
+
+ return res
+ })
+ .catch(error => {
+ finish(ctx, null, error)
+ throw error
+ })
+ })
+
+ return itr
+ }
+}
+
+function wrapCreate (create) {
+ return function () {
+ if (!anthropicTracingChannel.start.hasSubscribers) {
+ return create.apply(this, arguments)
+ }
+
+ const options = arguments[0]
+ const stream = options.stream
+
+ const ctx = { options, resource: 'create' }
+
+ return anthropicTracingChannel.start.runStores(ctx, () => {
+ let apiPromise
+ try {
+ apiPromise = create.apply(this, arguments)
+ } catch (error) {
+ finish(ctx, null, error)
+ throw error
+ }
+
+ shimmer.wrap(apiPromise, 'parse', parse => function () {
+ return parse.apply(this, arguments)
+ .then(response => {
+ if (stream) {
+ shimmer.wrap(response, Symbol.asyncIterator, iterator => wrapStreamIterator(iterator, ctx))
+ } else {
+ finish(ctx, response, null)
+ }
+
+ return response
+ }).catch(error => {
+ finish(ctx, null, error)
+ throw error
+ })
+ })
+
+ anthropicTracingChannel.end.publish(ctx)
+
+ return apiPromise
+ })
+ }
+}
+
+function finish (ctx, result, error) {
+ if (error) {
+ ctx.error = error
+ anthropicTracingChannel.error.publish(ctx)
+ }
+
+ // streamed responses are handled and set separately
+ ctx.result ??= result
+
+ anthropicTracingChannel.asyncEnd.publish(ctx)
+}
+
+const extensions = ['js', 'mjs']
+for (const extension of extensions) {
+ addHook({
+ name: '@anthropic-ai/sdk',
+ file: `resources/messages.${extension}`,
+ versions: ['>=0.14.0 <0.33.0']
+ }, exports => {
+ const Messages = exports.Messages
+
+ shimmer.wrap(Messages.prototype, 'create', wrapCreate)
+
+ return exports
+ })
+
+ addHook({
+ name: '@anthropic-ai/sdk',
+ file: `resources/messages/messages.${extension}`,
+ versions: ['>=0.33.0']
+ }, exports => {
+ const Messages = exports.Messages
+
+ shimmer.wrap(Messages.prototype, 'create', wrapCreate)
+
+ return exports
+ })
+}
diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js
index ccc49dcc79a..19c47daedd5 100644
--- a/packages/datadog-instrumentations/src/helpers/hooks.js
+++ b/packages/datadog-instrumentations/src/helpers/hooks.js
@@ -1,6 +1,7 @@
'use strict'
module.exports = {
+ '@anthropic-ai/sdk': { esmFirst: true, fn: () => require('../anthropic') },
'@apollo/server': () => require('../apollo-server'),
'@apollo/gateway': () => require('../apollo'),
'apollo-server-core': () => require('../apollo-server-core'),
diff --git a/packages/datadog-plugin-anthropic/src/index.js b/packages/datadog-plugin-anthropic/src/index.js
new file mode 100644
index 00000000000..54b0856a5ae
--- /dev/null
+++ b/packages/datadog-plugin-anthropic/src/index.js
@@ -0,0 +1,17 @@
+'use strict'
+
+const CompositePlugin = require('../../dd-trace/src/plugins/composite')
+const AnthropicTracingPlugin = require('./tracing')
+const AnthropicLLMObsPlugin = require('../../dd-trace/src/llmobs/plugins/anthropic')
+
+class AnthropicPlugin extends CompositePlugin {
+ static id = 'anthropic'
+ static get plugins () {
+ return {
+ llmobs: AnthropicLLMObsPlugin,
+ tracing: AnthropicTracingPlugin
+ }
+ }
+}
+
+module.exports = AnthropicPlugin
diff --git a/packages/datadog-plugin-anthropic/src/tracing.js b/packages/datadog-plugin-anthropic/src/tracing.js
new file mode 100644
index 00000000000..c294394d95b
--- /dev/null
+++ b/packages/datadog-plugin-anthropic/src/tracing.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const TracingPlugin = require('../../dd-trace/src/plugins/tracing')
+
+class AnthropicTracingPlugin extends TracingPlugin {
+ static id = 'anthropic'
+ static operation = 'request'
+ static system = 'anthropic'
+ static prefix = 'tracing:apm:anthropic:request'
+
+ bindStart (ctx) {
+ const { resource, options } = ctx
+
+ this.startSpan('anthropic.request', {
+ meta: {
+ 'resource.name': `Messages.${resource}`,
+ 'anthropic.request.model': options.model
+ }
+ }, ctx)
+
+ return ctx.currentStore
+ }
+
+ asyncEnd (ctx) {
+ const span = ctx.currentStore?.span
+ span?.finish()
+ }
+}
+
+module.exports = AnthropicTracingPlugin
diff --git a/packages/datadog-plugin-anthropic/test/index.spec.js b/packages/datadog-plugin-anthropic/test/index.spec.js
new file mode 100644
index 00000000000..6e1637634d6
--- /dev/null
+++ b/packages/datadog-plugin-anthropic/test/index.spec.js
@@ -0,0 +1,161 @@
+'use strict'
+
+const { describe, before, after, it } = require('mocha')
+const { withVersions } = require('../../dd-trace/test/setup/mocha')
+const agent = require('../../dd-trace/test/plugins/agent')
+const assert = require('node:assert')
+const { useEnv } = require('../../../integration-tests/helpers')
+
+describe('Plugin', () => {
+ useEnv({
+ ANTHROPIC_API_KEY: ''
+ })
+
+ withVersions('anthropic', '@anthropic-ai/sdk', (version) => {
+ let client
+
+ before(async () => {
+ await agent.load('anthropic')
+
+ const { Anthropic } = require(`../../../versions/@anthropic-ai/sdk@${version}`).get()
+ client = new Anthropic({ baseURL: 'http://127.0.0.1:9126/vcr/anthropic' })
+ })
+
+ after(() => agent.close({ ritmReset: false }))
+
+ describe('messages.create', () => {
+ it('creates a span', async () => {
+ const tracesPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+
+ assert.equal(span.name, 'anthropic.request')
+ assert.equal(span.resource, 'Messages.create')
+ assert.equal(span.meta['anthropic.request.model'], 'claude-3-7-sonnet-20250219')
+ })
+
+ const promise = client.messages.create({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5,
+ })
+
+ assert.ok(
+ typeof promise.withResponse === 'function',
+ 'Expected custom Anthropic APIPromise to have a withResponse method'
+ )
+
+ const result = await promise
+ assert.ok(result)
+
+ await tracesPromise
+ })
+
+ describe('stream', () => {
+ it('creates a span', async () => {
+ const tracesPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+
+ assert.equal(span.name, 'anthropic.request')
+ assert.equal(span.resource, 'Messages.create')
+ assert.equal(span.meta['anthropic.request.model'], 'claude-3-7-sonnet-20250219')
+ })
+
+ const promise = client.messages.create({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5,
+ stream: true
+ })
+
+ assert.ok(
+ typeof promise.withResponse === 'function',
+ 'Expected custom Anthropic APIPromise to have a withResponse method'
+ )
+
+ const stream = await promise
+ for await (const chunk of stream) {
+ assert.ok(chunk)
+ }
+
+ await tracesPromise
+ })
+ })
+ })
+
+ describe('messages.stream', () => {
+ it('creates a span for async iterator consumption', async () => {
+ const tracesPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+
+ assert.equal(span.name, 'anthropic.request')
+
+ // even though we're streaming, it calls Messages.create under the hood
+ assert.equal(span.resource, 'Messages.create')
+
+ assert.equal(span.meta['anthropic.request.model'], 'claude-3-7-sonnet-20250219')
+ })
+
+ const stream = client.messages.stream({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5
+ })
+
+ for await (const chunk of stream) {
+ assert.ok(chunk)
+ }
+
+ await tracesPromise
+ })
+
+ describe('when using streaming helper methods', () => {
+ it('creates a span for stream.on', async () => {
+ const tracesPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+
+ assert.equal(span.name, 'anthropic.request')
+ assert.equal(span.resource, 'Messages.create')
+ assert.equal(span.meta['anthropic.request.model'], 'claude-3-7-sonnet-20250219')
+ })
+
+ client.messages.stream({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5
+ }).on('text', text => {
+ assert.ok(text)
+ })
+
+ await tracesPromise
+ })
+
+ it('creates a span for stream.finalMessage', async () => {
+ const tracesPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+
+ assert.equal(span.name, 'anthropic.request')
+ assert.equal(span.resource, 'Messages.create')
+ assert.equal(span.meta['anthropic.request.model'], 'claude-3-7-sonnet-20250219')
+ })
+
+ const stream = client.messages.stream({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5,
+ stream: true
+ })
+
+ const message = await stream.finalMessage()
+ assert.ok(message)
+
+ await tracesPromise
+ })
+ })
+ })
+ })
+})
diff --git a/packages/datadog-plugin-anthropic/test/integration-test/client.spec.js b/packages/datadog-plugin-anthropic/test/integration-test/client.spec.js
new file mode 100644
index 00000000000..6f06527336f
--- /dev/null
+++ b/packages/datadog-plugin-anthropic/test/integration-test/client.spec.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const {
+ FakeAgent,
+ createSandbox,
+ checkSpansForServiceName,
+ spawnPluginIntegrationTestProc
+} = require('../../../../integration-tests/helpers')
+const { withVersions } = require('../../../dd-trace/test/setup/mocha')
+const { assert } = require('chai')
+const { describe, it, beforeEach, afterEach, before, after } = require('mocha')
+
+describe('esm', () => {
+ let agent
+ let proc
+ let sandbox
+
+ withVersions('anthropic', ['@anthropic-ai/sdk'], version => {
+ before(async function () {
+ this.timeout(20000)
+ sandbox = await createSandbox([
+ `@anthropic-ai/sdk@${version}`,
+ ], false, [
+ './packages/datadog-plugin-anthropic/test/integration-test/*'
+ ])
+ })
+
+ after(async () => {
+ await sandbox.remove()
+ })
+
+ beforeEach(async () => {
+ agent = await new FakeAgent().start()
+ })
+
+ afterEach(async () => {
+ proc?.kill()
+ await agent.stop()
+ })
+
+ it('is instrumented', async () => {
+ const res = agent.assertMessageReceived(({ headers, payload }) => {
+ assert.propertyVal(headers, 'host', `127.0.0.1:${agent.port}`)
+ assert.isArray(payload)
+ assert.strictEqual(checkSpansForServiceName(payload, 'anthropic.request'), true)
+ })
+
+ proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port, null, {
+ NODE_OPTIONS: '--import dd-trace/initialize.mjs',
+ ANTHROPIC_API_KEY: ''
+ })
+
+ await res
+ }).timeout(20000)
+ })
+})
diff --git a/packages/datadog-plugin-anthropic/test/integration-test/server.mjs b/packages/datadog-plugin-anthropic/test/integration-test/server.mjs
new file mode 100644
index 00000000000..de89467609b
--- /dev/null
+++ b/packages/datadog-plugin-anthropic/test/integration-test/server.mjs
@@ -0,0 +1,10 @@
+import { Anthropic } from '@anthropic-ai/sdk'
+
+const client = new Anthropic({ baseURL: 'http://127.0.0.1:9126/vcr/anthropic' })
+
+await client.messages.create({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5
+})
diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js
index af806041e43..d9604e7eded 100644
--- a/packages/dd-trace/src/appsec/reporter.js
+++ b/packages/dd-trace/src/appsec/reporter.js
@@ -38,14 +38,20 @@ const config = {
const metricsQueue = new Map()
+const extendedDataCollectionRequest = new WeakMap()
+
// following header lists are ordered in the same way the spec orders them, it doesn't matter but it's easier to compare
const contentHeaderList = [
'content-length',
- 'content-type',
'content-encoding',
'content-language'
]
+const responseHeaderList = [
+ ...contentHeaderList,
+ 'content-type'
+]
+
const identificationHeaders = [
'x-amzn-trace-id',
'cloudfront-viewer-ja3-fingerprint',
@@ -75,15 +81,27 @@ const requestHeadersList = [
...identificationHeaders
]
+const redactedHeadersList = [
+ 'authorization',
+ 'proxy-authorization',
+ 'www-authenticate',
+ 'proxy-authenticate',
+ 'authentication-info',
+ 'proxy-authentication-info',
+ 'cookie',
+ 'set-cookie'
+]
+
// these request headers are always collected - it breaks the expected spec orders
const REQUEST_HEADERS_MAP = mapHeaderAndTags(requestHeadersList, REQUEST_HEADER_TAG_PREFIX)
const EVENT_HEADERS_MAP = mapHeaderAndTags(eventHeadersList, REQUEST_HEADER_TAG_PREFIX)
-const RESPONSE_HEADERS_MAP = mapHeaderAndTags(contentHeaderList, RESPONSE_HEADER_TAG_PREFIX)
+const RESPONSE_HEADERS_MAP = mapHeaderAndTags(responseHeaderList, RESPONSE_HEADER_TAG_PREFIX)
const NON_EXTENDED_REQUEST_HEADERS = new Set([...requestHeadersList, ...eventHeadersList])
-const NON_EXTENDED_RESPONSE_HEADERS = new Set(contentHeaderList)
+const NON_EXTENDED_RESPONSE_HEADERS = new Set(responseHeaderList)
+const REDACTED_HEADERS = new Set(redactedHeadersList)
function init (_config) {
config.headersExtendedCollectionEnabled = _config.extendedHeadersCollection.enabled
@@ -132,7 +150,9 @@ function filterExtendedHeaders (headers, excludedHeaderNames, tagPrefix, limit =
for (const [headerName, headerValue] of Object.entries(headers)) {
if (counter >= limit) break
if (!excludedHeaderNames.has(headerName)) {
- result[getHeaderTag(tagPrefix, headerName)] = String(headerValue)
+ result[getHeaderTag(tagPrefix, headerName)] = REDACTED_HEADERS.has(headerName)
+ ? ''
+ : String(headerValue)
counter++
}
}
@@ -140,7 +160,7 @@ function filterExtendedHeaders (headers, excludedHeaderNames, tagPrefix, limit =
return result
}
-function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedResponseHeaders = {}) {
+function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedResponseHeaders = {}, extendedDataCollection) {
// Mandatory
const mandatoryCollectedHeaders = filterHeaders(req.headers, REQUEST_HEADERS_MAP)
@@ -154,7 +174,8 @@ function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedRespons
const requestEventCollectedHeaders = filterHeaders(req.headers, EVENT_HEADERS_MAP)
const responseEventCollectedHeaders = filterHeaders(responseHeaders, RESPONSE_HEADERS_MAP)
- if (!config.headersExtendedCollectionEnabled || config.headersRedaction) {
+ // TODO headersExtendedCollectionEnabled and headersRedaction properties are deprecated to delete in a major
+ if ((!config.headersExtendedCollectionEnabled || config.headersRedaction) && !extendedDataCollection) {
// Standard collection
return Object.assign(
mandatoryCollectedHeaders,
@@ -163,12 +184,15 @@ function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedRespons
)
}
+ // TODO config.maxHeadersCollected is deprecated to delete in a major
+ const maxHeadersCollected = extendedDataCollection?.max_collected_headers ?? config.maxHeadersCollected
+
// Extended collection
- const requestExtendedHeadersAvailableCount =
- config.maxHeadersCollected -
- Object.keys(mandatoryCollectedHeaders).length -
+ const collectedHeadersCount = Object.keys(mandatoryCollectedHeaders).length +
Object.keys(requestEventCollectedHeaders).length
+ const requestExtendedHeadersAvailableCount = maxHeadersCollected - collectedHeadersCount
+
const requestEventExtendedCollectedHeaders =
filterExtendedHeaders(
req.headers,
@@ -178,7 +202,7 @@ function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedRespons
)
const responseExtendedHeadersAvailableCount =
- config.maxHeadersCollected -
+ maxHeadersCollected -
Object.keys(responseEventCollectedHeaders).length
const responseEventExtendedCollectedHeaders =
@@ -199,15 +223,15 @@ function getCollectedHeaders (req, res, shouldCollectEventHeaders, storedRespons
// Check discarded headers
const requestHeadersCount = Object.keys(req.headers).length
- if (requestHeadersCount > config.maxHeadersCollected) {
+ if (requestHeadersCount > maxHeadersCollected) {
headersTags['_dd.appsec.request.header_collection.discarded'] =
- requestHeadersCount - config.maxHeadersCollected
+ requestHeadersCount - maxHeadersCollected
}
const responseHeadersCount = Object.keys(responseHeaders).length
- if (responseHeadersCount > config.maxHeadersCollected) {
+ if (responseHeadersCount > maxHeadersCollected) {
headersTags['_dd.appsec.response.header_collection.discarded'] =
- responseHeadersCount - config.maxHeadersCollected
+ responseHeadersCount - maxHeadersCollected
}
return headersTags
@@ -307,7 +331,7 @@ function reportTruncationMetrics (rootSpan, metrics) {
}
}
-function reportAttack (attackData) {
+function reportAttack ({ events: attackData, actions }) {
const store = storage('legacy').getStore()
const req = store?.req
const rootSpan = web.root(req)
@@ -338,8 +362,14 @@ function reportAttack (attackData) {
rootSpan.addTags(newTags)
+ // TODO this should be deleted in a major
if (config.raspBodyCollection && isRaspAttack(attackData)) {
- reportRequestBody(rootSpan, req.body)
+ reportRequestBody(rootSpan, req.body, true)
+ }
+
+ const extendedDataCollection = actions?.extended_data_collection
+ if (extendedDataCollection) {
+ extendedDataCollectionRequest.set(req, extendedDataCollection)
}
}
@@ -398,18 +428,29 @@ function truncateRequestBody (target, depth = 0) {
}
}
-function reportRequestBody (rootSpan, requestBody) {
- if (!requestBody) return
+function reportRequestBody (rootSpan, requestBody, comesFromRaspAction = false) {
+ if (!requestBody || Object.keys(requestBody).length === 0) return
if (!rootSpan.meta_struct) {
rootSpan.meta_struct = {}
}
- if (!rootSpan.meta_struct['http.request.body']) {
+ if (rootSpan.meta_struct['http.request.body']) {
+ // If the rasp.exceed metric exists, set also the same for the new tag
+ const currentTags = rootSpan.context()._tags
+ const sizeExceedTagValue = currentTags['_dd.appsec.rasp.request_body_size.exceeded']
+
+ if (sizeExceedTagValue) {
+ rootSpan.setTag('_dd.appsec.request_body_size.exceeded', sizeExceedTagValue)
+ }
+ } else {
const { truncated, value } = truncateRequestBody(requestBody)
rootSpan.meta_struct['http.request.body'] = value
if (truncated) {
- rootSpan.setTag('_dd.appsec.rasp.request_body_size.exceeded', 'true')
+ const sizeExceedTagKey = comesFromRaspAction
+ ? '_dd.appsec.rasp.request_body_size.exceeded' // TODO old metric to delete in a major
+ : '_dd.appsec.request_body_size.exceeded'
+ rootSpan.setTag(sizeExceedTagKey, 'true')
}
}
}
@@ -496,7 +537,15 @@ function finishRequest (req, res, storedResponseHeaders) {
const tags = rootSpan.context()._tags
- const newTags = getCollectedHeaders(req, res, shouldCollectEventHeaders(tags), storedResponseHeaders)
+ const extendedDataCollection = extendedDataCollectionRequest.get(req)
+ const newTags = getCollectedHeaders(
+ req, res, shouldCollectEventHeaders(tags), storedResponseHeaders, extendedDataCollection
+ )
+
+ if (extendedDataCollection) {
+ // TODO add support for fastify, req.body is not available in fastify
+ reportRequestBody(rootSpan, req.body)
+ }
if (tags['appsec.event'] === 'true' && typeof req.route?.path === 'string') {
newTags['http.endpoint'] = req.route.path
diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js
index d9d8856982f..f684b4b703c 100644
--- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js
+++ b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js
@@ -141,7 +141,7 @@ class WAFContextWrapper {
metrics.wafTimeout = result.timeout
if (ruleTriggered) {
- Reporter.reportAttack(result.events)
+ Reporter.reportAttack(result)
}
Reporter.reportAttributes(result.attributes)
diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js
index 2195b86ed60..a8b22799d3c 100644
--- a/packages/dd-trace/src/config.js
+++ b/packages/dd-trace/src/config.js
@@ -668,6 +668,7 @@ class Config {
this._envUnprocessed['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON
this._setBoolean(env, 'appsec.enabled', DD_APPSEC_ENABLED)
this._setString(env, 'appsec.eventTracking.mode', DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE)
+ // TODO appsec.extendedHeadersCollection are deprecated, to delete in a major
this._setBoolean(env, 'appsec.extendedHeadersCollection.enabled', DD_APPSEC_COLLECT_ALL_HEADERS)
this._setBoolean(
env,
@@ -679,6 +680,7 @@ class Config {
this._setString(env, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP)
this._setString(env, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP)
this._setBoolean(env, 'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED)
+ // TODO Deprecated, to delete in a major
this._setBoolean(env, 'appsec.rasp.bodyCollection', DD_APPSEC_RASP_COLLECT_REQUEST_BODY)
env['appsec.rateLimit'] = maybeInt(DD_APPSEC_TRACE_RATE_LIMIT)
this._envUnprocessed['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT
diff --git a/packages/dd-trace/src/config_defaults.js b/packages/dd-trace/src/config_defaults.js
index 3054bb46fe8..acb6bb1cce3 100644
--- a/packages/dd-trace/src/config_defaults.js
+++ b/packages/dd-trace/src/config_defaults.js
@@ -36,12 +36,14 @@ module.exports = {
'appsec.blockedTemplateJson': undefined,
'appsec.enabled': undefined,
'appsec.eventTracking.mode': 'identification',
+ // TODO appsec.extendedHeadersCollection is deprecated, to delete in a major
'appsec.extendedHeadersCollection.enabled': false,
'appsec.extendedHeadersCollection.redaction': true,
'appsec.extendedHeadersCollection.maxHeaders': 50,
'appsec.obfuscatorKeyRegex': defaultWafObfuscatorKeyRegex,
'appsec.obfuscatorValueRegex': defaultWafObfuscatorValueRegex,
'appsec.rasp.enabled': true,
+ // TODO Deprecated, to delete in a major
'appsec.rasp.bodyCollection': false,
'appsec.rateLimit': 100,
'appsec.rules': undefined,
diff --git a/packages/dd-trace/src/llmobs/plugins/anthropic.js b/packages/dd-trace/src/llmobs/plugins/anthropic.js
new file mode 100644
index 00000000000..bf60592d65f
--- /dev/null
+++ b/packages/dd-trace/src/llmobs/plugins/anthropic.js
@@ -0,0 +1,282 @@
+'use strict'
+
+const LLMObsPlugin = require('./base')
+
+const ALLOWED_METADATA_KEYS = new Set([
+ 'max_tokens',
+ 'stop_sequences',
+ 'temperature',
+ 'top_k',
+ 'top_p',
+])
+
+class AnthropicLLMObsPlugin extends LLMObsPlugin {
+ static integration = 'anthropic' // used for llmobs telemetry
+ static id = 'anthropic'
+ static prefix = 'tracing:apm:anthropic:request'
+
+ constructor () {
+ super(...arguments)
+
+ this.addSub('apm:anthropic:request:chunk', ({ ctx, chunk, done }) => {
+ ctx.chunks ??= []
+ const chunks = ctx.chunks
+ if (chunk) chunks.push(chunk)
+
+ if (!done) return
+
+ const response = { content: [] }
+
+ for (const chunk of chunks) {
+ switch (chunk.type) {
+ case 'message_start': {
+ const { message } = chunk
+ if (!message) continue
+
+ const { role, usage } = message
+ if (role) response.role = role
+ if (usage) response.usage = usage
+ break
+ }
+ case 'content_block_start': {
+ const contentBlock = chunk.content_block
+ if (!contentBlock) continue
+
+ const { type } = contentBlock
+ if (type === 'text') {
+ response.content.push({ type, text: contentBlock.text })
+ } else if (type === 'tool_use') {
+ response.content.push({ type, name: contentBlock.name, input: '', id: contentBlock.id })
+ }
+ break
+ }
+ case 'content_block_delta': {
+ const { delta } = chunk
+ if (!delta) continue
+
+ const { text } = delta
+ if (text) response.content[response.content.length - 1].text += text
+
+ const partialJson = delta.partial_json
+ if (partialJson && delta.type === 'input_json_delta') {
+ response.content[response.content.length - 1].input += partialJson
+ }
+ break
+ }
+ case 'content_block_stop': {
+ const type = response.content[response.content.length - 1].type
+ if (type === 'tool_use') {
+ const input = response.content[response.content.length - 1].input ?? '{}'
+ response.content[response.content.length - 1].input = JSON.parse(input)
+ }
+ break
+ }
+ case 'message_delta': {
+ const { delta } = chunk
+
+ const finishReason = delta?.stop_reason
+ if (finishReason) response.finish_reason = finishReason
+
+ const { usage } = chunk
+ if (usage) {
+ const responseUsage = response.usage ?? (response.usage = { input_tokens: 0, output_tokens: 0 })
+ responseUsage.output_tokens = usage.output_tokens
+
+ const cacheCreationTokens = usage.cache_creation_input_tokens
+ const cacheReadTokens = usage.cache_read_input_tokens
+ if (cacheCreationTokens) responseUsage.cache_creation_input_tokens = cacheCreationTokens
+ if (cacheReadTokens) responseUsage.cache_read_input_tokens = cacheReadTokens
+ }
+
+ break
+ }
+ case 'error': {
+ const { error } = chunk
+ if (!error) continue
+
+ response.error = {}
+ if (error.type) response.error.type = error.type
+ if (error.message) response.error.message = error.message
+
+ break
+ }
+ }
+
+ ctx.result = response
+ }
+ })
+ }
+
+ getLLMObsSpanRegisterOptions (ctx) {
+ const { options } = ctx
+ const { model } = options
+
+ return {
+ kind: 'llm',
+ modelName: model,
+ modelProvider: 'anthropic'
+ }
+ }
+
+ setLLMObsTags (ctx) {
+ const span = ctx.currentStore?.span
+ if (!span) return
+
+ const { options, result } = ctx
+
+ this.#tagAnthropicInputMessages(span, options)
+ this.#tagAnthropicOutputMessages(span, result)
+ this.#tagAnthropicMetadata(span, options)
+ this.#tagAnthropicUsage(span, result)
+ }
+
+ #tagAnthropicInputMessages (span, options) {
+ const { system, messages } = options
+ const inputMessages = []
+
+ if (system) {
+ messages.unshift({ content: system, role: 'system' })
+ }
+
+ for (const message of messages) {
+ const { content, role } = message
+
+ if (typeof content === 'string') {
+ inputMessages.push({ content, role })
+ continue
+ }
+
+ for (const block of content) {
+ if (block.type === 'text') {
+ inputMessages.push({ content: block.text, role })
+ } else if (block.type === 'image') {
+ inputMessages.push({ content: '([IMAGE DETECTED])', role })
+ } else if (block.type === 'tool_use') {
+ const { text, name, id, type } = block
+ let input = block.input
+ if (typeof input === 'string') {
+ input = JSON.parse(input)
+ }
+
+ const toolCall = {
+ name,
+ arguments: input,
+ toolId: id,
+ type
+ }
+
+ inputMessages.push({ content: text ?? '', role, toolCalls: [toolCall] })
+ } else if (block.type === 'tool_result') {
+ const { content } = block
+ const formattedContent = this.#formatAnthropicToolResultContent(content)
+ const toolResult = {
+ result: formattedContent,
+ toolId: block.tool_use_id,
+ type: 'tool_result'
+ }
+
+ inputMessages.push({ content: '', role, toolResults: [toolResult] })
+ } else {
+ inputMessages.push({ content: JSON.stringify(block), role })
+ }
+ }
+ }
+
+ this._tagger.tagLLMIO(span, inputMessages)
+ }
+
+ #tagAnthropicOutputMessages (span, result) {
+ if (!result) return
+
+ const { content, role } = result
+
+ if (typeof content === 'string') {
+ this._tagger.tagLLMIO(span, null, [{ content, role }])
+ return
+ }
+
+ const outputMessages = []
+ for (const block of content) {
+ const { text } = block
+ if (typeof text === 'string') {
+ outputMessages.push({ content: text, role })
+ } else if (block.type === 'tool_use') {
+ let input = block.input
+ if (typeof input === 'string') {
+ input = JSON.parse(input)
+ }
+
+ const toolCall = {
+ name: block.name,
+ arguments: input,
+ toolId: block.id,
+ type: block.type
+ }
+
+ outputMessages.push({ content: text ?? '', role, toolCalls: [toolCall] })
+ }
+ }
+
+ this._tagger.tagLLMIO(span, null, outputMessages)
+ }
+
+ #tagAnthropicMetadata (span, options) {
+ const metadata = {}
+ for (const [key, value] of Object.entries(options)) {
+ if (ALLOWED_METADATA_KEYS.has(key)) {
+ metadata[key] = value
+ }
+ }
+
+ this._tagger.tagMetadata(span, metadata)
+ }
+
+ #tagAnthropicUsage (span, result) {
+ if (!result) return
+
+ const { usage } = result
+ if (!usage) return
+
+ const inputTokens = usage.input_tokens
+ const outputTokens = usage.output_tokens
+ const cacheWriteTokens = usage.cache_creation_input_tokens
+ const cacheReadTokens = usage.cache_read_input_tokens
+
+ const metrics = {}
+
+ metrics.inputTokens =
+ (inputTokens ?? 0) +
+ (cacheWriteTokens ?? 0) +
+ (cacheReadTokens ?? 0)
+
+ if (outputTokens) metrics.outputTokens = outputTokens
+ const totalTokens = metrics.inputTokens + (outputTokens ?? 0)
+ if (totalTokens) metrics.totalTokens = totalTokens
+
+ if (cacheWriteTokens != null) metrics.cacheWriteTokens = cacheWriteTokens
+ if (cacheReadTokens != null) metrics.cacheReadTokens = cacheReadTokens
+
+ this._tagger.tagMetrics(span, metrics)
+ }
+
+ // TODO: consider extracting this formatter into a shared LLMObs util module
+ #formatAnthropicToolResultContent (content) {
+ if (typeof content === 'string') {
+ return content
+ } else if (Array.isArray(content)) {
+ const formattedContent = []
+ for (const toolResultBlock of content) {
+ if (toolResultBlock.text) {
+ formattedContent.push(toolResultBlock.text)
+ } else if (toolResultBlock.type === 'image') {
+ formattedContent.push('([IMAGE DETECTED])')
+ }
+ }
+
+ return formattedContent.join(',')
+ }
+ return JSON.stringify(content)
+ }
+}
+
+module.exports = AnthropicLLMObsPlugin
diff --git a/packages/dd-trace/src/llmobs/tagger.js b/packages/dd-trace/src/llmobs/tagger.js
index 633bd2d3104..d31f4fe4483 100644
--- a/packages/dd-trace/src/llmobs/tagger.js
+++ b/packages/dd-trace/src/llmobs/tagger.js
@@ -268,6 +268,32 @@ class LLMObsTagger {
return filteredToolCalls
}
+ #filterToolResults (toolResults) {
+ if (!Array.isArray(toolResults)) {
+ toolResults = [toolResults]
+ }
+
+ const filteredToolResults = []
+ for (const toolResult of toolResults) {
+ if (typeof toolResult !== 'object') {
+ this.#handleFailure('Tool result must be an object.', 'invalid_io_messages')
+ continue
+ }
+
+ const { result, toolId, type } = toolResult
+ const toolResultObj = {}
+
+ const condition1 = this.#tagConditionalString(result, 'Tool result', toolResultObj, 'result')
+ const condition2 = this.#tagConditionalString(toolId, 'Tool ID', toolResultObj, 'tool_id')
+ const condition3 = this.#tagConditionalString(type, 'Tool type', toolResultObj, 'type')
+
+ if (condition1 && condition2 && condition3) {
+ filteredToolResults.push(toolResultObj)
+ }
+ }
+ return filteredToolResults
+ }
+
#tagMessages (span, data, key) {
if (!data) {
return
@@ -290,6 +316,7 @@ class LLMObsTagger {
const { content = '', role } = message
const toolCalls = message.toolCalls
+ const toolResults = message.toolResults
const toolId = message.toolId
const messageObj = { content }
@@ -308,6 +335,14 @@ class LLMObsTagger {
}
}
+ if (toolResults) {
+ const filteredToolResults = this.#filterToolResults(toolResults)
+
+ if (filteredToolResults.length) {
+ messageObj.tool_results = filteredToolResults
+ }
+ }
+
if (toolId) {
if (role === 'tool') {
condition = this.#tagConditionalString(toolId, 'Tool ID', messageObj, 'tool_id')
diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js
index e2ed803e1ec..c462f7d10b7 100644
--- a/packages/dd-trace/src/plugins/index.js
+++ b/packages/dd-trace/src/plugins/index.js
@@ -1,6 +1,7 @@
'use strict'
module.exports = {
+ get '@anthropic-ai/sdk' () { return require('../../../datadog-plugin-anthropic/src') },
get '@apollo/gateway' () { return require('../../../datadog-plugin-apollo/src') },
get '@aws-sdk/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
get '@azure/functions' () { return require('../../../datadog-plugin-azure-functions/src') },
diff --git a/packages/dd-trace/src/remote_config/capabilities.js b/packages/dd-trace/src/remote_config/capabilities.js
index b46755bbc86..e8f65cf30d0 100644
--- a/packages/dd-trace/src/remote_config/capabilities.js
+++ b/packages/dd-trace/src/remote_config/capabilities.js
@@ -31,6 +31,7 @@ module.exports = {
ASM_RASP_CMDI: 1n << 37n,
ASM_DD_MULTICONFIG: 1n << 42n,
ASM_TRACE_TAGGING_RULES: 1n << 43n,
+ ASM_EXTENDED_DATA_COLLECTION: 1n << 44n,
/*
DO NOT ADD ARBITRARY CAPABILITIES IN YOUR CODE
UNLESS THEY ARE ALREADY DEFINED IN THE BACKEND SOURCE OF TRUTH
diff --git a/packages/dd-trace/src/remote_config/index.js b/packages/dd-trace/src/remote_config/index.js
index 813d7ed577c..f5c491c8668 100644
--- a/packages/dd-trace/src/remote_config/index.js
+++ b/packages/dd-trace/src/remote_config/index.js
@@ -96,6 +96,7 @@ function enableWafUpdate (appsecConfig) {
rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, true)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, true)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRACE_TAGGING_RULES, true)
+ rc.updateCapabilities(RemoteConfigCapabilities.ASM_EXTENDED_DATA_COLLECTION, true)
if (appsecConfig.rasp?.enabled) {
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, true)
@@ -134,6 +135,7 @@ function disableWafUpdate () {
rc.updateCapabilities(RemoteConfigCapabilities.ASM_HEADER_FINGERPRINT, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_DD_MULTICONFIG, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRACE_TAGGING_RULES, false)
+ rc.updateCapabilities(RemoteConfigCapabilities.ASM_EXTENDED_DATA_COLLECTION, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, false)
diff --git a/packages/dd-trace/src/supported-configurations.json b/packages/dd-trace/src/supported-configurations.json
index bb7694fca4c..c346d957fb0 100644
--- a/packages/dd-trace/src/supported-configurations.json
+++ b/packages/dd-trace/src/supported-configurations.json
@@ -176,6 +176,7 @@
"DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED": ["A"],
"DD_TRACE_AEROSPIKE_ENABLED": ["A"],
"DD_TRACE_AI_ENABLED": ["A"],
+ "DD_TRACE_ANTHROPIC_ENABLED": ["A"],
"DD_TRACE_AGENT_PORT": ["A"],
"DD_TRACE_AGENT_PROTOCOL_VERSION": ["A"],
"DD_TRACE_AGENT_URL": ["A"],
diff --git a/packages/dd-trace/test/appsec/extended-data-collection.express.plugin.spec.js b/packages/dd-trace/test/appsec/extended-data-collection.express.plugin.spec.js
new file mode 100644
index 00000000000..a100ce5d82f
--- /dev/null
+++ b/packages/dd-trace/test/appsec/extended-data-collection.express.plugin.spec.js
@@ -0,0 +1,292 @@
+'use strict'
+
+const Config = require('../../src/config')
+const path = require('path')
+const { withVersions } = require('../setup/mocha')
+const agent = require('../plugins/agent')
+const appsec = require('../../src/appsec')
+const axios = require('axios')
+const assert = require('assert')
+const msgpack = require('@msgpack/msgpack')
+
+function createDeepObject (sheetValue, currentLevel = 1, max = 20) {
+ if (currentLevel === max) {
+ return {
+ [`s-${currentLevel}`]: `s-${currentLevel}`,
+ [`o-${currentLevel}`]: sheetValue
+ }
+ }
+
+ return {
+ [`s-${currentLevel}`]: `s-${currentLevel}`,
+ [`o-${currentLevel}`]: createDeepObject(sheetValue, currentLevel + 1, max)
+ }
+}
+
+describe('extended data collection', () => {
+ before(() => {
+ return agent.load(['express', 'http'], { client: false })
+ })
+
+ after(() => {
+ return agent.close({ ritmReset: false })
+ })
+
+ withVersions('express', 'express', expressVersion => {
+ let port, server
+
+ before((done) => {
+ const express = require(`../../../../versions/express@${expressVersion}`).get()
+ const bodyParser = require('../../../../versions/body-parser').get()
+
+ const app = express()
+ app.use(bodyParser.json())
+
+ app.post('/', (req, res) => {
+ res.setHeader('custom-response-header-1', 'custom-response-header-value-1')
+ res.setHeader('custom-response-header-2', 'custom-response-header-value-2')
+ res.setHeader('custom-response-header-3', 'custom-response-header-value-3')
+ res.setHeader('custom-response-header-4', 'custom-response-header-value-4')
+ res.setHeader('custom-response-header-5', 'custom-response-header-value-5')
+ res.setHeader('custom-response-header-6', 'custom-response-header-value-6')
+ res.setHeader('custom-response-header-7', 'custom-response-header-value-7')
+ res.setHeader('custom-response-header-8', 'custom-response-header-value-8')
+ res.setHeader('custom-response-header-9', 'custom-response-header-value-9')
+ res.setHeader('custom-response-header-10', 'custom-response-header-value-10')
+
+ res.end('DONE')
+ })
+
+ app.post('/redacted-headers', (req, res) => {
+ res.setHeader('authorization', 'header-value-1')
+ res.setHeader('proxy-authorization', 'header-value-2')
+ res.setHeader('www-authenticate', 'header-value-4')
+ res.setHeader('proxy-authenticate', 'header-value-5')
+ res.setHeader('authentication-info', 'header-value-6')
+ res.setHeader('proxy-authentication-info', 'header-value-7')
+ res.setHeader('cookie', 'header-value-8')
+ res.setHeader('set-cookie', 'header-value-9')
+
+ res.end('DONE')
+ })
+
+ server = app.listen(port, () => {
+ port = server.address().port
+ done()
+ })
+ })
+
+ after(() => {
+ server.close()
+ })
+
+ beforeEach(() => {
+ appsec.enable(new Config(
+ {
+ appsec: {
+ enabled: true,
+ rules: path.join(__dirname, './extended-data-collection.rules.json')
+ }
+ }
+ ))
+ })
+
+ afterEach(() => {
+ appsec.disable()
+ })
+
+ it('Should collect nothing when no extended_data_collection is triggered', async () => {
+ const requestBody = {
+ other: 'other',
+ chained: {
+ child: 'one',
+ child2: 2
+ }
+ }
+ await axios.post(
+ `http://localhost:${port}/`,
+ requestBody,
+ {
+ headers: {
+ 'custom-header-key-1': 'custom-header-value-1',
+ 'custom-header-key-2': 'custom-header-value-2',
+ 'custom-header-key-3': 'custom-header-value-3'
+ }
+ }
+ )
+
+ await agent.assertSomeTraces((traces) => {
+ const span = traces[0][0]
+ assert.strictEqual(span.type, 'web')
+
+ assert.strictEqual(span.meta['http.request.headers.custom-request-header-1'], undefined)
+ assert.strictEqual(span.meta['http.request.headers.custom-request-header-2'], undefined)
+ assert.strictEqual(span.meta['http.request.headers.custom-request-header-3'], undefined)
+
+ assert.strictEqual(span.meta['http.response.headers.custom-response-header-1'], undefined)
+ assert.strictEqual(span.meta['http.response.headers.custom-response-header-2'], undefined)
+ assert.strictEqual(span.meta['http.response.headers.custom-response-header-3'], undefined)
+
+ const rawMetaStructBody = span.meta_struct?.['http.request.body']
+ assert.strictEqual(rawMetaStructBody, undefined)
+ })
+ })
+
+ it('Should redact request/response headers', async () => {
+ const requestBody = {
+ bodyParam: 'collect-standard'
+ }
+ await axios.post(
+ `http://localhost:${port}/redacted-headers`,
+ requestBody,
+ {
+ headers: {
+ authorization: 'header-value-1',
+ 'proxy-authorization': 'header-value-2',
+ 'www-authenticate': 'header-value-3',
+ 'proxy-authenticate': 'header-value-4',
+ 'authentication-info': 'header-value-5',
+ 'proxy-authentication-info': 'header-value-6',
+ cookie: 'header-value-7',
+ 'set-cookie': 'header-value-8'
+ }
+ }
+ )
+
+ await agent.assertSomeTraces((traces) => {
+ const span = traces[0][0]
+ assert.strictEqual(span.type, 'web')
+ assert.strictEqual(span.meta['http.request.headers.authorization'], '')
+ assert.strictEqual(span.meta['http.request.headers.proxy-authorization'], '')
+ assert.strictEqual(span.meta['http.request.headers.www-authenticate'], '')
+ assert.strictEqual(span.meta['http.request.headers.proxy-authenticate'], '')
+ assert.strictEqual(span.meta['http.request.headers.authentication-info'], '')
+ assert.strictEqual(span.meta['http.request.headers.proxy-authentication-info'], '')
+ assert.strictEqual(span.meta['http.request.headers.cookie'], '')
+ assert.strictEqual(span.meta['http.request.headers.set-cookie'], '')
+
+ assert.strictEqual(span.meta['http.response.headers.authorization'], '')
+ assert.strictEqual(span.meta['http.response.headers.proxy-authorization'], '')
+ assert.strictEqual(span.meta['http.response.headers.www-authenticate'], '')
+ assert.strictEqual(span.meta['http.response.headers.proxy-authenticate'], '')
+ assert.strictEqual(span.meta['http.response.headers.authentication-info'], '')
+ assert.strictEqual(span.meta['http.response.headers.proxy-authentication-info'], '')
+ assert.strictEqual(span.meta['http.response.headers.cookie'], '')
+ assert.strictEqual(span.meta['http.response.headers.set-cookie'], '')
+ })
+ })
+
+ it('Should collect request body and request/response with a max of 8 headers', async () => {
+ const requestBody = {
+ bodyParam: 'collect-few-headers',
+ other: 'other',
+ chained: {
+ child: 'one',
+ child2: 2
+ }
+ }
+ await axios.post(
+ `http://localhost:${port}/`,
+ requestBody,
+ {
+ headers: {
+ 'custom-request-header-1': 'custom-request-header-value-1',
+ 'custom-request-header-2': 'custom-request-header-value-2',
+ 'custom-request-header-3': 'custom-request-header-value-3',
+ 'custom-request-header-4': 'custom-request-header-value-4',
+ 'custom-request-header-5': 'custom-request-header-value-5',
+ 'custom-request-header-6': 'custom-request-header-value-6',
+ 'custom-request-header-7': 'custom-request-header-value-7',
+ 'custom-request-header-8': 'custom-request-header-value-8',
+ 'custom-request-header-9': 'custom-request-header-value-9',
+ 'custom-request-header-10': 'custom-request-header-value-10'
+ }
+ }
+ )
+
+ await agent.assertSomeTraces((traces) => {
+ const span = traces[0][0]
+ assert.strictEqual(span.type, 'web')
+ const collectedRequestHeaders = Object.keys(span.meta)
+ .filter(metaKey => metaKey.startsWith('http.request.headers.')).length
+ const collectedResponseHeaders = Object.keys(span.meta)
+ .filter(metaKey => metaKey.startsWith('http.response.headers.')).length
+ assert.strictEqual(collectedRequestHeaders, 8)
+ assert.strictEqual(collectedResponseHeaders, 8)
+
+ assert.ok(span.metrics['_dd.appsec.request.header_collection.discarded'] > 2)
+ assert.ok(span.metrics['_dd.appsec.response.header_collection.discarded'] > 2)
+
+ const metaStructBody = msgpack.decode(span.meta_struct['http.request.body'])
+ assert.deepEqual(metaStructBody, requestBody)
+ })
+ })
+
+ it('Should truncate the request body when depth is more than 20 levels', async () => {
+ const deepObject = createDeepObject('sheet')
+
+ const requestBody = {
+ bodyParam: 'collect-standard',
+ deepObject
+ }
+
+ const expectedDeepTruncatedObject = createDeepObject({ 's-19': 's-19' }, 1, 18)
+ const expectedRequestBody = {
+ bodyParam: 'collect-standard',
+ deepObject: expectedDeepTruncatedObject
+ }
+ await axios.post(`http://localhost:${port}/`, requestBody)
+
+ await agent.assertSomeTraces((traces) => {
+ const span = traces[0][0]
+ assert.strictEqual(span.type, 'web')
+
+ const metaStructBody = msgpack.decode(span.meta_struct['http.request.body'])
+ assert.deepEqual(metaStructBody, expectedRequestBody)
+ })
+ })
+
+ it('Should truncate the request body when string length is more than 4096 characters', async () => {
+ const requestBody = {
+ bodyParam: 'collect-standard',
+ longValue: Array(5000).fill('A').join('')
+ }
+
+ const expectedRequestBody = {
+ bodyParam: 'collect-standard',
+ longValue: Array(4096).fill('A').join('')
+ }
+ await axios.post(`http://localhost:${port}/`, requestBody)
+
+ await agent.assertSomeTraces((traces) => {
+ const span = traces[0][0]
+ assert.strictEqual(span.type, 'web')
+
+ const metaStructBody = msgpack.decode(span.meta_struct['http.request.body'])
+ assert.deepEqual(metaStructBody, expectedRequestBody)
+ })
+ })
+
+ it('Should truncate the request body when a node has more than 256 elements', async () => {
+ const children = Array(300).fill('item')
+ const requestBody = {
+ bodyParam: 'collect-standard',
+ children
+ }
+
+ const expectedRequestBody = {
+ bodyParam: 'collect-standard',
+ children: children.slice(0, 256)
+ }
+ await axios.post(`http://localhost:${port}/`, requestBody)
+
+ await agent.assertSomeTraces((traces) => {
+ const span = traces[0][0]
+ assert.strictEqual(span.type, 'web')
+
+ const metaStructBody = msgpack.decode(span.meta_struct['http.request.body'])
+ assert.deepEqual(metaStructBody, expectedRequestBody)
+ })
+ })
+ })
+})
diff --git a/packages/dd-trace/test/appsec/extended-data-collection.rules.json b/packages/dd-trace/test/appsec/extended-data-collection.rules.json
new file mode 100644
index 00000000000..fa45346ff85
--- /dev/null
+++ b/packages/dd-trace/test/appsec/extended-data-collection.rules.json
@@ -0,0 +1,74 @@
+{
+ "version": "2.2",
+ "metadata": {
+ "rules_version": "1.5.0"
+ },
+ "rules": [
+ {
+ "id": "test-rule-id-1",
+ "name": "test-rule-name-1",
+ "tags": {
+ "type": "a",
+ "category": "custom"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.body"
+ }
+ ],
+ "list": [
+ "collect-standard"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": ["lowercase"],
+ "on_match": ["extended_data_collection_standard"]
+ },
+ {
+ "id": "test-rule-id-2",
+ "name": "test-rule-name-2",
+ "tags": {
+ "type": "a",
+ "category": "custom"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.body"
+ }
+ ],
+ "list": [
+ "collect-few-headers"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": ["lowercase"],
+ "on_match": ["extended_data_collection_few_headers"]
+ }
+ ],
+ "actions": [
+ {
+ "id": "extended_data_collection_standard",
+ "parameters": {
+ "max_collected_headers": 50
+ },
+ "type": "extended_data_collection"
+ },
+ {
+ "id": "extended_data_collection_few_headers",
+ "parameters": {
+ "max_collected_headers": 8
+ },
+ "type": "extended_data_collection"
+ }
+ ]
+}
diff --git a/packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js
index ceeb372a29f..d25211c7f7e 100644
--- a/packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js
+++ b/packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js
@@ -121,7 +121,8 @@ describe('RASP - ssrf', () => {
// we preload axios because it's lazyloading a debug dependency
// that in turns trigger LFI
- axiosToTest.get('http://preloadaxios').catch(noop).then(done)
+
+ axiosToTest.get('http://preloadaxios', { timeout: 10 }).catch(noop).then(done)
})
it('Should not detect threat', async () => {
diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js
index 2a384c9f1db..b175869cd12 100644
--- a/packages/dd-trace/test/appsec/reporter.spec.js
+++ b/packages/dd-trace/test/appsec/reporter.spec.js
@@ -384,12 +384,14 @@ describe('reporter', () => {
it('should add tags to request span when socket is not there', () => {
delete req.socket
- Reporter.reportAttack([
- {
- rule: {},
- rule_matches: [{}]
- }
- ])
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {},
+ rule_matches: [{}]
+ }
+ ]
+ })
expect(web.root).to.have.been.calledOnceWith(req)
expect(span.addTags).to.have.been.calledOnceWithExactly({
@@ -400,12 +402,14 @@ describe('reporter', () => {
})
it('should add tags to request span', () => {
- Reporter.reportAttack([
- {
- rule: {},
- rule_matches: [{}]
- }
- ])
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {},
+ rule_matches: [{}]
+ }
+ ]
+ })
expect(web.root).to.have.been.calledOnceWith(req)
expect(span.addTags).to.have.been.calledOnceWithExactly({
@@ -419,7 +423,7 @@ describe('reporter', () => {
it('should not overwrite origin tag', () => {
span.context()._tags = { '_dd.origin': 'tracer' }
- Reporter.reportAttack([])
+ Reporter.reportAttack({ events: [] })
expect(web.root).to.have.been.calledOnceWith(req)
expect(span.addTags).to.have.been.calledOnceWithExactly({
@@ -432,15 +436,17 @@ describe('reporter', () => {
it('should merge attacks json', () => {
span.context()._tags = { '_dd.appsec.json': '{"triggers":[{"rule":{},"rule_matches":[{}]}]}' }
- Reporter.reportAttack([
- {
- rule: {}
- },
- {
- rule: {},
- rule_matches: [{}]
- }
- ])
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {}
+ },
+ {
+ rule: {},
+ rule_matches: [{}]
+ }
+ ]
+ })
expect(web.root).to.have.been.calledOnceWith(req)
expect(span.addTags).to.have.been.calledOnceWithExactly({
@@ -454,15 +460,17 @@ describe('reporter', () => {
it('should call standalone sample', () => {
span.context()._tags = { '_dd.appsec.json': '{"triggers":[{"rule":{},"rule_matches":[{}]}]}' }
- Reporter.reportAttack([
- {
- rule: {}
- },
- {
- rule: {},
- rule_matches: [{}]
- }
- ])
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {}
+ },
+ {
+ rule: {},
+ rule_matches: [{}]
+ }
+ ]
+ })
expect(web.root).to.have.been.calledOnceWith(req)
expect(span.addTags).to.have.been.calledOnceWithExactly({
@@ -505,16 +513,18 @@ describe('reporter', () => {
}
)
- Reporter.reportAttack([
- {
- rule: {
- tags: {
- module: 'rasp'
- }
- },
- rule_matches: [{}]
- }
- ])
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {
+ tags: {
+ module: 'rasp'
+ }
+ },
+ rule_matches: [{}]
+ }
+ ]
+ })
expect(span.meta_struct['http.request.body']).to.be.deep.equal(expectedBody)
})
@@ -534,16 +544,18 @@ describe('reporter', () => {
}
)
- Reporter.reportAttack([
- {
- rule: {
- tags: {
- module: 'rasp'
- }
- },
- rule_matches: [{}]
- }
- ])
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {
+ tags: {
+ module: 'rasp'
+ }
+ },
+ rule_matches: [{}]
+ }
+ ]
+ })
expect(span.meta_struct?.['http.request.body']).to.be.undefined
})
@@ -616,18 +628,64 @@ describe('reporter', () => {
req.body = requestBody
- Reporter.reportAttack([
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {
+ tags: {
+ module: 'rasp'
+ }
+ },
+ rule_matches: [{}]
+ }
+ ]
+ })
+
+ expect(span.setTag).to.have.been.calledWithExactly('_dd.appsec.rasp.request_body_size.exceeded', 'true')
+ })
+
+ it('should set request body size exceeded metric for old and new approaches when both events happen', () => {
+ Reporter.init(
{
- rule: {
- tags: {
- module: 'rasp'
- }
+ rateLimit: 100,
+ extendedHeadersCollection: {
+ enabled: false,
+ redaction: true,
+ maxHeaders: 50
},
- rule_matches: [{}]
+ rasp: {
+ bodyCollection: true
+ }
+ }
+ )
+
+ req.body = requestBody
+
+ Reporter.reportAttack({
+ events: [
+ {
+ rule: {
+ tags: {
+ module: 'rasp'
+ }
+ },
+ rule_matches: [{}]
+ }
+ ],
+ actions: {
+ extended_data_collection: {
+ max_collected_headers: 10
+ }
}
- ])
+ })
expect(span.setTag).to.have.been.calledWithExactly('_dd.appsec.rasp.request_body_size.exceeded', 'true')
+ span.context()._tags = { '_dd.appsec.rasp.request_body_size.exceeded': 'true' }
+
+ const res = {}
+ Reporter.finishRequest(req, res, {})
+
+ expect(span.setTag).to.have.been.calledWithExactly('_dd.appsec.request_body_size.exceeded', 'true')
})
})
})
diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js
index c273decd041..7d2194d9fdd 100644
--- a/packages/dd-trace/test/appsec/waf/index.spec.js
+++ b/packages/dd-trace/test/appsec/waf/index.spec.js
@@ -8,6 +8,7 @@ const proxyquire = require('proxyquire')
const Config = require('../../../src/config')
const rules = require('../../../src/appsec/recommended.json')
const Reporter = require('../../../src/appsec/reporter')
+const { match } = require('sinon')
describe('WAF Manager', () => {
const knownAddresses = new Set([
@@ -458,7 +459,7 @@ describe('WAF Manager', () => {
wafContextWrapper.run(params)
- expect(Reporter.reportAttack).to.be.calledOnceWithExactly(['ATTACK DATA'])
+ expect(Reporter.reportAttack).to.be.calledOnceWith(match({ events: ['ATTACK DATA'] }))
})
it('should report if rule is triggered', () => {
diff --git a/packages/dd-trace/test/llmobs/cassettes/anthropic/anthropic_v1_messages_post_a6302ed3.yaml b/packages/dd-trace/test/llmobs/cassettes/anthropic/anthropic_v1_messages_post_a6302ed3.yaml
new file mode 100644
index 00000000000..083f0ea2692
--- /dev/null
+++ b/packages/dd-trace/test/llmobs/cassettes/anthropic/anthropic_v1_messages_post_a6302ed3.yaml
@@ -0,0 +1,156 @@
+interactions:
+- request:
+ body: "{\n \"model\": \"claude-3-7-sonnet-20250219\",\n \"messages\": [\n {\n
+ \ \"role\": \"user\",\n \"content\": \"Hello, world!\"\n }\n ],\n
+ \ \"max_tokens\": 100,\n \"temperature\": 0.5,\n \"stream\": true\n}"
+ headers:
+ ? !!python/object/apply:multidict._multidict.istr
+ - Accept
+ : - application/json
+ ? !!python/object/apply:multidict._multidict.istr
+ - Accept-Encoding
+ : - gzip,deflate
+ ? !!python/object/apply:multidict._multidict.istr
+ - Connection
+ : - keep-alive
+ Content-Length:
+ - '192'
+ ? !!python/object/apply:multidict._multidict.istr
+ - Content-Type
+ : - application/json
+ ? !!python/object/apply:multidict._multidict.istr
+ - User-Agent
+ : - Anthropic/JS 0.14.0
+ ? !!python/object/apply:multidict._multidict.istr
+ - anthropic-version
+ : - '2023-06-01'
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-arch
+ : - arm64
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-lang
+ : - js
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-os
+ : - MacOS
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-package-version
+ : - 0.14.0
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-runtime
+ : - node
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-runtime-version
+ : - v22.17.0
+ method: POST
+ uri: https://api.anthropic.com/v1/messages
+ response:
+ body:
+ string: 'event: message_start
+
+ data: {"type":"message_start","message":{"id":"msg_016fWBPdLcDjoSC2L4xLU3Kp","type":"message","role":"assistant","model":"claude-3-7-sonnet-20250219","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":11,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":2,"service_tier":"standard"}}}
+
+
+ event: content_block_start
+
+ data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""} }
+
+
+ event: content_block_delta
+
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello!"} }
+
+
+ event: content_block_delta
+
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"
+ It''s nice to connect with you. I''m"} }
+
+
+ event: ping
+
+ data: {"type": "ping"}
+
+
+ event: content_block_delta
+
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"
+ an AI assistant here to help with information, answer"} }
+
+
+ event: content_block_delta
+
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"
+ questions, or just chat. How can I assist you today"} }
+
+
+ event: content_block_delta
+
+ data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"?"} }
+
+
+ event: content_block_stop
+
+ data: {"type":"content_block_stop","index":0 }
+
+
+ event: message_delta
+
+ data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":11,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"output_tokens":38} }
+
+
+ event: message_stop
+
+ data: {"type":"message_stop" }
+
+
+ '
+ headers:
+ CF-RAY:
+ - 97f8e960583a437f-EWR
+ Cache-Control:
+ - no-cache
+ Connection:
+ - keep-alive
+ Content-Type:
+ - text/event-stream; charset=utf-8
+ Date:
+ - Mon, 15 Sep 2025 14:46:08 GMT
+ Server:
+ - cloudflare
+ Transfer-Encoding:
+ - chunked
+ X-Robots-Tag:
+ - none
+ anthropic-ratelimit-input-tokens-limit:
+ - '8000000'
+ anthropic-ratelimit-input-tokens-remaining:
+ - '8000000'
+ anthropic-ratelimit-input-tokens-reset:
+ - '2025-09-15T14:46:06Z'
+ anthropic-ratelimit-output-tokens-limit:
+ - '1200000'
+ anthropic-ratelimit-output-tokens-remaining:
+ - '1200000'
+ anthropic-ratelimit-output-tokens-reset:
+ - '2025-09-15T14:46:06Z'
+ anthropic-ratelimit-tokens-limit:
+ - '9200000'
+ anthropic-ratelimit-tokens-remaining:
+ - '9200000'
+ anthropic-ratelimit-tokens-reset:
+ - '2025-09-15T14:46:06Z'
+ cf-cache-status:
+ - DYNAMIC
+ request-id:
+ - req_011CTANGMsVNbW82orJR1LEu
+ strict-transport-security:
+ - max-age=31536000; includeSubDomains; preload
+ via:
+ - 1.1 google
+ x-envoy-upstream-service-time:
+ - '1890'
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/packages/dd-trace/test/llmobs/cassettes/anthropic/anthropic_v1_messages_post_d65e09ee.yaml b/packages/dd-trace/test/llmobs/cassettes/anthropic/anthropic_v1_messages_post_d65e09ee.yaml
new file mode 100644
index 00000000000..d8f32e926ce
--- /dev/null
+++ b/packages/dd-trace/test/llmobs/cassettes/anthropic/anthropic_v1_messages_post_d65e09ee.yaml
@@ -0,0 +1,101 @@
+interactions:
+- request:
+ body: "{\n \"model\": \"claude-3-7-sonnet-20250219\",\n \"messages\": [\n {\n
+ \ \"role\": \"user\",\n \"content\": \"Hello, world!\"\n }\n ],\n
+ \ \"max_tokens\": 100,\n \"temperature\": 0.5\n}"
+ headers:
+ ? !!python/object/apply:multidict._multidict.istr
+ - Accept
+ : - application/json
+ ? !!python/object/apply:multidict._multidict.istr
+ - Accept-Encoding
+ : - gzip,deflate
+ ? !!python/object/apply:multidict._multidict.istr
+ - Connection
+ : - keep-alive
+ Content-Length:
+ - '174'
+ ? !!python/object/apply:multidict._multidict.istr
+ - Content-Type
+ : - application/json
+ ? !!python/object/apply:multidict._multidict.istr
+ - User-Agent
+ : - Anthropic/JS 0.14.0
+ ? !!python/object/apply:multidict._multidict.istr
+ - anthropic-version
+ : - '2023-06-01'
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-arch
+ : - arm64
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-lang
+ : - js
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-os
+ : - MacOS
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-package-version
+ : - 0.14.0
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-runtime
+ : - node
+ ? !!python/object/apply:multidict._multidict.istr
+ - x-stainless-runtime-version
+ : - v22.17.0
+ method: POST
+ uri: https://api.anthropic.com/v1/messages
+ response:
+ body:
+ string: '{"id":"msg_013izVHXwZK1vaGyFNKsawsw","type":"message","role":"assistant","model":"claude-3-7-sonnet-20250219","content":[{"type":"text","text":"Hello!
+ It''s nice to meet you. I''m Claude, an AI assistant made by Anthropic. How
+ can I help you today? Whether you have questions, need information, or just
+ want to chat, I''m here and ready to assist."}],"stop_reason":"end_turn","stop_sequence":null,"usage":{"input_tokens":11,"cache_creation_input_tokens":0,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0},"output_tokens":54,"service_tier":"standard"}}'
+ headers:
+ CF-RAY:
+ - 97f8ce2adbbbc8b9-EWR
+ Connection:
+ - keep-alive
+ Content-Encoding:
+ - gzip
+ Content-Type:
+ - application/json
+ Date:
+ - Mon, 15 Sep 2025 14:27:33 GMT
+ Server:
+ - cloudflare
+ Transfer-Encoding:
+ - chunked
+ X-Robots-Tag:
+ - none
+ anthropic-ratelimit-input-tokens-limit:
+ - '8000000'
+ anthropic-ratelimit-input-tokens-remaining:
+ - '8000000'
+ anthropic-ratelimit-input-tokens-reset:
+ - '2025-09-15T14:27:33Z'
+ anthropic-ratelimit-output-tokens-limit:
+ - '1200000'
+ anthropic-ratelimit-output-tokens-remaining:
+ - '1200000'
+ anthropic-ratelimit-output-tokens-reset:
+ - '2025-09-15T14:27:33Z'
+ anthropic-ratelimit-tokens-limit:
+ - '9200000'
+ anthropic-ratelimit-tokens-remaining:
+ - '9200000'
+ anthropic-ratelimit-tokens-reset:
+ - '2025-09-15T14:27:33Z'
+ cf-cache-status:
+ - DYNAMIC
+ request-id:
+ - req_011CTALrCxBUYKxwL7A3cv5j
+ strict-transport-security:
+ - max-age=31536000; includeSubDomains; preload
+ via:
+ - 1.1 google
+ x-envoy-upstream-service-time:
+ - '2270'
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js b/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js
new file mode 100644
index 00000000000..fc142d4222a
--- /dev/null
+++ b/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js
@@ -0,0 +1,144 @@
+'use strict'
+
+const { describe, before, it } = require('mocha')
+const { withVersions } = require('../../../setup/mocha')
+const assert = require('node:assert')
+const { useEnv } = require('../../../../../../integration-tests/helpers')
+
+const {
+ useLlmObs,
+ expectedLLMObsLLMSpanEvent,
+ deepEqualWithMockValues,
+ MOCK_STRING,
+ MOCK_NUMBER
+} = require('../../util')
+const chai = require('chai')
+
+chai.Assertion.addMethod('deepEqualWithMockValues', deepEqualWithMockValues)
+const { expect } = chai
+
+function assertLLMObsSpan (apmSpans, llmobsSpans) {
+ const expectedWorkflowSpan = expectedLLMObsLLMSpanEvent({
+ span: apmSpans[0],
+ name: 'anthropic.request',
+ spanKind: 'llm',
+ modelName: 'claude-3-7-sonnet-20250219',
+ modelProvider: 'anthropic',
+ inputMessages: [{ role: 'user', content: 'Hello, world!' }],
+ outputMessages: [{ role: 'assistant', content: MOCK_STRING }],
+ metadata: {
+ max_tokens: 100,
+ temperature: 0.5,
+ },
+ tokenMetrics: {
+ input_tokens: MOCK_NUMBER,
+ output_tokens: MOCK_NUMBER,
+ total_tokens: MOCK_NUMBER,
+ cache_write_input_tokens: MOCK_NUMBER,
+ cache_read_input_tokens: MOCK_NUMBER
+ },
+ tags: { ml_app: 'test', language: 'javascript', integration: 'anthropic' },
+ })
+
+ expect(llmobsSpans[0]).to.deepEqualWithMockValues(expectedWorkflowSpan)
+}
+
+describe('Plugin', () => {
+ useEnv({
+ ANTHROPIC_API_KEY: ''
+ })
+
+ const getEvents = useLlmObs({ plugin: 'anthropic' })
+
+ withVersions('anthropic', '@anthropic-ai/sdk', (version) => {
+ let client
+
+ before(() => {
+ const { Anthropic } = require(`../../../../../../versions/@anthropic-ai/sdk@${version}`).get()
+ client = new Anthropic({ baseURL: 'http://127.0.0.1:9126/vcr/anthropic' })
+ })
+
+ describe('messages.create', () => {
+ it('creates a span', async () => {
+ await client.messages.create({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5,
+ })
+
+ const { apmSpans, llmobsSpans } = await getEvents()
+ assertLLMObsSpan(apmSpans, llmobsSpans)
+ })
+
+ describe('stream', () => {
+ it('creates a span', async () => {
+ const stream = await client.messages.create({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5,
+ stream: true
+ })
+
+ for await (const chunk of stream) {
+ assert.ok(chunk)
+ }
+
+ const { apmSpans, llmobsSpans } = await getEvents()
+ assertLLMObsSpan(apmSpans, llmobsSpans)
+ })
+ })
+ })
+
+ describe('messages.stream', () => {
+ it('creates a span for async iterator consumption', async () => {
+ const stream = client.messages.stream({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5
+ })
+
+ for await (const chunk of stream) {
+ assert.ok(chunk)
+ }
+
+ const { apmSpans, llmobsSpans } = await getEvents()
+ assertLLMObsSpan(apmSpans, llmobsSpans)
+ })
+
+ describe('when using streaming helper methods', () => {
+ it('creates a span for stream.on', async () => {
+ client.messages.stream({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5
+ }).on('text', text => {
+ assert.ok(text)
+ })
+
+ const { apmSpans, llmobsSpans } = await getEvents()
+ assertLLMObsSpan(apmSpans, llmobsSpans)
+ })
+
+ it('creates a span for stream.finalMessage', async () => {
+ const stream = client.messages.stream({
+ model: 'claude-3-7-sonnet-20250219',
+ messages: [{ role: 'user', content: 'Hello, world!' }],
+ max_tokens: 100,
+ temperature: 0.5,
+ stream: true
+ })
+
+ const message = await stream.finalMessage()
+ assert.ok(message)
+
+ const { apmSpans, llmobsSpans } = await getEvents()
+ assertLLMObsSpan(apmSpans, llmobsSpans)
+ })
+ })
+ })
+ })
+})
diff --git a/packages/dd-trace/test/llmobs/tagger.spec.js b/packages/dd-trace/test/llmobs/tagger.spec.js
index 8ec2325078c..dc6be04d4be 100644
--- a/packages/dd-trace/test/llmobs/tagger.spec.js
+++ b/packages/dd-trace/test/llmobs/tagger.spec.js
@@ -402,6 +402,62 @@ describe('tagger', () => {
})
})
+ describe('tagging tool results appropriately', () => {
+ it('tags a span with tool results', () => {
+ const inputData = [
+ { content: 'hello', toolResults: [{ result: 'foo', toolId: '123', type: 'tool_result' }] }
+ ]
+
+ tagger._register(span)
+ tagger.tagLLMIO(span, inputData)
+ expect(Tagger.tagMap.get(span)).to.deep.equal({
+ '_ml_obs.meta.input.messages': [
+ { content: 'hello', tool_results: [{ result: 'foo', tool_id: '123', type: 'tool_result' }] }
+ ]
+ })
+ })
+
+ it('throws for a non-object tool result', () => {
+ const messages = [
+ { content: 'a', toolResults: 5 }
+ ]
+
+ tagger._register(span)
+
+ expect(() => tagger.tagLLMIO(span, messages, undefined)).to.throw('Tool result must be an object.')
+ })
+
+ it('throws for a non-string tool result', () => {
+ const messages = [
+ { content: 'a', toolResults: [{ result: 5 }] }
+ ]
+
+ tagger._register(span)
+
+ expect(() => tagger.tagLLMIO(span, messages, undefined)).to.throw('"Tool result" must be a string.')
+ })
+
+ it('throws for a non-string tool id', () => {
+ const messages = [
+ { content: 'a', toolResults: [{ result: 'foo', toolId: 123 }] }
+ ]
+
+ tagger._register(span)
+
+ expect(() => tagger.tagLLMIO(span, messages, undefined)).to.throw('"Tool ID" must be a string.')
+ })
+
+ it('throws for a non-string tool type', () => {
+ const messages = [
+ { content: 'a', toolResults: [{ result: 'foo', toolId: '123', type: 5 }] }
+ ]
+
+ tagger._register(span)
+
+ expect(() => tagger.tagLLMIO(span, messages, undefined)).to.throw('"Tool type" must be a string.')
+ })
+ })
+
describe('tool message tagging', () => {
it('tags a span with a tool message', () => {
const messages = [
diff --git a/packages/dd-trace/test/plugins/versions/package.json b/packages/dd-trace/test/plugins/versions/package.json
index 2aac39ee7ba..2fd5e2de0b7 100644
--- a/packages/dd-trace/test/plugins/versions/package.json
+++ b/packages/dd-trace/test/plugins/versions/package.json
@@ -5,6 +5,7 @@
"private": true,
"dependencies": {
"@ai-sdk/openai": "2.0.34",
+ "@anthropic-ai/sdk": "0.63.1",
"@openai/agents": "0.1.4",
"@openai/agents-core": "0.1.4",
"@apollo/gateway": "2.11.2",
diff --git a/register.js b/register.js
index 40bd233ba23..3c365f424d5 100644
--- a/register.js
+++ b/register.js
@@ -6,5 +6,13 @@ const { register } = require('node:module')
const { pathToFileURL } = require('node:url')
register('./loader-hook.mjs', pathToFileURL(__filename), {
- data: { exclude: [/langsmith/, /openai\/_shims/, /openai\/resources\/chat\/completions\/messages/, /openai\/agents-core\/dist\/shims/] }
+ data: {
+ exclude: [
+ /langsmith/,
+ /openai\/_shims/,
+ /openai\/resources\/chat\/completions\/messages/,
+ /openai\/agents-core\/dist\/shims/,
+ /@anthropic-ai\/sdk\/_shims/
+ ]
+ }
})
diff --git a/tsconfig.json b/tsconfig.dev.json
similarity index 100%
rename from tsconfig.json
rename to tsconfig.dev.json