Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions .github/workflows/llmobs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -141,3 +141,25 @@ jobs:
uses: ./.github/actions/testagent/logs
with:
suffix: llmobs-${{ github.job }}

anthropic:
runs-on: ubuntu-latest
env:
PLUGINS: anthropic
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: ./.github/actions/testagent/start
- uses: ./.github/actions/node/oldest-maintenance-lts
- uses: ./.github/actions/install
- run: yarn test:plugins:ci
- run: yarn test:llmobs:plugins:ci
shell: bash
- uses: ./.github/actions/node/latest
- run: yarn test:plugins:ci
- run: yarn test:llmobs:plugins:ci
shell: bash
- uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
- if: always()
uses: ./.github/actions/testagent/logs
with:
suffix: llmobs-${{ github.job }}
2 changes: 1 addition & 1 deletion .github/workflows/project.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ jobs:
static-analysis:
runs-on: ubuntu-latest
name: Datadog Static Analyzer
if: github.actor != 'dependabot[bot]'
if: github.actor != 'dependabot[bot]' && github.event_name != 'pull_request'
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
Expand Down
2 changes: 1 addition & 1 deletion .gitlab/benchmarks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ variables:
tags: ["runner:apm-k8s-tweaked-metal"]
image: $MICROBENCHMARKS_CI_IMAGE
interruptible: true
timeout: 20m
timeout: 15m # TODO: Fix worker queueing and reduce this.
script:
- git clone --branch dd-trace-js https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
- bp-runner bp-runner.yml --debug
Expand Down
12 changes: 5 additions & 7 deletions .gitlab/macrobenchmarks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,15 @@ include:
.macrobenchmarks:
stage: macrobenchmarks
rules:
- if: ($NIGHTLY_BENCHMARKS || $CI_PIPELINE_SOURCE != "schedule") && $CI_COMMIT_REF_NAME == "master"
when: always
- when: always
- when: manual
tags: ["runner:apm-k8s-same-cpu"]
needs: []
interruptible: true
timeout: 1h
timeout: 15m # TODO: Fix worker queueing and reduce this.
image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:js-hapi
script:
# TODO: Revert to js/hapi after https://github.com/DataDog/benchmarking-platform/pull/199 is merged
- git clone --branch rochdev/parallel-experiments https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
- git clone --branch js/hapi https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform platform && cd platform
- bp-runner bp-runner.$EXPERIMENT.yml --debug -t
artifacts:
name: "artifacts"
Expand All @@ -27,13 +25,13 @@ include:
expire_in: 3 months
variables:
K6_OPTIONS_WARMUP_RATE: 500
K6_OPTIONS_WARMUP_DURATION: 1m
K6_OPTIONS_WARMUP_DURATION: 30s
K6_OPTIONS_WARMUP_GRACEFUL_STOP: 10s
K6_OPTIONS_WARMUP_PRE_ALLOCATED_VUS: 4
K6_OPTIONS_WARMUP_MAX_VUS: 4

K6_OPTIONS_NORMAL_OPERATION_RATE: 300
K6_OPTIONS_NORMAL_OPERATION_DURATION: 10m
K6_OPTIONS_NORMAL_OPERATION_DURATION: 3m
K6_OPTIONS_NORMAL_OPERATION_GRACEFUL_STOP: 10s
K6_OPTIONS_NORMAL_OPERATION_PRE_ALLOCATED_VUS: 4
K6_OPTIONS_NORMAL_OPERATION_MAX_VUS: 4
Expand Down
41 changes: 27 additions & 14 deletions docs/add-redirects.sh
Original file line number Diff line number Diff line change
@@ -1,38 +1,48 @@
#!/usr/bin/env bash

# Previously, URLs to plugin pages looked like this:
# interfaces/plugins.amqp10.html
# interfaces/export_.plugins.connect.html
#
# Now, with an updated typedoc and updated types, they look like this:
# interfaces/export_.plugins.connect.html
#
# interfaces/plugins.amqp10.html
#
# This script automatically generates basic HTML files to redirect users who
# visit the old URLs to the new URL.

echo "writing redirects..."
# TODO(2026-10-07): Delete this file and remove from docs/package.json
# NOTE: Do not add any new entries to this list

declare -a plugins=(
"aerospike"
"amqp10"
"amqplib"
"apollo"
"avsc"
"aws_sdk"
"bluebird"
"couchbase"
"cucumber"
"axios"
"azure_functions"
"azure_service_bus"
"bunyan"
"cassandra_driver"
"child_process"
"confluentinc_kafka_javascript"
"connect"
"couchbase"
"cucumber"
"cypress"
"dns"
"elasticsearch"
"express"
"fastify"
"fetch"
"generic_pool"
"google_cloud_pubsub"
"google_cloud_vertexai"
"graphql"
"grpc"
"hapi"
"hono"
"http"
"http2"
"ioredis"
Expand All @@ -41,38 +51,41 @@ declare -a plugins=(
"kafkajs"
"knex"
"koa"
"langchain"
"ldapjs"
"mariadb"
"memcached"
"microgateway_core"
"mocha"
"mongodb_core"
"mongoose"
"mysql"
"mysql2"
"net"
"next"
"opensearch"
"openai"
"opensearch"
"oracledb"
"pino"
"pg"
"pino"
"playwright"
"prisma"
"promise"
"promise_js"
"protobufjs"
"q"
"redis"
"restify"
"rhea"
"router"
"selenium"
"sharedb"
"tedious"
"undici"
"when"
"vitest"
"winston"
"ws"
)

for i in "${plugins[@]}"
do
echo "<meta http-equiv=\"refresh\" content=\"0; URL=./export_.plugins.$i.html\" />" > out/interfaces/plugins.$i.html
echo "<meta http-equiv=\"refresh\" content=\"0; URL=./plugins.$i.html\" />" > out/interfaces/export_.plugins.$i.html
done

echo "done."
1 change: 1 addition & 0 deletions docs/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,7 @@ const openSearchOptions: plugins.opensearch = {

tracer.use('amqp10');
tracer.use('amqplib');
tracer.use('anthropic');
tracer.use('avsc');
tracer.use('aws-sdk');
tracer.use('aws-sdk', awsSdkOptions);
Expand Down
17 changes: 17 additions & 0 deletions index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@ interface Plugins {
"aerospike": tracer.plugins.aerospike;
"amqp10": tracer.plugins.amqp10;
"amqplib": tracer.plugins.amqplib;
"anthropic": tracer.plugins.anthropic;
"apollo": tracer.plugins.apollo;
"avsc": tracer.plugins.avsc;
"aws-sdk": tracer.plugins.aws_sdk;
Expand Down Expand Up @@ -785,6 +786,8 @@ declare namespace tracer {

/** Whether to enable request body collection on RASP event
* @default false
*
* @deprecated Use UI and Remote Configuration to enable extended data collection
*/
bodyCollection?: boolean
},
Expand All @@ -809,20 +812,28 @@ declare namespace tracer {
},
/**
* Configuration for extended headers collection tied to security events
*
* @deprecated Use UI and Remote Configuration to enable extended data collection
*/
extendedHeadersCollection?: {
/** Whether to enable extended headers collection
* @default false
*
* @deprecated Use UI and Remote Configuration to enable extended data collection
*/
enabled: boolean,

/** Whether to redact collected headers
* @default true
*
* @deprecated Use UI and Remote Configuration to enable extended data collection
*/
redaction: boolean,

/** Specifies the maximum number of headers collected.
* @default 50
*
* @deprecated Use UI and Remote Configuration to enable extended data collection
*/
maxHeaders: number,
}
Expand Down Expand Up @@ -1530,6 +1541,12 @@ declare namespace tracer {
*/
interface amqplib extends Instrumentation {}

/**
* This plugin automatically instruments the
* [anthropic](https://www.npmjs.com/package/@anthropic-ai/sdk) module.
*/
interface anthropic extends Instrumentation {}

/**
* Currently this plugin automatically instruments
* [@apollo/gateway](https://github.com/apollographql/federation) for module versions >= v2.3.0.
Expand Down
8 changes: 7 additions & 1 deletion initialize.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,13 @@ ${result.source}`
const [NODE_MAJOR, NODE_MINOR] = process.versions.node.split('.').map(Number)

const brokenLoaders = NODE_MAJOR === 18 && NODE_MINOR === 0
const iitmExclusions = [/langsmith/, /openai\/_shims/, /openai\/resources\/chat\/completions\/messages/, /openai\/agents-core\/dist\/shims/]
const iitmExclusions = [
/langsmith/,
/openai\/_shims/,
/openai\/resources\/chat\/completions\/messages/,
/openai\/agents-core\/dist\/shims/,
/@anthropic-ai\/sdk\/_shims/
]

export async function load (url, context, nextLoad) {
const iitmExclusionsMatch = iitmExclusions.some((exclusion) => exclusion.test(url))
Expand Down
2 changes: 1 addition & 1 deletion integration-tests/esbuild/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,6 @@
"express": "4.21.2",
"knex": "3.1.0",
"koa": "3.0.1",
"openai": "6.1.0"
"openai": "6.2.0"
}
}
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "dd-trace",
"version": "5.70.0",
"version": "5.71.0",
"description": "Datadog APM tracing client for JavaScript",
"main": "index.js",
"typings": "index.d.ts",
Expand Down
115 changes: 115 additions & 0 deletions packages/datadog-instrumentations/src/anthropic.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
'use strict'

const { addHook } = require('./helpers/instrument')
const shimmer = require('../../datadog-shimmer')
const { channel, tracingChannel } = require('dc-polyfill')

// Tracing channel for Anthropic message-creation requests; the plugin layer
// subscribes to its start/end/asyncEnd/error events.
const anthropicTracingChannel = tracingChannel('apm:anthropic:request')
// Published once per streamed response chunk so subscribers can accumulate
// the full streamed message.
const onStreamedChunkCh = channel('apm:anthropic:request:chunk')

/**
 * Wraps the async-iterator factory of a streamed Anthropic response so that
 * every chunk is published on the chunk channel, and the request context is
 * finished when the stream completes or errors.
 */
function wrapStreamIterator (iterator, ctx) {
  return function () {
    const wrappedIterator = iterator.apply(this, arguments)

    shimmer.wrap(wrappedIterator, 'next', next => function () {
      return next.apply(this, arguments)
        .then(result => {
          const done = result.done
          const chunk = result.value
          onStreamedChunkCh.publish({ ctx, chunk, done })

          // The final `next()` call resolves with done=true: close out the span.
          if (done) finish(ctx)

          return result
        })
        .catch(err => {
          finish(ctx, null, err)
          throw err
        })
    })

    return wrappedIterator
  }
}

/**
 * Wraps `Messages.prototype.create`. Runs the call inside the tracing
 * channel's stores, instruments the returned APIPromise's `parse` method to
 * observe the resolved response, and hooks the async iterator for streamed
 * requests.
 */
function wrapCreate (create) {
  return function () {
    // Fast path: no tracing subscribers, call through untouched.
    if (!anthropicTracingChannel.start.hasSubscribers) {
      return create.apply(this, arguments)
    }

    const options = arguments[0]
    const isStreamed = options.stream
    const ctx = { options, resource: 'create' }

    return anthropicTracingChannel.start.runStores(ctx, () => {
      let requestPromise
      try {
        requestPromise = create.apply(this, arguments)
      } catch (err) {
        finish(ctx, null, err)
        throw err
      }

      // The SDK returns an APIPromise; wrap its `parse` so we see the parsed
      // response (or attach the stream-iterator wrapper for streamed calls).
      shimmer.wrap(requestPromise, 'parse', parse => function () {
        return parse.apply(this, arguments)
          .then(response => {
            if (isStreamed) {
              shimmer.wrap(response, Symbol.asyncIterator, iterator => wrapStreamIterator(iterator, ctx))
            } else {
              finish(ctx, response, null)
            }

            return response
          })
          .catch(err => {
            finish(ctx, null, err)
            throw err
          })
      })

      // Synchronous portion of the request is over.
      anthropicTracingChannel.end.publish(ctx)

      return requestPromise
    })
  }
}

/**
 * Publishes the terminal events for a request: an error event when one
 * occurred, then asyncEnd with the final result attached to the context.
 */
function finish (ctx, result, error) {
  if (error) {
    ctx.error = error
    anthropicTracingChannel.error.publish(ctx)
  }

  // Streamed responses set ctx.result themselves; only fill it in when the
  // chunk handling has not already done so.
  if (ctx.result == null) {
    ctx.result = result
  }

  anthropicTracingChannel.asyncEnd.publish(ctx)
}

// Both CJS and ESM builds of the SDK need hooking.
for (const extension of ['js', 'mjs']) {
  // v0.14.0 – v0.32.x expose Messages from resources/messages.
  addHook({
    name: '@anthropic-ai/sdk',
    file: `resources/messages.${extension}`,
    versions: ['>=0.14.0 <0.33.0']
  }, patchMessages)

  // v0.33.0+ moved the module to resources/messages/messages.
  addHook({
    name: '@anthropic-ai/sdk',
    file: `resources/messages/messages.${extension}`,
    versions: ['>=0.33.0']
  }, patchMessages)
}

// Shared hook body: instrument Messages.prototype.create on the loaded module.
function patchMessages (exports) {
  shimmer.wrap(exports.Messages.prototype, 'create', wrapCreate)

  return exports
}
Loading