Skip to content

Commit 9e54192

Browse files
authored
Update tabs to code groups in LangSmith (#213)
This PR updates all of the Tabs to CodeGroups in the LangSmith directory to enable code tab syncing for users. Preview: https://langchain-5e9cc07a-preview-codegr-1755876371-43c5fc7.mintlify.app/langsmith/home
1 parent da83aab commit 9e54192

File tree

60 files changed

+6684
-6361
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

60 files changed

+6684
-6361
lines changed

src/docs.json

Lines changed: 396 additions & 0 deletions
Large diffs are not rendered by default.

src/langsmith/access-current-span.mdx

Lines changed: 16 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,12 @@ You can access the current run by calling the `get_current_run_tree`/`getCurrent
99

1010
For a full list of available properties on the `RunTree` object, see [this reference](/langsmith/run-data-format).
1111

12-
<Tabs>
13-
<Tab title="Python">
14-
```python
15-
from langsmith import traceable
16-
from langsmith.run_helpers import get_current_run_tree
17-
from openai import Client
12+
<CodeGroup>
13+
14+
```python Python
15+
from langsmith import traceable
16+
from langsmith.run_helpers import get_current_run_tree
17+
from openai import Client
1818

1919
openai = Client()
2020

@@ -62,13 +62,12 @@ For a full list of available properties on the `RunTree` object, see [this refer
6262
response = invoke_llm(messages)
6363
return parse_output(response)
6464

65-
run_pipeline()
66-
```
67-
</Tab>
68-
<Tab title="TypeScript">
69-
```typescript
70-
import { traceable, getCurrentRunTree } from "langsmith/traceable";
71-
import OpenAI from "openai";
65+
run_pipeline()
66+
```
67+
68+
```typescript TypeScript
69+
import { traceable, getCurrentRunTree } from "langsmith/traceable";
70+
import OpenAI from "openai";
7271

7372
const openai = new OpenAI();
7473

@@ -128,7 +127,7 @@ For a full list of available properties on the `RunTree` object, see [this refer
128127
{ name: "runPipeline" }
129128
);
130129

131-
await runPipeline();
132-
```
133-
</Tab>
134-
</Tabs>
130+
await runPipeline();
131+
```
132+
133+
</CodeGroup>

src/langsmith/add-metadata-tags.mdx

Lines changed: 34 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -9,18 +9,18 @@ Tags are strings that can be used to categorize or label a trace. Metadata is a
99

1010
Both are useful for associating additional information with a trace, such as the environment in which it was executed, the user who initiated it, or an internal correlation ID. For more information on tags and metadata, see the [Concepts](/langsmith/observability-concepts#tags) page. For information on how to query traces and runs by metadata and tags, see the [Filter traces in the application](/langsmith/filter-traces-in-application) page.
1111

12-
<Tabs>
13-
<Tab title="Python">
14-
```python
15-
import openai
16-
import langsmith as ls
17-
from langsmith.wrappers import wrap_openai
12+
<CodeGroup>
1813

19-
client = openai.Client()
20-
messages = [
21-
{"role": "system", "content": "You are a helpful assistant."},
22-
{"role": "user", "content": "Hello!"}
23-
]
14+
```python Python
15+
import openai
16+
import langsmith as ls
17+
from langsmith.wrappers import wrap_openai
18+
19+
client = openai.Client()
20+
messages = [
21+
{"role": "system", "content": "You are a helpful assistant."},
22+
{"role": "user", "content": "Hello!"}
23+
]
2424

2525
# You can set metadata & tags **statically** when decorating a function
2626
# Use the @traceable decorator with tags and metadata
@@ -65,25 +65,24 @@ Both are useful for associating additional information with a trace, such as the
6565
rt.metadata["some-conditional-key"] = "some-val"
6666
rt.end(outputs={"output": chat_completion})
6767

68-
# You can use the same techniques with the wrapped client
69-
patched_client = wrap_openai(
70-
client, tracing_extra={"metadata": {"my-key": "my-value"}, "tags": ["a-tag"]}
71-
)
72-
chat_completion = patched_client.chat.completions.create(
73-
model="gpt-4o-mini",
74-
messages=messages,
75-
langsmith_extra={
76-
"tags": ["my-other-tag"],
77-
"metadata": {"my-other-key": "my-value"},
78-
},
79-
)
80-
```
81-
</Tab>
82-
<Tab title="TypeScript">
83-
```typescript
84-
import OpenAI from "openai";
85-
import { traceable, getCurrentRunTree } from "langsmith/traceable";
86-
import { wrapOpenAI } from "langsmith/wrappers";
68+
# You can use the same techniques with the wrapped client
69+
patched_client = wrap_openai(
70+
client, tracing_extra={"metadata": {"my-key": "my-value"}, "tags": ["a-tag"]}
71+
)
72+
chat_completion = patched_client.chat.completions.create(
73+
model="gpt-4o-mini",
74+
messages=messages,
75+
langsmith_extra={
76+
"tags": ["my-other-tag"],
77+
"metadata": {"my-other-key": "my-value"},
78+
},
79+
)
80+
```
81+
82+
```typescript TypeScript
83+
import OpenAI from "openai";
84+
import { traceable, getCurrentRunTree } from "langsmith/traceable";
85+
import { wrapOpenAI } from "langsmith/wrappers";
8786

8887
const client = wrapOpenAI(new OpenAI());
8988
const messages: OpenAI.Chat.ChatCompletionMessageParam[] = [
@@ -113,8 +112,8 @@ Both are useful for associating additional information with a trace, such as the
113112
}
114113
);
115114

116-
// Call the traceable function
117-
await traceableCallOpenAI(messages);
118-
```
119-
</Tab>
120-
</Tabs>
115+
// Call the traceable function
116+
await traceableCallOpenAI(messages);
117+
```
118+
119+
</CodeGroup>

0 commit comments

Comments
 (0)