4 changes: 4 additions & 0 deletions packages/sample-app/pyproject.toml
@@ -57,6 +57,10 @@ ibm-watson-machine-learning = "^1.0.367"
ollama = "^0.4.7"
mcp = "^1.7.1"
openai-agents = "^0.2.7"
fastapi = "^0.115.0"
uvicorn = "^0.32.0"
opentelemetry-instrumentation-fastapi = "^0.48b0"
fastmcp = "*"


[tool.poetry.dependencies.opentelemetry-instrumentation-openai]
71 changes: 71 additions & 0 deletions packages/sample-app/sample_app/fastapi_example.py
@@ -0,0 +1,71 @@
from fastapi import FastAPI
from openai import OpenAI
from traceloop.sdk import Traceloop
from traceloop.sdk.decorators import workflow

# Initialize Traceloop SDK
Traceloop.init(
app_name="fastapi-example",
disable_batch=True,
)

# Create FastAPI app
app = FastAPI()

# Initialize OpenAI client
client = OpenAI()


@app.get("/")
async def root():
return {"message": "Hello World"}


@app.get("/health")
async def health_check():
return {"status": "healthy"}


@app.get("/joke")
@workflow(name="openai_joke_generator")
async def generate_joke():
"""Generate a joke using OpenAI and return it via FastAPI endpoint."""
response = client.chat.completions.create(
model="gpt-3.5-turbo",
messages=[
{
"role": "user",
"content": "Tell me a funny joke about APIs and web services"
}
],
max_tokens=150,
temperature=0.9
)

joke = response.choices[0].message.content
return {"joke": joke}


@app.get("/story/{topic}")
@workflow(name="openai_story_generator")
async def generate_story(topic: str, length: int = 100):
"""Generate a short story about the given topic."""
response = client.chat.completions.create(
model="gpt-3.5-turbo",
messages=[
{
"role": "user",
"content": f"Write a short story about {topic} in approximately {length} words"
}
],
max_tokens=length * 2,
temperature=0.7
)

story = response.choices[0].message.content
return {"topic": topic, "story": story, "requested_length": length}


if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)
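For local testing, a minimal client sketch like the following exercises the new endpoints (not part of this PR; it assumes the app is running at localhost:8000 and that the requests package is available):

import requests

BASE_URL = "http://localhost:8000"  # assumed local dev address

# Hit the health check, then the two LLM-backed endpoints.
print(requests.get(f"{BASE_URL}/health").json())
print(requests.get(f"{BASE_URL}/joke").json()["joke"])
print(requests.get(f"{BASE_URL}/story/observability", params={"length": 80}).json()["story"])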
187 changes: 187 additions & 0 deletions packages/sample-app/sample_app/fastapi_instrumented_example.py
@@ -0,0 +1,187 @@
from fastapi import FastAPI, HTTPException
from openai import OpenAI
from pydantic import BaseModel
from typing import Optional
import logging

from traceloop.sdk import Traceloop
from traceloop.sdk.decorators import workflow, task
from traceloop.sdk.instruments import Instruments
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# Initialize Traceloop SDK with console exporter for debugging
from opentelemetry.sdk.trace.export import ConsoleSpanExporter

Traceloop.init(
app_name="fastapi-openllmetry-example",
disable_batch=True,
exporter=ConsoleSpanExporter(),
instruments={Instruments.FASTAPI, Instruments.OPENAI},  # include OpenAI so the LLM calls stay traced
)

# Create FastAPI app
app = FastAPI(
title="OpenLLMetry FastAPI Example",
description="Example FastAPI application with OpenTelemetry tracing and LLM instrumentation",
version="1.0.0"
)

# Instrument FastAPI with OpenTelemetry
FastAPIInstrumentor.instrument_app(app)

# Initialize OpenAI client
client = OpenAI()


# Pydantic models for request/response
class ChatRequest(BaseModel):
message: str
model: Optional[str] = "gpt-3.5-turbo"
temperature: Optional[float] = 0.7
max_tokens: Optional[int] = 150


class ChatResponse(BaseModel):
response: str
model: str
tokens_used: Optional[int] = None


class JokeResponse(BaseModel):
joke: str
category: str


@app.get("/")
async def root():
"""Root endpoint."""
return {"message": "Welcome to OpenLLMetry FastAPI Example", "version": "1.0.0"}


@app.get("/health")
async def health_check():
"""Health check endpoint."""
return {"status": "healthy", "service": "fastapi-openllmetry-example"}


@task(name="validate_input")
def validate_chat_input(request: ChatRequest):
"""Validate the chat request input."""
if not request.message.strip():
raise HTTPException(status_code=400, detail="Message cannot be empty")

if len(request.message) > 1000:
raise HTTPException(status_code=400, detail="Message too long (max 1000 characters)")

return True


@task(name="call_openai_api")
def call_openai_chat(request: ChatRequest):
"""Make a call to OpenAI API."""
try:
response = client.chat.completions.create(
model=request.model,
messages=[
{
"role": "system",
"content": "You are a helpful assistant. Provide clear and concise responses."
},
{
"role": "user",
"content": request.message
}
],
max_tokens=request.max_tokens,
temperature=request.temperature
)

return response
except Exception as e:
logger.error(f"OpenAI API call failed: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to generate response")


@app.post("/chat", response_model=ChatResponse)
@workflow(name="chat_completion")
async def chat_completion(request: ChatRequest):
"""Generate a chat completion using OpenAI."""
# Validate input
validate_chat_input(request)

# Call OpenAI API
response = call_openai_chat(request)

# Extract response content
content = response.choices[0].message.content
tokens_used = response.usage.total_tokens if response.usage else None

return ChatResponse(
response=content,
model=request.model,
tokens_used=tokens_used
)


@app.get("/joke/{category}", response_model=JokeResponse)
@workflow(name="joke_generator")
async def generate_joke(category: str):
"""Generate a joke in the specified category."""
if category not in ["programming", "api", "tech", "general"]:
raise HTTPException(
status_code=400,
detail="Category must be one of: programming, api, tech, general"
)

try:
response = client.chat.completions.create(
model="gpt-3.5-turbo",
messages=[
{
"role": "user",
"content": f"Tell me a clean, funny joke about {category}"
}
],
max_tokens=100,
temperature=0.9
)

joke = response.choices[0].message.content
return JokeResponse(joke=joke, category=category)

except Exception as e:
logger.error(f"Joke generation failed: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to generate joke")


@app.get("/metrics")
async def get_metrics():
"""Get basic metrics about the service."""
return {
"service": "fastapi-openllmetry-example",
"tracing": "enabled",
"instrumentation": ["fastapi", "openai", "opentelemetry"],
"endpoints": [
"/",
"/health",
"/chat",
"/joke/{category}",
"/metrics"
]
}


if __name__ == "__main__":
import uvicorn

logger.info("Starting FastAPI application with OpenTelemetry instrumentation")
uvicorn.run(
app,
host="0.0.0.0",
port=8000,
log_level="info"
)
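Once the app is up, the /chat endpoint can be sanity-checked with a payload shaped like the ChatRequest model above (a sketch, assuming localhost:8000 and the requests package; the optional fields fall back to the model defaults):

import requests

payload = {"message": "Summarize what OpenLLMetry does in one sentence."}
resp = requests.post("http://localhost:8000/chat", json=payload, timeout=30)
resp.raise_for_status()
print(resp.json())  # expected shape: {"response": ..., "model": ..., "tokens_used": ...}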
5 changes: 4 additions & 1 deletion packages/sample-app/sample_app/mcp_dev_assistant_demo.py
@@ -15,6 +15,7 @@
from mcp.client.stdio import stdio_client
from opentelemetry.sdk.trace.export import ConsoleSpanExporter
from traceloop.sdk import Traceloop
from traceloop.sdk.instruments import Instruments


class MCPDevAssistantDemo:
@@ -26,7 +27,9 @@ def __init__(self):
Traceloop.init(
app_name="mcp-dev-assistant-demo-client",
exporter=ConsoleSpanExporter(),
disable_batch=True, # For real-time tracing in demo
disable_batch=True,  # For real-time tracing in demo
instruments={Instruments.FASTAPI, Instruments.MCP},
)

async def connect_to_dev_assistant(self):
5 changes: 4 additions & 1 deletion packages/sample-app/sample_app/mcp_dev_assistant_server.py
@@ -15,14 +15,17 @@
from fastmcp import FastMCP
from pydantic import BaseModel
from traceloop.sdk import Traceloop
from traceloop.sdk.instruments import Instruments

# Load environment variables
load_dotenv()

# Initialize OpenTelemetry with Traceloop SDK (automatically includes MCP instrumentation)
Traceloop.init(
app_name="dev-assistant-mcp-server",
disable_batch=True, # For real-time tracing in demo
disable_batch=True,  # For real-time tracing in demo
instruments={Instruments.FASTAPI, Instruments.MCP},
)

# Initialize the MCP server
1 change: 1 addition & 0 deletions packages/traceloop-sdk/traceloop/sdk/instruments.py
@@ -9,6 +9,7 @@ class Instruments(Enum):
COHERE = "cohere"
CREWAI = "crewai"
CREW = "crewai" # deprecated alias; remove in future major version
FASTAPI = "fastapi"
GOOGLE_GENERATIVEAI = "google_generativeai"
GROQ = "groq"
HAYSTACK = "haystack"
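With the new enum member, applications can opt in to FastAPI instrumentation explicitly, mirroring the sample apps above (a sketch; the app_name is illustrative):

from traceloop.sdk import Traceloop
from traceloop.sdk.instruments import Instruments

# Passing an explicit instruments set limits initialization to the listed instrumentors.
Traceloop.init(app_name="my-service", instruments={Instruments.FASTAPI, Instruments.OPENAI})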
24 changes: 21 additions & 3 deletions packages/traceloop-sdk/traceloop/sdk/tracing/tracing.py
@@ -458,6 +458,9 @@ def init_instrumentations(
elif instrument == Instruments.CREWAI:
if init_crewai_instrumentor():
instrument_set = True
elif instrument == Instruments.FASTAPI:
if init_fastapi_instrumentor():
instrument_set = True
elif instrument == Instruments.GOOGLE_GENERATIVEAI:
if init_google_generativeai_instrumentor(should_enrich_metrics, base64_image_uploader):
instrument_set = True
@@ -1132,9 +1135,7 @@ def init_mcp_instrumentor():
Telemetry().capture("instrumentation:mcp:init")
from opentelemetry.instrumentation.mcp import McpInstrumentor

instrumentor = McpInstrumentor(
exception_logger=lambda e: Telemetry().log_exception(e),
)
instrumentor = McpInstrumentor()
if not instrumentor.is_instrumented_by_opentelemetry:
instrumentor.instrument()
return True
Expand All @@ -1144,6 +1145,23 @@ def init_mcp_instrumentor():
return False


def init_fastapi_instrumentor():
try:
if is_package_installed("fastapi"):
Telemetry().capture("instrumentation:fastapi:init")
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor

instrumentor = FastAPIInstrumentor()
if not instrumentor.is_instrumented_by_opentelemetry:
instrumentor.instrument()
print("NOMI - FastAPI instrumentor initialized")
return True
except Exception as e:
logging.error(f"Error initializing FastAPI instrumentor: {e}")
Telemetry().log_exception(e)
return False


def init_openai_agents_instrumentor():
try:
if is_package_installed("openai-agents"):