58
58
LITELLM_ROUTER_INSTANCE_KEY ,
59
59
)
60
60
61
# Allowlist of LiteLLM chat-completion kwargs that are recorded as span
# metadata: only keys present here are copied out of the call kwargs
# (see get_metadata_from_kwargs).
# NOTE(review): presumably mirrors the parameter set accepted by
# litellm.completion() — confirm against the LiteLLM API when updating.
LITELLM_METADATA_CHAT_KEYS = (
    "timeout",
    "temperature",
    "top_p",
    "n",
    "stream",
    "stream_options",
    "stop",
    "max_completion_tokens",
    "max_tokens",
    "modalities",
    "prediction",
    "presence_penalty",
    "frequency_penalty",
    "logit_bias",
    "user",
    "response_format",
    "seed",
    "tool_choice",
    "parallel_tool_calls",
    "logprobs",
    "top_logprobs",
    "deployment_id",
    "reasoning_effort",
    "base_url",
    "api_base",
    "api_version",
    "model_list",
)
90
# Allowlist of LiteLLM text-completion kwargs that are recorded as span
# metadata (see get_metadata_from_kwargs). Deliberately excludes the
# prompt itself, which is tagged separately as an input message.
# NOTE(review): presumably mirrors the parameter set accepted by
# litellm.text_completion() — confirm against the LiteLLM API when updating.
LITELLM_METADATA_COMPLETION_KEYS = (
    "best_of",
    "echo",
    "frequency_penalty",
    "logit_bias",
    "logprobs",
    "max_tokens",
    "n",
    "presence_penalty",
    "stop",
    "stream",
    "stream_options",
    "suffix",
    "temperature",
    "top_p",
    "user",
    "api_base",
    "api_version",
    "model_list",
    "custom_llm_provider",
)
111
+
61
112
62
113
def extract_model_name_google (instance , model_name_attr ):
63
114
"""Extract the model name from the instance.
@@ -297,12 +348,14 @@ def get_messages_from_converse_content(role: str, content: List[Dict[str, Any]])
297
348
return messages
298
349
299
350
300
- def openai_set_meta_tags_from_completion (span : Span , kwargs : Dict [str , Any ], completions : Any ) -> None :
351
+ def openai_set_meta_tags_from_completion (
352
+ span : Span , kwargs : Dict [str , Any ], completions : Any , integration_name : str = "openai"
353
+ ) -> None :
301
354
"""Extract prompt/response tags from a completion and set them as temporary "_ml_obs.meta.*" tags."""
302
355
prompt = kwargs .get ("prompt" , "" )
303
356
if isinstance (prompt , str ):
304
357
prompt = [prompt ]
305
- parameters = { k : v for k , v in kwargs . items () if k not in OPENAI_SKIPPED_COMPLETION_TAGS }
358
+ parameters = get_metadata_from_kwargs ( kwargs , integration_name , "completion" )
306
359
output_messages = [{"content" : "" }]
307
360
if not span .error and completions :
308
361
choices = getattr (completions , "choices" , completions )
@@ -316,7 +369,9 @@ def openai_set_meta_tags_from_completion(span: Span, kwargs: Dict[str, Any], com
316
369
)
317
370
318
371
319
- def openai_set_meta_tags_from_chat (span : Span , kwargs : Dict [str , Any ], messages : Optional [Any ]) -> None :
372
+ def openai_set_meta_tags_from_chat (
373
+ span : Span , kwargs : Dict [str , Any ], messages : Optional [Any ], integration_name : str = "openai"
374
+ ) -> None :
320
375
"""Extract prompt/response tags from a chat completion and set them as temporary "_ml_obs.meta.*" tags."""
321
376
input_messages = []
322
377
for m in kwargs .get ("messages" , []):
@@ -340,7 +395,7 @@ def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages:
340
395
for tool_call in tool_calls
341
396
]
342
397
input_messages .append (processed_message )
343
- parameters = { k : v for k , v in kwargs . items () if k not in OPENAI_SKIPPED_CHAT_TAGS }
398
+ parameters = get_metadata_from_kwargs ( kwargs , integration_name , "chat" )
344
399
span ._set_ctx_items ({INPUT_MESSAGES : input_messages , METADATA : parameters })
345
400
346
401
if span .error or not messages :
@@ -412,6 +467,19 @@ def openai_set_meta_tags_from_chat(span: Span, kwargs: Dict[str, Any], messages:
412
467
span ._set_ctx_item (OUTPUT_MESSAGES , output_messages )
413
468
414
469
470
def get_metadata_from_kwargs(
    kwargs: Dict[str, Any], integration_name: str = "openai", operation: str = "chat"
) -> Dict[str, Any]:
    """Return the subset of ``kwargs`` that should be tagged as span metadata.

    OpenAI filtering is a denylist (drop keys known not to be metadata),
    while LiteLLM filtering is an allowlist of recognized chat/completion
    parameters. Any other ``integration_name`` yields an empty dict.

    :param kwargs: the keyword arguments passed to the traced LLM call.
    :param integration_name: either ``"openai"`` or ``"litellm"``.
    :param operation: either ``"chat"`` or ``"completion"``; selects which
        key set is used for filtering.
    """
    if integration_name == "openai":
        # Denylist: keep everything except the known non-metadata keys.
        denied = OPENAI_SKIPPED_CHAT_TAGS if operation == "chat" else OPENAI_SKIPPED_COMPLETION_TAGS
        return {key: value for key, value in kwargs.items() if key not in denied}
    if integration_name == "litellm":
        # Allowlist: keep only the recognized LiteLLM parameters.
        allowed = LITELLM_METADATA_CHAT_KEYS if operation == "chat" else LITELLM_METADATA_COMPLETION_KEYS
        return {key: value for key, value in kwargs.items() if key in allowed}
    return {}
481
+
482
+
415
483
def openai_get_input_messages_from_response_input (
416
484
messages : Optional [Union [str , List [Dict [str , Any ]]]]
417
485
) -> List [Dict [str , Any ]]:
0 commit comments