from traceloop.sdk.tracing.manual import LLMMessage, LLMUsage, track_llm_call

# Manually instrument an OpenAI chat completion with Traceloop:
# open a span, report the request, make the call, then report the
# response text and token usage on the same span.
# NOTE(review): assumes `openai_client` (an openai.OpenAI instance) is
# defined elsewhere in the surrounding module — confirm against caller.
with track_llm_call(vendor="openai", type="chat") as span:
    # Record what we are about to send, before the network call.
    span.report_request(
        model="gpt-3.5-turbo",
        messages=[
            LLMMessage(role="user", content="Tell me a joke about opentelemetry")
        ],
    )

    res = openai_client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "user", "content": "Tell me a joke about opentelemetry"}
        ],
    )

    # Report the model actually used and the text of every returned choice.
    span.report_response(res.model, [choice.message.content for choice in res.choices])

    # Token counts come from the API response itself; the cached-token
    # breakdown lives under prompt_tokens_details and may be absent on
    # older API versions, hence the defensive getattr fallbacks.
    prompt_details = getattr(res.usage, "prompt_tokens_details", None)
    cached_tokens = getattr(prompt_details, "cached_tokens", 0) or 0
    span.report_usage(
        LLMUsage(
            prompt_tokens=res.usage.prompt_tokens,
            completion_tokens=res.usage.completion_tokens,
            total_tokens=res.usage.total_tokens,
            cache_creation_input_tokens=0,  # OpenAI does not report cache-write tokens
            cache_read_input_tokens=cached_tokens,
        )
    )