import os
from openai import OpenAI
from langtrace_python_sdk import langtrace
from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import \
OTLPSpanExporter
# Configure span export to an OpenTelemetry collector over gRPC.
# The endpoint comes from the standard OTEL_EXPORTER_OTLP_ENDPOINT
# environment variable (e.g. "localhost:4317").
collector_endpoint = os.environ.get("OTEL_EXPORTER_OTLP_ENDPOINT")

otlp_exporter = OTLPSpanExporter(
    endpoint=collector_endpoint,
    # Plaintext gRPC; switch to False when the collector terminates TLS.
    insecure=True,
)

# Initialize langtrace with the custom exporter so all spans flow to the
# collector instead of the default backend.
langtrace.init(custom_remote_exporter=otlp_exporter)
# rest of your code
@with_langtrace_root_span()  # Optional: wraps the whole run in one parent root span
def app():
    """Send a single chat-completion request and print the model's reply.

    The API key is read from the ``OPENAI_API_KEY`` environment variable
    rather than being hard-coded, so a real key is never committed with
    the source.

    Returns:
        str | None: the assistant's reply text (``None`` if the API
        returned no content).
    """
    # OpenAI() would pick up OPENAI_API_KEY on its own; passing it
    # explicitly keeps the example self-documenting.
    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {
                # NOTE(review): the question is sent with the "system" role;
                # a "user" role is more conventional — confirm intent.
                "role": "system",
                "content": "How many states of matter are there?"
            }
        ],
    )
    content = response.choices[0].message.content
    print(content)
    return content
app()