Learn how to use the Langtrace SDK with a self-hosted setup.
Install the SDK for the language you are using.

Python:

```bash
pip install langtrace-python-sdk openai
```

TypeScript (add the SDK and the OpenAI client to your `package.json` dependencies):

```json
{
  "dependencies": {
    "@langtrase/typescript-sdk": "^3.3.2",
    "openai": "^4.50.0"
  }
}
```
Initialize the SDK and point `api_host` at the trace endpoint of your self-hosted Langtrace instance, for example `http://localhost:3000/api/trace`:

```python
from langtrace_python_sdk import langtrace

langtrace.init(
    api_key="<YOUR API KEY>",
    api_host="http://localhost:3000/api/trace",
)
```
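If you prefer not to hardcode credentials, here is a minimal sketch that reads the key from an environment variable instead. The variable name `LANGTRACE_API_KEY` is just a convention chosen for this example (you export it yourself); the value is read explicitly with `os.environ` rather than relying on any SDK behavior.

```python
import os

from langtrace_python_sdk import langtrace

# Assumes you have exported LANGTRACE_API_KEY in your shell; the name is an
# arbitrary choice for this sketch and is read manually via os.environ.
langtrace.init(
    api_key=os.environ["LANGTRACE_API_KEY"],
    api_host="http://localhost:3000/api/trace",
)
```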
Here is a complete example, `main.py`, that initializes Langtrace and wraps an OpenAI call in a root span:

```python
from langtrace_python_sdk import langtrace
from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
from openai import OpenAI

langtrace.init(
    api_key="<YOUR API KEY>",
    api_host="http://localhost:3000/api/trace",
)


@with_langtrace_root_span()
def example():
    client = OpenAI()
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "system",
                "content": "How many states of matter are there?"
            }
        ],
    )
    print(response.choices[0].message.content)


example()
```
Export your OpenAI API key and run the example:

```bash
export OPENAI_API_KEY="<YOUR OPENAI API KEY>"
python main.py
```
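The root span decorator is what groups instrumented calls into a single trace. As a sketch of how that grouping works (the function name and prompts below are hypothetical, and this assumes the instrumented OpenAI calls are recorded under the active root span, which is the decorator's stated purpose), several completions made inside one decorated function show up together as one trace:

```python
from langtrace_python_sdk import langtrace
from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
from openai import OpenAI

langtrace.init(
    api_key="<YOUR API KEY>",
    api_host="http://localhost:3000/api/trace",
)

client = OpenAI()


@with_langtrace_root_span()
def summarize_then_translate(text):
    # Both completions run inside the same root span, so they appear
    # together as one trace in your self-hosted Langtrace dashboard.
    summary = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": f"Summarize this: {text}"}],
    )
    translation = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "user",
                "content": "Translate to French: "
                + summary.choices[0].message.content,
            }
        ],
    )
    return translation.choices[0].message.content


print(summarize_then_translate("Water can exist as a solid, liquid, or gas."))
```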
json "type": "module"