import os
from langtrace_python_sdk import langtrace  # Must precede any LLM module imports
from openai import OpenAI

langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])
client = OpenAI(
    # This is the default and can be omitted
    api_key=os.environ.get("OPENAI_API_KEY"),
)
# Generate a simple completion with OpenAI's GPT-3.5 model
chat_completion = client.chat.completions.create(
    messages=[
        {
            "role": "user",
            "content": "What is LangChain?",
        }
    ],
    model="gpt-3.5-turbo",
)
print(chat_completion.choices[0].message.content)
# Let's also create some embeddings
response = client.embeddings.create(
    input="Your text string goes here",
    model="text-embedding-3-small",
)
print(response.data[0].embedding)
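
# For reference: the embedding is a plain list of floats. With
# text-embedding-3-small the default vector length is 1536 (it can be
# reduced via the optional `dimensions` parameter).
print(len(response.data[0].embedding))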