"""Send a test chat-completion request to an Azure OpenAI deployment, traced via Langtrace.

Requires the following environment variables:
    LANGTRACE_API_KEY     -- Langtrace API key for tracing.
    AZURE_OPENAI_API_KEY  -- Azure OpenAI resource key.
    AZURE_API_VERSION     -- Azure OpenAI REST API version string.
    AZURE_OPENAI_ENDPOINT -- Base URL of the Azure OpenAI resource.
    AZURE_DEPLOYMENT_NAME -- Name of the model deployment to call.
"""
import os

from langtrace_python_sdk import langtrace  # Must precede any LLM module imports
from openai import AzureOpenAI

# Initialize tracing before any LLM calls are made so they get instrumented.
langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])

client = AzureOpenAI(
    api_key=os.environ["AZURE_OPENAI_API_KEY"],
    api_version=os.environ["AZURE_API_VERSION"],
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
)

deployment_name = os.environ["AZURE_DEPLOYMENT_NAME"]

print('Sending a test completion job')

# Generate a simple output with your deployment's model.
# Reuse deployment_name here (the original re-read the env var and left the
# variable unused).
response = client.chat.completions.create(
    model=deployment_name,
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Does Azure OpenAI support customer managed keys?"},
        {"role": "assistant", "content": "Yes, customer managed keys are supported by Azure OpenAI."},
        {"role": "user", "content": "Do other Azure AI services support this too?"},
    ],
)

print(response.choices[0].message.content)