Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions python/frameworks/azure_openai/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
## [0.1.0] - 2026-04-10
### Feature
- Initial release with Azure OpenAI instrumentation support
- Chat completions, embeddings, and completions tracing
- Azure-specific attribute capture (deployment, API version, endpoint)
- Streaming and non-streaming response support
- Async and sync client support
84 changes: 84 additions & 0 deletions python/frameworks/azure_openai/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
# Azure OpenAI OpenTelemetry Integration

## Overview
This integration provides support for using OpenTelemetry with Azure OpenAI. It enables tracing and monitoring of applications built with Azure OpenAI, capturing chat completions, embeddings, and completions with Azure-specific attributes such as deployment name and API version.

## Installation

1. **Install traceAI Azure OpenAI**

```bash
pip install traceAI-azure-openai
```

### Set Environment Variables
Set up your environment variables to authenticate with FutureAGI and Azure OpenAI

```python
import os

os.environ["FI_API_KEY"] = FI_API_KEY
os.environ["FI_SECRET_KEY"] = FI_SECRET_KEY
os.environ["AZURE_OPENAI_ENDPOINT"] = AZURE_OPENAI_ENDPOINT
os.environ["AZURE_OPENAI_API_KEY"] = AZURE_OPENAI_API_KEY
os.environ["AZURE_OPENAI_DEPLOYMENT"] = AZURE_OPENAI_DEPLOYMENT
os.environ["AZURE_OPENAI_API_VERSION"] = AZURE_OPENAI_API_VERSION
```

## Quickstart

### Register Tracer Provider
Set up the trace provider to establish the observability pipeline. The trace provider manages how spans from your application are collected and exported.

```python
from fi_instrumentation import register
from fi_instrumentation.fi_types import ProjectType

trace_provider = register(
project_type=ProjectType.OBSERVE,
project_name="azure_openai_app"
)
```

### Configure Azure OpenAI Instrumentation
Attach the instrumentor to the trace provider so that Azure OpenAI calls are traced automatically.

```python
from traceai_azure_openai import AzureOpenAIInstrumentor

AzureOpenAIInstrumentor().instrument(tracer_provider=trace_provider)
```

### Create Azure OpenAI Components
Set up your Azure OpenAI client with built-in observability.

```python
from openai import AzureOpenAI

client = AzureOpenAI(
azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
api_key=os.environ["AZURE_OPENAI_API_KEY"],
api_version=os.environ["AZURE_OPENAI_API_VERSION"],
)

response = client.chat.completions.create(
model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
messages=[
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": "Can you tell me a joke?"}
]
)

print(response.choices[0].message.content)
```

## Azure-Specific Attributes

This instrumentation captures the following Azure-specific span attributes in addition to the standard GenAI semantic conventions:

| Attribute | Description |
|-----------|-------------|
| `gen_ai.provider.name` | Set to `azure` |
| `gen_ai.azure.deployment` | The Azure OpenAI deployment name |
| `gen_ai.azure.api_version` | The Azure OpenAI API version |
| `server.address` | The Azure OpenAI endpoint hostname |
35 changes: 35 additions & 0 deletions python/frameworks/azure_openai/examples/async_chat_completions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import asyncio
import os

from openai import AsyncAzureOpenAI

from fi_instrumentation.otel import register
from fi_instrumentation.fi_types import ProjectType
from traceai_azure_openai import AzureOpenAIInstrumentor

# Build the observability pipeline for this example application.
trace_provider = register(
    project_type=ProjectType.OBSERVE,
    project_name="azure_openai_async_app",
)

# Patch the async Azure OpenAI client so requests are traced.
AzureOpenAIInstrumentor().instrument(tracer_provider=trace_provider)


async def main() -> None:
    """Send a single async chat completion request and print the reply."""
    azure_client = AsyncAzureOpenAI(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        api_key=os.environ["AZURE_OPENAI_API_KEY"],
        api_version=os.environ.get("AZURE_OPENAI_API_VERSION", "2024-02-01"),
    )

    completion = await azure_client.chat.completions.create(
        model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
        messages=[{"role": "user", "content": "Write a haiku about Azure cloud."}],
        max_tokens=50,
    )
    print(completion.choices[0].message.content)


if __name__ == "__main__":
    asyncio.run(main())
30 changes: 30 additions & 0 deletions python/frameworks/azure_openai/examples/chat_completions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import os

from openai import AzureOpenAI

from fi_instrumentation.otel import register
from fi_instrumentation.fi_types import ProjectType
from traceai_azure_openai import AzureOpenAIInstrumentor

# Build the observability pipeline for this example application.
trace_provider = register(
    project_type=ProjectType.OBSERVE,
    project_name="azure_openai_app",
)

# Patch the Azure OpenAI client so requests are traced.
AzureOpenAIInstrumentor().instrument(tracer_provider=trace_provider)


if __name__ == "__main__":
    azure_client = AzureOpenAI(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        api_key=os.environ["AZURE_OPENAI_API_KEY"],
        api_version=os.environ.get("AZURE_OPENAI_API_VERSION", "2024-02-01"),
    )

    # Send a single chat completion request and print the reply.
    completion = azure_client.chat.completions.create(
        model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
        messages=[{"role": "user", "content": "Write a haiku."}],
        max_tokens=20,
    )
    print(completion.choices[0].message.content)
35 changes: 35 additions & 0 deletions python/frameworks/azure_openai/examples/chat_completions_stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import os

from openai import AzureOpenAI

from fi_instrumentation.otel import register
from fi_instrumentation.fi_types import ProjectType
from traceai_azure_openai import AzureOpenAIInstrumentor

# Configure the trace provider that collects and exports spans.
trace_provider = register(
    project_type=ProjectType.OBSERVE,
    project_name="azure_openai_stream_app",
)

# Initialize the Azure OpenAI instrumentor so streamed requests are traced.
AzureOpenAIInstrumentor().instrument(tracer_provider=trace_provider)


if __name__ == "__main__":
    client = AzureOpenAI(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        api_key=os.environ["AZURE_OPENAI_API_KEY"],
        api_version=os.environ.get("AZURE_OPENAI_API_VERSION", "2024-02-01"),
    )

    # Request a streamed chat completion and print the tokens as they arrive.
    stream = client.chat.completions.create(
        model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
        messages=[{"role": "user", "content": "Tell me a short story."}],
        max_tokens=100,
        stream=True,
    )

    for chunk in stream:
        # Fix: Azure OpenAI can emit stream chunks whose `choices` list is
        # empty (e.g. the leading content-filter-results chunk), which made
        # the original `chunk.choices[0]` raise IndexError. Guard before
        # indexing.
        if chunk.choices and chunk.choices[0].delta.content is not None:
            print(chunk.choices[0].delta.content, end="")
    print()
32 changes: 32 additions & 0 deletions python/frameworks/azure_openai/examples/embeddings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import os

from openai import AzureOpenAI

from fi_instrumentation.otel import register
from fi_instrumentation.fi_types import ProjectType
from traceai_azure_openai import AzureOpenAIInstrumentor

# Build the observability pipeline for this example application.
trace_provider = register(
    project_type=ProjectType.OBSERVE,
    project_name="azure_openai_embeddings_app",
)

# Patch the Azure OpenAI client so embedding requests are traced.
AzureOpenAIInstrumentor().instrument(tracer_provider=trace_provider)


if __name__ == "__main__":
    azure_client = AzureOpenAI(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        api_key=os.environ["AZURE_OPENAI_API_KEY"],
        api_version=os.environ.get("AZURE_OPENAI_API_VERSION", "2024-02-01"),
    )

    # Embed two short strings and report vector sizes plus token usage.
    result = azure_client.embeddings.create(
        model=os.environ.get("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", "text-embedding-ada-002"),
        input=["Hello world", "Azure OpenAI embeddings are great!"],
    )

    for index, item in enumerate(result.data):
        print(f"Embedding {index}: dimension={len(item.embedding)}")
    print(f"Total tokens: {result.usage.total_tokens}")
3 changes: 3 additions & 0 deletions python/frameworks/azure_openai/examples/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
traceAI-azure-openai
openai
fi-instrumentation-otel
18 changes: 18 additions & 0 deletions python/frameworks/azure_openai/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
[tool.poetry]
name = "traceAI-azure-openai"
version = "0.1.0"
description = "OpenTelemetry instrumentation for Azure OpenAI"
authors = ["Future AGI <no-reply@futureagi.com>"]
readme = "README.md"
packages = [
{ include = "traceai_azure_openai" }
]

[tool.poetry.dependencies]
python = ">=3.9,<3.14"
fi-instrumentation-otel = ">=0.1.11"
openai = ">=1.69.0"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
78 changes: 78 additions & 0 deletions python/frameworks/azure_openai/traceai_azure_openai/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import logging
from importlib import import_module
from typing import Any, Collection

from fi_instrumentation import FITracer, TraceConfig
from fi_instrumentation.instrumentation._protect_wrapper import GuardrailProtectWrapper
from opentelemetry import trace as trace_api
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor  # type: ignore
from traceai_azure_openai._request import _AsyncRequest, _Request
from traceai_azure_openai.package import _instruments
from traceai_azure_openai.version import __version__
from wrapt import wrap_function_wrapper

# Fix: the original imported `logging` and created `logger` twice; define
# them once, before the optional import below that logs a warning.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())

# Optional dependency: guardrail tracing is only enabled when the
# `ai-evaluation` package (fi.evals) is installed.
try:
    from fi.evals import Protect
except ImportError:
    logger.warning("ai-evaluation is not installed, please install it to trace protect")
    Protect = None

# Name of the module whose client classes get wrapped.
_MODULE = "openai"


class AzureOpenAIInstrumentor(BaseInstrumentor):  # type: ignore
    """
    An instrumentor for Azure OpenAI.

    Wraps the ``request`` method of ``openai.OpenAI`` and ``openai.AsyncOpenAI``
    so every request issued through the clients is traced. When the optional
    ``ai-evaluation`` package is installed, ``Protect.protect`` is wrapped as
    well for guardrail tracing.
    """

    __slots__ = (
        "_original_request",
        "_original_async_request",
        # Fix: _instrument assigns this attribute, but the original omitted
        # it from __slots__.
        "_original_protect",
    )

    def instrumentation_dependencies(self) -> Collection[str]:
        """Return the package requirements this instrumentation applies to."""
        return _instruments

    def _instrument(self, **kwargs: Any) -> None:
        """Wrap the OpenAI client request methods with tracing wrappers.

        Keyword Args:
            tracer_provider: Optional TracerProvider; falls back to the
                globally registered provider.
            config: Optional TraceConfig controlling capture behavior.
        """
        if not (tracer_provider := kwargs.get("tracer_provider")):
            tracer_provider = trace_api.get_tracer_provider()
        if not (config := kwargs.get("config")):
            config = TraceConfig()
        else:
            assert isinstance(config, TraceConfig)
        tracer = FITracer(
            trace_api.get_tracer(__name__, __version__, tracer_provider),
            config=config,
        )
        openai = import_module(_MODULE)
        # Keep references to the unwrapped methods so _uninstrument can
        # restore them.
        self._original_request = openai.OpenAI.request
        self._original_async_request = openai.AsyncOpenAI.request
        wrap_function_wrapper(
            module=_MODULE,
            name="OpenAI.request",
            wrapper=_Request(tracer=tracer, openai=openai),
        )
        wrap_function_wrapper(
            module=_MODULE,
            name="AsyncOpenAI.request",
            wrapper=_AsyncRequest(tracer=tracer, openai=openai),
        )
        if Protect is not None:
            self._original_protect = Protect.protect
            wrap_function_wrapper(
                module="fi.evals",
                name="Protect.protect",
                wrapper=GuardrailProtectWrapper(tracer),
            )
        else:
            self._original_protect = None

    def _uninstrument(self, **kwargs: Any) -> None:
        """Restore the original, unwrapped client methods."""
        openai = import_module(_MODULE)
        openai.OpenAI.request = self._original_request
        openai.AsyncOpenAI.request = self._original_async_request
        # Fix: the original wrapped Protect.protect in _instrument but never
        # restored it here, leaving the wrapper installed after uninstrument.
        if Protect is not None and getattr(self, "_original_protect", None) is not None:
            Protect.protect = self._original_protect
Loading