Migrate from deprecated AzureOpenAI constructors to standard OpenAI client #270


Draft
Copilot wants to merge 3 commits into main from copilot/fix-269
Changes from 1 commit
Complete AzureOpenAI to OpenAI migration
Co-authored-by: pamelafox <297042+pamelafox@users.noreply.github.com>
Copilot and pamelafox committed Sep 23, 2025
commit aa2f281d23835a6c97e7b401c55102dac7b67178
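
The pattern applied throughout this diff: drop the Azure-specific constructor, build the deployment URL by hand, and hand it to the standard client as `base_url`. A minimal before/after sketch (illustrative only, using the repo's environment variable names and the API-key branch):

```python
import os

from openai import OpenAI  # AzureOpenAI is no longer imported

# Before (deprecated Azure-specific constructor):
# client = AzureOpenAI(
#     api_version=os.environ["AZURE_OPENAI_VERSION"],
#     azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
#     azure_deployment=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
#     api_key=os.environ["AZURE_OPENAI_KEY"],
# )

# After: point the standard client at the deployment URL directly.
azure_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]
azure_deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
api_version = "2024-10-21"  # now pinned in code; AZURE_OPENAI_VERSION is removed everywhere
client = OpenAI(
    base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
    api_key=os.environ["AZURE_OPENAI_KEY"],
)
```

For keyless auth, the diff fetches a single token from the Azure Identity bearer-token provider and passes it as `api_key`.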
1 change: 0 additions & 1 deletion .env.sample
@@ -12,7 +12,6 @@ OPENAI_EMBED_HOST=azure
# Needed for Azure:
# You also need to `azd auth login` if running this locally
AZURE_OPENAI_ENDPOINT=https://YOUR-AZURE-OPENAI-SERVICE-NAME.openai.azure.com
AZURE_OPENAI_VERSION=2024-03-01-preview
AZURE_OPENAI_CHAT_DEPLOYMENT=gpt-4o-mini
AZURE_OPENAI_CHAT_MODEL=gpt-4o-mini
AZURE_OPENAI_EMBED_DEPLOYMENT=text-embedding-3-large
27 changes: 16 additions & 11 deletions evals/generate_ground_truth.py
@@ -3,11 +3,10 @@
import os
from collections.abc import Generator
from pathlib import Path
from typing import Union

from azure.identity import AzureDeveloperCliCredential, get_bearer_token_provider
from dotenv_azd import load_azd_env
from openai import AzureOpenAI, OpenAI
from openai import OpenAI
from openai.types.chat import ChatCompletionToolParam
from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session
@@ -78,26 +77,32 @@ def answer_formatter(answer, source) -> str:
return f"{answer} [{source['id']}]"


def get_openai_client() -> tuple[Union[AzureOpenAI, OpenAI], str]:
def get_openai_client() -> tuple[OpenAI, str]:
"""Return an OpenAI client based on the environment variables"""
openai_client: Union[AzureOpenAI, OpenAI]
openai_client: OpenAI
OPENAI_CHAT_HOST = os.getenv("OPENAI_CHAT_HOST")
if OPENAI_CHAT_HOST == "azure":
if api_key := os.getenv("AZURE_OPENAI_KEY"):
logger.info("Using Azure OpenAI Service with API Key from AZURE_OPENAI_KEY")
openai_client = AzureOpenAI(
api_version=os.environ["AZURE_OPENAI_VERSION"],
azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
azure_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]
azure_deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
api_version = "2024年10月21日"
openai_client = OpenAI(
base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
api_key=api_key,
)
else:
logger.info("Using Azure OpenAI Service with Azure Developer CLI Credential")
azure_credential = AzureDeveloperCliCredential(process_timeout=60, tenant_id=os.environ["AZURE_TENANT_ID"])
token_provider = get_bearer_token_provider(azure_credential, "https://cognitiveservices.azure.com/.default")
openai_client = AzureOpenAI(
api_version=os.environ["AZURE_OPENAI_VERSION"],
azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
azure_ad_token_provider=token_provider,
azure_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]
azure_deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
api_version = "2024年10月21日"
# Get the initial token from the provider
initial_token = token_provider()
openai_client = OpenAI(
base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
api_key=initial_token,
)
model = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
elif OPENAI_CHAT_HOST == "ollama":
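
A hypothetical call site for the updated helper (not part of this diff), showing that the returned client and model name are used identically for Azure and non-Azure hosts:

```python
# Hypothetical usage sketch: the helper now always returns a plain OpenAI client.
openai_client, model = get_openai_client()
response = openai_client.chat.completions.create(
    model=model,  # for Azure this is the deployment name from AZURE_OPENAI_CHAT_DEPLOYMENT
    messages=[{"role": "user", "content": "Generate a ground-truth question from the sources."}],
)
print(response.choices[0].message.content)
```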
5 changes: 0 additions & 5 deletions infra/main.bicep
@@ -325,10 +325,6 @@ var webAppEnv = union(azureOpenAIKeyEnv, openAIComKeyEnv, [
name: 'AZURE_OPENAI_ENDPOINT'
value: !empty(azureOpenAIEndpoint) ? azureOpenAIEndpoint : (deployAzureOpenAI ? openAI.outputs.endpoint : '')
}
{
name: 'AZURE_OPENAI_VERSION'
value: openAIChatHost == 'azure' ? azureOpenAIAPIVersion : ''
}
])

module web 'web.bicep' = {
@@ -553,7 +549,6 @@ output AZURE_OPENAI_RESOURCE_GROUP string = deployAzureOpenAI ? openAIResourceGr
output AZURE_OPENAI_ENDPOINT string = !empty(azureOpenAIEndpoint)
? azureOpenAIEndpoint
: (deployAzureOpenAI ? openAI.outputs.endpoint : '')
output AZURE_OPENAI_VERSION string = azureOpenAIAPIVersion
output AZURE_OPENAI_CHAT_DEPLOYMENT string = deployAzureOpenAI ? chatDeploymentName : ''
output AZURE_OPENAI_CHAT_DEPLOYMENT_VERSION string = deployAzureOpenAI ? chatDeploymentVersion : ''
output AZURE_OPENAI_CHAT_DEPLOYMENT_CAPACITY int = deployAzureOpenAI ? chatDeploymentCapacity : 0
8 changes: 4 additions & 4 deletions src/backend/fastapi_app/__init__.py
@@ -2,12 +2,12 @@
import os
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import TypedDict, Union
from typing import TypedDict

import fastapi
from azure.monitor.opentelemetry import configure_azure_monitor
from dotenv import load_dotenv
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai import AsyncOpenAI
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
@@ -27,8 +27,8 @@
class State(TypedDict):
sessionmaker: async_sessionmaker[AsyncSession]
context: FastAPIAppContext
chat_client: Union[AsyncOpenAI, AsyncAzureOpenAI]
embed_client: Union[AsyncOpenAI, AsyncAzureOpenAI]
chat_client: AsyncOpenAI
embed_client: AsyncOpenAI


@asynccontextmanager
4 changes: 2 additions & 2 deletions src/backend/fastapi_app/dependencies.py
@@ -5,7 +5,7 @@

import azure.identity
from fastapi import Depends, Request
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai import AsyncOpenAI
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker

@@ -17,7 +17,7 @@ class OpenAIClient(BaseModel):
OpenAI client
"""

client: Union[AsyncOpenAI, AsyncAzureOpenAI]
client: AsyncOpenAI
model_config = {"arbitrary_types_allowed": True}


6 changes: 3 additions & 3 deletions src/backend/fastapi_app/embeddings.py
@@ -1,11 +1,11 @@
from typing import Optional, TypedDict, Union
from typing import Optional, TypedDict

from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai import AsyncOpenAI


async def compute_text_embedding(
q: str,
openai_client: Union[AsyncOpenAI, AsyncAzureOpenAI],
openai_client: AsyncOpenAI,
embed_model: str,
embed_deployment: Optional[str] = None,
embedding_dimensions: Optional[int] = None,
46 changes: 22 additions & 24 deletions src/backend/fastapi_app/openai_clients.py
@@ -10,23 +10,22 @@

async def create_openai_chat_client(
azure_credential: Union[azure.identity.AzureDeveloperCliCredential, azure.identity.ManagedIdentityCredential, None],
) -> Union[openai.AsyncAzureOpenAI, openai.AsyncOpenAI]:
openai_chat_client: Union[openai.AsyncAzureOpenAI, openai.AsyncOpenAI]
) -> openai.AsyncOpenAI:
openai_chat_client: openai.AsyncOpenAI
OPENAI_CHAT_HOST = os.getenv("OPENAI_CHAT_HOST")
if OPENAI_CHAT_HOST == "azure":
api_version = os.environ["AZURE_OPENAI_VERSION"] or "2024-10-21"
azure_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]
azure_deployment = os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"]
# Use default API version for Azure OpenAI
api_version = "2024年10月21日"
if api_key := os.getenv("AZURE_OPENAI_KEY"):
logger.info(
"Setting up Azure OpenAI client for chat completions using API key, endpoint %s, deployment %s",
azure_endpoint,
azure_deployment,
)
openai_chat_client = openai.AsyncAzureOpenAI(
api_version=api_version,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
openai_chat_client = openai.AsyncOpenAI(
base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
api_key=api_key,
)
elif azure_credential:
@@ -38,11 +37,11 @@ async def create_openai_chat_client(
token_provider = azure.identity.get_bearer_token_provider(
azure_credential, "https://cognitiveservices.azure.com/.default"
)
openai_chat_client = openai.AsyncAzureOpenAI(
api_version=api_version,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
azure_ad_token_provider=token_provider,
# Get the initial token from the provider
initial_token = token_provider()
openai_chat_client = openai.AsyncOpenAI(
base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
api_key=initial_token,
)
else:
raise ValueError("Azure OpenAI client requires either an API key or Azure Identity credential.")
@@ -69,23 +68,22 @@ async def create_openai_chat_client(

async def create_openai_embed_client(
azure_credential: Union[azure.identity.AzureDeveloperCliCredential, azure.identity.ManagedIdentityCredential, None],
) -> Union[openai.AsyncAzureOpenAI, openai.AsyncOpenAI]:
openai_embed_client: Union[openai.AsyncAzureOpenAI, openai.AsyncOpenAI]
) -> openai.AsyncOpenAI:
openai_embed_client: openai.AsyncOpenAI
OPENAI_EMBED_HOST = os.getenv("OPENAI_EMBED_HOST")
if OPENAI_EMBED_HOST == "azure":
api_version = os.environ["AZURE_OPENAI_VERSION"] or "2024-03-01-preview"
azure_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"]
azure_deployment = os.environ["AZURE_OPENAI_EMBED_DEPLOYMENT"]
# Use default API version for Azure OpenAI
api_version = "2024年03月01日-preview"
if api_key := os.getenv("AZURE_OPENAI_KEY"):
logger.info(
"Setting up Azure OpenAI client for embeddings using API key, endpoint %s, deployment %s",
azure_endpoint,
azure_deployment,
)
openai_embed_client = openai.AsyncAzureOpenAI(
api_version=api_version,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
openai_embed_client = openai.AsyncOpenAI(
base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
api_key=api_key,
)
elif azure_credential:
@@ -97,11 +95,11 @@ async def create_openai_embed_client(
token_provider = azure.identity.get_bearer_token_provider(
azure_credential, "https://cognitiveservices.azure.com/.default"
)
openai_embed_client = openai.AsyncAzureOpenAI(
api_version=api_version,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
azure_ad_token_provider=token_provider,
# Get the initial token from the provider
initial_token = token_provider()
openai_embed_client = openai.AsyncOpenAI(
base_url=f"{azure_endpoint.rstrip('/')}/openai/deployments/{azure_deployment}?api-version={api_version}",
api_key=initial_token,
)
else:
raise ValueError("Azure OpenAI client requires either an API key or Azure Identity credential.")
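
For illustration, a minimal standalone sketch of the embeddings factory (hypothetical; in the app these clients are created during the FastAPI lifespan and passed into compute_text_embedding):

```python
import asyncio

import azure.identity

from fastapi_app.openai_clients import create_openai_embed_client


async def main() -> None:
    # Assumes OPENAI_EMBED_HOST=azure and the AZURE_OPENAI_* variables from .env.sample are set,
    # and that `azd auth login` has been run so the credential can issue a token.
    credential = azure.identity.AzureDeveloperCliCredential(process_timeout=60)
    embed_client = await create_openai_embed_client(credential)
    result = await embed_client.embeddings.create(
        model="text-embedding-3-large",  # deployment name when OPENAI_EMBED_HOST == "azure"
        input="sample text to embed",
        dimensions=1024,  # illustrative; optional dimension reduction
    )
    print(len(result.data[0].embedding))


asyncio.run(main())
```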
6 changes: 3 additions & 3 deletions src/backend/fastapi_app/postgres_searcher.py
@@ -1,7 +1,7 @@
from typing import Optional, Union
from typing import Optional

import numpy as np
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai import AsyncOpenAI
from sqlalchemy import Float, Integer, column, select, text
from sqlalchemy.ext.asyncio import AsyncSession

@@ -14,7 +14,7 @@ class PostgresSearcher:
def __init__(
self,
db_session: AsyncSession,
openai_embed_client: Union[AsyncOpenAI, AsyncAzureOpenAI],
openai_embed_client: AsyncOpenAI,
embed_deployment: Optional[str], # Not needed for non-Azure OpenAI or for retrieval_mode="text"
embed_model: str,
embed_dimensions: Optional[int],
6 changes: 3 additions & 3 deletions src/backend/fastapi_app/rag_advanced.py
@@ -1,6 +1,6 @@
import json
from collections.abc import AsyncGenerator
from typing import Optional, Union
from typing import Optional

from agents import (
Agent,
@@ -12,7 +12,7 @@
function_tool,
set_tracing_disabled,
)
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai import AsyncOpenAI
from openai.types.responses import EasyInputMessageParam, ResponseInputItemParam, ResponseTextDeltaEvent

from fastapi_app.api_models import (
@@ -45,7 +45,7 @@ def __init__(
messages: list[ResponseInputItemParam],
overrides: ChatRequestOverrides,
searcher: PostgresSearcher,
openai_chat_client: Union[AsyncOpenAI, AsyncAzureOpenAI],
openai_chat_client: AsyncOpenAI,
chat_model: str,
chat_deployment: Optional[str], # Not needed for non-Azure OpenAI
):
6 changes: 3 additions & 3 deletions src/backend/fastapi_app/rag_simple.py
@@ -1,8 +1,8 @@
from collections.abc import AsyncGenerator
from typing import Optional, Union
from typing import Optional

from agents import Agent, ItemHelpers, ModelSettings, OpenAIChatCompletionsModel, Runner, set_tracing_disabled
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai import AsyncOpenAI
from openai.types.responses import ResponseInputItemParam, ResponseTextDeltaEvent

from fastapi_app.api_models import (
@@ -28,7 +28,7 @@ def __init__(
messages: list[ResponseInputItemParam],
overrides: ChatRequestOverrides,
searcher: PostgresSearcher,
openai_chat_client: Union[AsyncOpenAI, AsyncAzureOpenAI],
openai_chat_client: AsyncOpenAI,
chat_model: str,
chat_deployment: Optional[str], # Not needed for non-Azure OpenAI
):
