import os
from typing import Optional

from dotenv import load_dotenv
from openai import AsyncAzureOpenAI

load_dotenv()


class LLMClient:
    """Thin async wrapper around the Azure OpenAI chat completions API."""

    def __init__(self):
        self.client = AsyncAzureOpenAI(
            # Assumption: the API version is supplied via AZURE_OPENAI_API_VERSION
            # (it must not be the API key).
            api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
            azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
            api_key=os.getenv("AZURE_OPENAI_API_KEY"),
        )

    async def query(self, prompt: str) -> Optional[str]:
        """Send a single user prompt and return the model's reply, or None on failure."""
        try:
            response = await self.client.chat.completions.create(
                model="agile4",  # Azure deployment name
                messages=[{"role": "user", "content": prompt}],
            )
            return response.choices[0].message.content.strip()
        except Exception as e:
            print(f"OpenAI API error: {e}")
            return None
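

# Usage sketch (not part of the original module): assumes the LLMClient class
# above, that the AZURE_OPENAI_* environment variables are set, and that a
# deployment named "agile4" exists; the prompt string is a placeholder.
import asyncio


async def _demo() -> None:
    client = LLMClient()
    reply = await client.query("Say hello in one short sentence.")
    if reply is not None:
        print(reply)


if __name__ == "__main__":
    asyncio.run(_demo())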