How to send out the Azure Open AI response in real-time streaming through FastAPI and hosted on Azure Functions?
Background:
- The goal is to deploy an OpenAI streaming response API with Azure Functions + FastAPI.
- OpenAI streaming works perfectly with FastAPI alone.
- However, since FastAPI is hosted by a Function App, the response is blocked until streaming is done.
# openai router
import os
import time
from typing import Union

import openai
from fastapi import APIRouter
from fastapi.responses import StreamingResponse
# Azure OpenAI settings, read from the environment (App Settings on Azure).
OPENAI_API_TYPE = os.environ.get("OPENAI_API_TYPE", "azure")
OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION", "2023-05-15")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")  # no default: required secret
OPENAI_RESOURCE_ENDPOINT = os.environ.get("OPENAI_RESOURCE_ENDPOINT")
# Configure the openai module globally for the Azure variant of the API;
# every subsequent openai.* call in this process uses these values.
openai.api_type = OPENAI_API_TYPE
openai.api_key = OPENAI_API_KEY
openai.api_base = OPENAI_RESOURCE_ENDPOINT
openai.api_version = OPENAI_API_VERSION
# All routes registered on this router are served under /api/openai.
router = APIRouter(prefix="/api/openai", tags=["openai"])
@router.post("/gpt")
async def ask_gpt(text: str):
    """Stream a chat completion for *text* as server-sent events.

    Sends *text* as a single user message to Azure OpenAI with
    ``stream=True`` and returns a ``text/event-stream`` response whose
    chunks are the incremental ``delta.content`` pieces as they arrive.
    """

    def get_chat_completion_stream(
        messages: list[dict],
        engine: str = "gpt-35-turbo",
        temperature: Union[int, float] = 0,
    ):
        """Yield SSE-formatted chunks from a streaming ChatCompletion call.

        Note: this is a generator (the original ``-> str`` annotation was
        wrong — nothing here returns a string; it yields them).
        """
        res = openai.ChatCompletion.create(
            messages=messages, temperature=temperature, engine=engine, stream=True
        )
        start_time = time.time()
        for event in res:
            # Not every event carries new text (role / finish events don't);
            # only forward chunks that actually contain content.
            if "content" in event["choices"][0].delta:
                current_response = event["choices"][0].delta.content
                res_time = time.time() - start_time
                print(f"response :{current_response}, time: {res_time}")
                yield "data: " + current_response + "\n\n"

    chat_prompts = [
        {"role": "user", "content": text},
    ]
    # BUG FIX: the original called openai_utils.get_chat_completion_stream,
    # but no ``openai_utils`` module is imported anywhere in this file —
    # that raised NameError at request time. Call the local generator
    # defined above instead.
    return StreamingResponse(
        get_chat_completion_stream(chat_prompts),
        media_type="text/event-stream",
    )
# __init__.py
import azure.functions as func

from main import app


async def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    """Azure Functions entry point: forward the HTTP trigger to the FastAPI app.

    Uses the asynchronous ASGI bridge (``handle_async``) instead of the
    synchronous ``handle``, so the FastAPI app runs on the worker's event
    loop as intended for ASGI apps.

    NOTE(review): even with the async bridge, the classic HTTP output
    binding buffers the whole response before returning it to the client,
    so StreamingResponse chunks are not delivered incrementally — confirm
    against the Functions host version / HTTP streaming support in use.
    """
    return await func.AsgiMiddleware(app).handle_async(req, context)
{
"scriptFile": "__init__.py",
"bindings": [
{
      "authLevel": "anonymous",
"type": "httpTrigger",
"direction": "in",
"name": "req",
"methods": ["get", "post", "patch", "delete", "put"],
      "route": "{*route}"
},
{
"type": "http",
"direction": "out",
"name": "$return"
}
]
}