Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions agent-openai-agents-sdk/agent_server/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def create_coding_agent(mcp_server: McpServer) -> Agent:


@invoke()
async def invoke(request: ResponsesAgentRequest) -> ResponsesAgentResponse:
async def invoke_handler(request: ResponsesAgentRequest) -> ResponsesAgentResponse:
# Optionally use the user's workspace client for on-behalf-of authentication
# user_workspace_client = get_user_workspace_client()
async with await init_mcp_server() as mcp_server:
Expand All @@ -52,7 +52,7 @@ async def invoke(request: ResponsesAgentRequest) -> ResponsesAgentResponse:


@stream()
async def stream(request: dict) -> AsyncGenerator[ResponsesAgentStreamEvent, None]:
async def stream_handler(request: dict) -> AsyncGenerator[ResponsesAgentStreamEvent, None]:
# Optionally use the user's workspace client for on-behalf-of authentication
# user_workspace_client = get_user_workspace_client()
async with await init_mcp_server() as mcp_server:
Expand Down
28 changes: 28 additions & 0 deletions agent-openai-agents-sdk/agent_server/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from fastapi.responses import StreamingResponse
from aiohttp.web import Request
from mlflow.genai.agent_server import AgentServer


class MlflowAgentServer(AgentServer):
    """AgentServer subclass that lazily enables MLflow git-based version
    tracking on the first invocations request.

    The setup is deferred to request time (rather than construction) so
    server startup stays fast; it runs at most once per process.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # True once git-based version tracking has been set up successfully.
        self._initialized = False

    async def _handle_invocations_request(self, *args, **kwargs):
        # One-time setup of git-based version tracking on first request.
        # Imported lazily to keep module import (and server startup) cheap.
        if not self._initialized:
            from mlflow.genai.agent_server import (
                setup_mlflow_git_based_version_tracking,
            )

            setup_mlflow_git_based_version_tracking()
            # Mark initialized only AFTER setup succeeds, so that a failed
            # attempt is retried on the next request instead of being
            # silently skipped forever.
            self._initialized = True
        return await super()._handle_invocations_request(*args, **kwargs)
Comment on lines +6 to +16
Copy link
Member Author

@fanzeyi fanzeyi Feb 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In the longer term we should move this logic into mlflow's AgentServer.

We could add an `enable_git_tracking` parameter to the initializer; the `_handle_invocations_request` handler would then check that flag and call the setup function only when it is true.



def create_app():
    """Build and return the ASGI app served by the MLflow agent server.

    Loads environment variables from ``.env`` first, then imports the agent
    module (whose import registers the agent's functions with the server),
    and finally constructs the ``MlflowAgentServer``.

    Returns:
        The ASGI application exposed by ``MlflowAgentServer``.
    """
    from dotenv import load_dotenv

    # Load env vars from .env BEFORE importing the agent so that auth
    # configuration is in place when the agent module initializes.
    # (The original code imported the agent first, contradicting this
    # intent and leaving the agent without the .env credentials.)
    load_dotenv(dotenv_path=".env", override=True)

    # Importing the module registers the agent's functions with the server;
    # the import itself is the side effect we need.
    import agent_server.agent  # noqa: F401

    server = MlflowAgentServer("ResponsesAgent", enable_chat_proxy=True)
    return server.app
17 changes: 0 additions & 17 deletions agent-openai-agents-sdk/agent_server/start_server.py

This file was deleted.

2 changes: 1 addition & 1 deletion agent-openai-agents-sdk/app.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
command: ["uv", "run", "start-app"]
command: ["python", "scripts/start_app.py"]
# databricks apps listen by default on port 8000

env:
Expand Down
5 changes: 4 additions & 1 deletion agent-openai-agents-sdk/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ authors = [
requires-python = ">=3.11"
dependencies = [
"fastapi>=0.115.12",
"uvicorn>=0.34.2",
"uvicorn[standard]>=0.34.2",
"databricks-openai>=0.9.0",
"mlflow>=3.9.0",
"openai-agents>=0.4.1",
Expand All @@ -33,3 +33,6 @@ start-app = "scripts.start_app:main"
start-server = "agent_server.start_server:main"
agent-evaluate = "agent_server.evaluate_agent:evaluate"
discover-tools = "scripts.discover_tools:main"

[tool.uv.pip]
output-file = "requirements.txt"
Loading