agentic_app.py
The agentic_app.py module is used in orchestrator-core for running the LLM-enabled orchestrator.
Functionality in this class is toggled by two different environment variables:
SEARCH_ENABLED: When set to True the orchestrator will enable the LLM Based search
AGENT_ENABLED: When set to True the orchestrator will activate the Agent module of the orchestrator.
FastAPI Backend
The code for the WFO's FastAPI backend is very well documented, so look through the functions used in this module here:
orchestrator.agentic_app
The main application module.
This module contains the main LLMOrchestratorCore class for the FastAPI backend and
provides the ability to run the CLI with LLM features (search and/or agent).
LLMOrchestratorCore
Bases: orchestrator.app.OrchestratorCore
Source code in orchestrator/agentic_app.py
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
class LLMOrchestratorCore(OrchestratorCore):
    """`OrchestratorCore` extended with optional LLM features (search and agent)."""

    def __init__(
        self,
        *args: Any,
        llm_settings: LLMSettings = llm_settings,
        agent_model: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the `LLMOrchestratorCore` class.

        This class extends `OrchestratorCore` with LLM features (search and agent).
        It runs the search migration based on feature flags.

        Args:
            *args: All the normal arguments passed to the `OrchestratorCore` class.
            llm_settings: A class of settings for the LLM.
            agent_model: Override the agent model (defaults to llm_settings.AGENT_MODEL).
            **kwargs: Additional arguments passed to the `OrchestratorCore` class.

        Returns:
            None

        Raises:
            ImportError: When a feature flag is enabled but the optional search
                dependencies are not installed.
        """
        self.llm_settings = llm_settings
        self.agent_model = agent_model or llm_settings.AGENT_MODEL
        super().__init__(*args, **kwargs)

        # Mount agent protocol adapters under /api/agent/
        if self.llm_settings.AGENT_ENABLED:
            from orchestrator.search.agent.adapters import A2AApp, MCPApp
            from orchestrator.search.agent.agent import AgentAdapter
            from orchestrator.security import AgentAuthMiddleware
            from orchestrator.settings import app_settings

            a2a_path = "/api/agent/a2a"
            a2a_url = f"{app_settings.BASE_URL}{a2a_path}/"
            a2a = A2AApp(AgentAdapter(self.agent_model, debug=self.llm_settings.AGENT_DEBUG), url=a2a_url)
            self.mount(a2a_path, AgentAuthMiddleware(a2a.app, self.auth_manager))

            # Expose the agent card at the root well-known URL required by the A2A spec.
            # This route is intentionally unauthenticated — agent card discovery must be public.
            self.add_api_route(
                "/.well-known/agent-card.json",
                # NOTE(review): reaches into a private attribute of the A2A app —
                # confirm no public accessor exists upstream.
                a2a.app._agent_card_endpoint,
                methods=["GET", "HEAD"],
                include_in_schema=False,
            )

            mcp_app = MCPApp(AgentAdapter(self.agent_model, debug=self.llm_settings.AGENT_DEBUG))
            self.mount("/api/agent", AgentAuthMiddleware(mcp_app.app, self.auth_manager))

            # Sub-app lifespans don't run when mounted, so we manage
            # the A2A and MCP lifecycles via host app startup/shutdown.
            # AsyncExitStack ensures clean rollback if a later entry fails.
            _adapter_stack = AsyncExitStack()

            async def _start_adapters() -> None:
                # Contexts can be entered on the stack directly; entering the
                # stack itself via __aenter__() was a documented no-op.
                await _adapter_stack.enter_async_context(a2a)
                await _adapter_stack.enter_async_context(mcp_app)

            async def _stop_adapters() -> None:
                # aclose() is the public API equivalent of __aexit__(None, None, None):
                # it unwinds all registered contexts in reverse order.
                await _adapter_stack.aclose()

            self.add_event_handler("startup", _start_adapters)
            self.add_event_handler("shutdown", _stop_adapters)

        # Run search migration if search or agent is enabled
        if self.llm_settings.SEARCH_ENABLED or self.llm_settings.AGENT_ENABLED:
            logger.info("Running search migration")
            try:
                from orchestrator.db import db
                from orchestrator.search.llm_migration import run_migration

                with db.engine.begin() as connection:
                    run_migration(connection)
            except ImportError as e:
                logger.error(
                    "Unable to run search migration. Please install search dependencies: "
                    "`pip install orchestrator-core[search]`",
                    error=str(e),
                )
                raise
__init__
__init__(
*args: typing.Any,
llm_settings: orchestrator.llm_settings.LLMSettings = llm_settings,
agent_model: str | None = None,
**kwargs: typing.Any
) -> None
Initialize the LLMOrchestratorCore class.
This class extends OrchestratorCore with LLM features (search and agent).
It runs the search migration based on feature flags.
Parameters:
-
*args
(typing.Any, default:
()
)
–
All the normal arguments passed to the OrchestratorCore class.
-
llm_settings
(orchestrator.llm_settings.LLMSettings, default:
the module-level `orchestrator.llm_settings.llm_settings` instance
)
–
A class of settings for the LLM
-
agent_model
(str | None, default:
None
)
–
Override the agent model (defaults to llm_settings.AGENT_MODEL)
-
**kwargs
(typing.Any, default:
{}
)
–
Additional arguments passed to the OrchestratorCore class.
Returns: None
Source code in orchestrator/agentic_app.py
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
def __init__(
    self,
    *args: Any,
    llm_settings: LLMSettings = llm_settings,
    agent_model: str | None = None,
    **kwargs: Any,
) -> None:
    """Set up an `OrchestratorCore` application with optional LLM extras.

    Search and agent functionality are switched on via the feature flags in
    ``llm_settings``; when either flag is active, the search schema migration
    is executed during construction.

    Args:
        *args: Positional arguments forwarded to ``OrchestratorCore``.
        llm_settings: Settings object controlling the LLM features.
        agent_model: Optional override for ``llm_settings.AGENT_MODEL``.
        **kwargs: Keyword arguments forwarded to ``OrchestratorCore``.

    Returns:
        None
    """
    self.llm_settings = llm_settings
    self.agent_model = agent_model or llm_settings.AGENT_MODEL
    super().__init__(*args, **kwargs)

    if self.llm_settings.AGENT_ENABLED:
        # Agent protocol adapters (A2A and MCP) are served under /api/agent/.
        from orchestrator.search.agent.adapters import A2AApp, MCPApp
        from orchestrator.search.agent.agent import AgentAdapter
        from orchestrator.security import AgentAuthMiddleware
        from orchestrator.settings import app_settings

        debug = self.llm_settings.AGENT_DEBUG
        a2a_path = "/api/agent/a2a"
        a2a_adapter = A2AApp(
            AgentAdapter(self.agent_model, debug=debug),
            url=f"{app_settings.BASE_URL}{a2a_path}/",
        )
        self.mount(a2a_path, AgentAuthMiddleware(a2a_adapter.app, self.auth_manager))

        # The A2A spec requires the agent card at a public, root-level
        # well-known URL, so this route deliberately bypasses authentication.
        self.add_api_route(
            "/.well-known/agent-card.json",
            a2a_adapter.app._agent_card_endpoint,
            methods=["GET", "HEAD"],
            include_in_schema=False,
        )

        mcp_adapter = MCPApp(AgentAdapter(self.agent_model, debug=debug))
        self.mount("/api/agent", AgentAuthMiddleware(mcp_adapter.app, self.auth_manager))

        # Mounted sub-apps never receive their own lifespan events, so the
        # host app's startup/shutdown hooks drive both adapters. The
        # AsyncExitStack rolls back any already-entered adapter should a
        # later one fail to start.
        stack = AsyncExitStack()

        async def _on_startup() -> None:
            await stack.__aenter__()
            await stack.enter_async_context(a2a_adapter)
            await stack.enter_async_context(mcp_adapter)

        async def _on_shutdown() -> None:
            await stack.__aexit__(None, None, None)

        self.add_event_handler("startup", _on_startup)
        self.add_event_handler("shutdown", _on_shutdown)

    if self.llm_settings.SEARCH_ENABLED or self.llm_settings.AGENT_ENABLED:
        # Either feature depends on the search tables, so migrate now.
        logger.info("Running search migration")
        try:
            from orchestrator.db import db
            from orchestrator.search.llm_migration import run_migration

            with db.engine.begin() as connection:
                run_migration(connection)
        except ImportError as e:
            logger.error(
                "Unable to run search migration. Please install search dependencies: "
                "`pip install orchestrator-core[search]`",
                error=str(e),
            )
            raise