agentic_app.py
The agentic_app.py module in orchestrator-core runs the Agentic WFO FastAPI backend and the CLI.
FastAPI Backend
The code for the WFO's FastAPI backend is well documented; see the functions used in this module here:
orchestrator.agentic_app
The main application module.
This module contains the main LLMOrchestratorCore
class for the FastAPI
backend and
provides the ability to run the CLI with LLM features (search and/or agent).
LLMOrchestratorCore
Bases: orchestrator.app.OrchestratorCore
Source code in orchestrator/agentic_app.py
(lines 35–96)
96 | class LLMOrchestratorCore(OrchestratorCore):
def __init__(
self,
*args: Any,
llm_settings: LLMSettings = llm_settings,
agent_model: "OpenAIModel | str | None" = None,
agent_tools: "list[FunctionToolset] | None" = None,
**kwargs: Any,
) -> None:
"""Initialize the `LLMOrchestratorCore` class.
This class extends `OrchestratorCore` with LLM features (search and agent).
It runs the search migration and mounts the agent endpoint based on feature flags.
Args:
*args: All the normal arguments passed to the `OrchestratorCore` class.
llm_settings: A class of settings for the LLM
agent_model: Override the agent model (defaults to llm_settings.AGENT_MODEL)
agent_tools: A list of tools that can be used by the agent
**kwargs: Additional arguments passed to the `OrchestratorCore` class.
Returns:
None
"""
self.llm_settings = llm_settings
self.agent_model = agent_model or llm_settings.AGENT_MODEL
self.agent_tools = agent_tools
super().__init__(*args, **kwargs)
# Run search migration if search or agent is enabled
if self.llm_settings.SEARCH_ENABLED or self.llm_settings.AGENT_ENABLED:
logger.info("Running search migration")
try:
from orchestrator.db import db
from orchestrator.search.llm_migration import run_migration
with db.engine.begin() as connection:
run_migration(connection)
except ImportError as e:
logger.error(
"Unable to run search migration. Please install search dependencies: "
"`pip install orchestrator-core[search]`",
error=str(e),
)
raise
# Mount agent endpoint if agent is enabled
if self.llm_settings.AGENT_ENABLED:
logger.info("Initializing agent features", model=self.agent_model)
try:
from orchestrator.search.agent import build_agent_router
agent_app = build_agent_router(self.agent_model, self.agent_tools)
self.mount("/agent", agent_app)
except ImportError as e:
logger.error(
"Unable to initialize agent features. Please install agent dependencies: "
"`pip install orchestrator-core[agent]`",
error=str(e),
)
raise
|
__init__
__init__(
*args: typing.Any,
llm_settings: orchestrator.llm_settings.LLMSettings = llm_settings,
agent_model: pydantic_ai.models.openai.OpenAIModel | str | None = None,
agent_tools: list[pydantic_ai.toolsets.FunctionToolset] | None = None,
**kwargs: typing.Any
) -> None
Initialize the LLMOrchestratorCore class.
This class extends OrchestratorCore
with LLM features (search and agent).
It runs the search migration and mounts the agent endpoint based on feature flags.
Parameters:
-
*args
(typing.Any
, default:
()
)
–
All the normal arguments passed to the OrchestratorCore
class.
-
llm_settings
(orchestrator.llm_settings.LLMSettings
, default:
orchestrator.agentic_app.LLMOrchestratorCore.llm_settings
)
–
A class of settings for the LLM
-
agent_model
(pydantic_ai.models.openai.OpenAIModel | str | None
, default:
None
)
–
Override the agent model (defaults to llm_settings.AGENT_MODEL)
-
agent_tools
(list[pydantic_ai.toolsets.FunctionToolset] | None
, default:
None
)
–
A list of tools that can be used by the agent
-
**kwargs
(typing.Any
, default:
{}
)
–
Additional arguments passed to the OrchestratorCore
class.
Returns:
None
Source code in orchestrator/agentic_app.py
(lines 36–96)
96 | def __init__(
self,
*args: Any,
llm_settings: LLMSettings = llm_settings,
agent_model: "OpenAIModel | str | None" = None,
agent_tools: "list[FunctionToolset] | None" = None,
**kwargs: Any,
) -> None:
"""Initialize the `LLMOrchestratorCore` class.
This class extends `OrchestratorCore` with LLM features (search and agent).
It runs the search migration and mounts the agent endpoint based on feature flags.
Args:
*args: All the normal arguments passed to the `OrchestratorCore` class.
llm_settings: A class of settings for the LLM
agent_model: Override the agent model (defaults to llm_settings.AGENT_MODEL)
agent_tools: A list of tools that can be used by the agent
**kwargs: Additional arguments passed to the `OrchestratorCore` class.
Returns:
None
"""
self.llm_settings = llm_settings
self.agent_model = agent_model or llm_settings.AGENT_MODEL
self.agent_tools = agent_tools
super().__init__(*args, **kwargs)
# Run search migration if search or agent is enabled
if self.llm_settings.SEARCH_ENABLED or self.llm_settings.AGENT_ENABLED:
logger.info("Running search migration")
try:
from orchestrator.db import db
from orchestrator.search.llm_migration import run_migration
with db.engine.begin() as connection:
run_migration(connection)
except ImportError as e:
logger.error(
"Unable to run search migration. Please install search dependencies: "
"`pip install orchestrator-core[search]`",
error=str(e),
)
raise
# Mount agent endpoint if agent is enabled
if self.llm_settings.AGENT_ENABLED:
logger.info("Initializing agent features", model=self.agent_model)
try:
from orchestrator.search.agent import build_agent_router
agent_app = build_agent_router(self.agent_model, self.agent_tools)
self.mount("/agent", agent_app)
except ImportError as e:
logger.error(
"Unable to initialize agent features. Please install agent dependencies: "
"`pip install orchestrator-core[agent]`",
error=str(e),
)
raise
|