Skip to content

Commit 0c140c8

Browse files
committed
backend: switch openai structured output to function calling
1 parent af63b76 commit 0c140c8

File tree

2 files changed: +19 additions, −6 deletions

backend/retrieval_graph/graph.py

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -38,13 +38,16 @@ async def analyze_and_route_query(
3838
return {"router": state.router}
3939

4040
configuration = AgentConfiguration.from_runnable_config(config)
41-
model = load_chat_model(configuration.query_model)
41+
structured_output_kwargs = (
42+
{"method": "function_calling"} if "openai" in configuration.query_model else {}
43+
)
44+
model = load_chat_model(configuration.query_model).with_structured_output(
45+
Router, **structured_output_kwargs
46+
)
4247
messages = [
4348
{"role": "system", "content": configuration.router_system_prompt}
4449
] + state.messages
45-
response = cast(
46-
Router, await model.with_structured_output(Router).ainvoke(messages)
47-
)
50+
response = cast(Router, await model.ainvoke(messages))
4851
return {"router": response}
4952

5053

@@ -140,7 +143,12 @@ class Plan(TypedDict):
140143
steps: list[str]
141144

142145
configuration = AgentConfiguration.from_runnable_config(config)
143-
model = load_chat_model(configuration.query_model).with_structured_output(Plan)
146+
structured_output_kwargs = (
147+
{"method": "function_calling"} if "openai" in configuration.query_model else {}
148+
)
149+
model = load_chat_model(configuration.query_model).with_structured_output(
150+
Plan, **structured_output_kwargs
151+
)
144152
messages = [
145153
{"role": "system", "content": configuration.research_plan_system_prompt}
146154
] + state.messages

backend/retrieval_graph/researcher_graph/graph.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,12 @@ class Response(TypedDict):
3737
queries: list[str]
3838

3939
configuration = AgentConfiguration.from_runnable_config(config)
40-
model = load_chat_model(configuration.query_model).with_structured_output(Response)
40+
structured_output_kwargs = (
41+
{"method": "function_calling"} if "openai" in configuration.query_model else {}
42+
)
43+
model = load_chat_model(configuration.query_model).with_structured_output(
44+
Response, **structured_output_kwargs
45+
)
4146
messages = [
4247
{"role": "system", "content": configuration.generate_queries_system_prompt},
4348
{"role": "human", "content": state.question},

Comments: 0