Skip to content

Commit 4a943ad

Browse files
RobertCraigie and stainless-app[bot]
authored and committed
fix(responses): add missing arguments to parse
1 parent be1f58f commit 4a943ad

File tree

1 file changed

+30
-10
lines changed

1 file changed

+30
-10
lines changed

src/openai/resources/responses/responses.py

Lines changed: 30 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -943,22 +943,27 @@ def stream(
943943
def parse(
944944
self,
945945
*,
946-
input: Union[str, ResponseInputParam],
947-
model: Union[str, ChatModel],
948946
text_format: type[TextFormatT] | NotGiven = NOT_GIVEN,
949-
tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
947+
background: Optional[bool] | NotGiven = NOT_GIVEN,
950948
include: Optional[List[ResponseIncludable]] | NotGiven = NOT_GIVEN,
949+
input: Union[str, ResponseInputParam] | NotGiven = NOT_GIVEN,
951950
instructions: Optional[str] | NotGiven = NOT_GIVEN,
952951
max_output_tokens: Optional[int] | NotGiven = NOT_GIVEN,
952+
max_tool_calls: Optional[int] | NotGiven = NOT_GIVEN,
953953
metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
954+
model: ResponsesModel | NotGiven = NOT_GIVEN,
954955
parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
955956
previous_response_id: Optional[str] | NotGiven = NOT_GIVEN,
957+
prompt: Optional[ResponsePromptParam] | NotGiven = NOT_GIVEN,
956958
reasoning: Optional[Reasoning] | NotGiven = NOT_GIVEN,
959+
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
957960
store: Optional[bool] | NotGiven = NOT_GIVEN,
958961
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
959962
temperature: Optional[float] | NotGiven = NOT_GIVEN,
960963
text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
961964
tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
965+
tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
966+
top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
962967
top_p: Optional[float] | NotGiven = NOT_GIVEN,
963968
truncation: Optional[Literal["auto", "disabled"]] | NotGiven = NOT_GIVEN,
964969
user: str | NotGiven = NOT_GIVEN,
@@ -991,21 +996,26 @@ def parser(raw_response: Response) -> ParsedResponse[TextFormatT]:
991996
"/responses",
992997
body=maybe_transform(
993998
{
994-
"input": input,
995-
"model": model,
999+
"background": background,
9961000
"include": include,
1001+
"input": input,
9971002
"instructions": instructions,
9981003
"max_output_tokens": max_output_tokens,
1004+
"max_tool_calls": max_tool_calls,
9991005
"metadata": metadata,
1006+
"model": model,
10001007
"parallel_tool_calls": parallel_tool_calls,
10011008
"previous_response_id": previous_response_id,
1009+
"prompt": prompt,
10021010
"reasoning": reasoning,
1011+
"service_tier": service_tier,
10031012
"store": store,
10041013
"stream": stream,
10051014
"temperature": temperature,
10061015
"text": text,
10071016
"tool_choice": tool_choice,
10081017
"tools": tools,
1018+
"top_logprobs": top_logprobs,
10091019
"top_p": top_p,
10101020
"truncation": truncation,
10111021
"user": user,
@@ -2202,22 +2212,27 @@ def stream(
22022212
async def parse(
22032213
self,
22042214
*,
2205-
input: Union[str, ResponseInputParam],
2206-
model: Union[str, ChatModel],
22072215
text_format: type[TextFormatT] | NotGiven = NOT_GIVEN,
2208-
tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
2216+
background: Optional[bool] | NotGiven = NOT_GIVEN,
22092217
include: Optional[List[ResponseIncludable]] | NotGiven = NOT_GIVEN,
2218+
input: Union[str, ResponseInputParam] | NotGiven = NOT_GIVEN,
22102219
instructions: Optional[str] | NotGiven = NOT_GIVEN,
22112220
max_output_tokens: Optional[int] | NotGiven = NOT_GIVEN,
2221+
max_tool_calls: Optional[int] | NotGiven = NOT_GIVEN,
22122222
metadata: Optional[Metadata] | NotGiven = NOT_GIVEN,
2223+
model: ResponsesModel | NotGiven = NOT_GIVEN,
22132224
parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
22142225
previous_response_id: Optional[str] | NotGiven = NOT_GIVEN,
2226+
prompt: Optional[ResponsePromptParam] | NotGiven = NOT_GIVEN,
22152227
reasoning: Optional[Reasoning] | NotGiven = NOT_GIVEN,
2228+
service_tier: Optional[Literal["auto", "default", "flex", "scale", "priority"]] | NotGiven = NOT_GIVEN,
22162229
store: Optional[bool] | NotGiven = NOT_GIVEN,
22172230
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
22182231
temperature: Optional[float] | NotGiven = NOT_GIVEN,
22192232
text: ResponseTextConfigParam | NotGiven = NOT_GIVEN,
22202233
tool_choice: response_create_params.ToolChoice | NotGiven = NOT_GIVEN,
2234+
tools: Iterable[ParseableToolParam] | NotGiven = NOT_GIVEN,
2235+
top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
22212236
top_p: Optional[float] | NotGiven = NOT_GIVEN,
22222237
truncation: Optional[Literal["auto", "disabled"]] | NotGiven = NOT_GIVEN,
22232238
user: str | NotGiven = NOT_GIVEN,
@@ -2250,21 +2265,26 @@ def parser(raw_response: Response) -> ParsedResponse[TextFormatT]:
22502265
"/responses",
22512266
body=maybe_transform(
22522267
{
2253-
"input": input,
2254-
"model": model,
2268+
"background": background,
22552269
"include": include,
2270+
"input": input,
22562271
"instructions": instructions,
22572272
"max_output_tokens": max_output_tokens,
2273+
"max_tool_calls": max_tool_calls,
22582274
"metadata": metadata,
2275+
"model": model,
22592276
"parallel_tool_calls": parallel_tool_calls,
22602277
"previous_response_id": previous_response_id,
2278+
"prompt": prompt,
22612279
"reasoning": reasoning,
2280+
"service_tier": service_tier,
22622281
"store": store,
22632282
"stream": stream,
22642283
"temperature": temperature,
22652284
"text": text,
22662285
"tool_choice": tool_choice,
22672286
"tools": tools,
2287+
"top_logprobs": top_logprobs,
22682288
"top_p": top_p,
22692289
"truncation": truncation,
22702290
"user": user,

0 commit comments

Comments
 (0)