Skip to content

Commit 4041e4f

Browse files
committed
fix(api): add missing parallel_tool_calls arguments
1 parent e005a84 commit 4041e4f

File tree

2 files changed

+71
-0
lines changed

2 files changed

+71
-0
lines changed

src/openai/resources/beta/threads/threads.py

+12
Original file line numberDiff line numberDiff line change
@@ -828,6 +828,7 @@ def create_and_run_poll(
828828
None,
829829
]
830830
| NotGiven = NOT_GIVEN,
831+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
831832
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
832833
temperature: Optional[float] | NotGiven = NOT_GIVEN,
833834
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -856,6 +857,7 @@ def create_and_run_poll(
856857
max_prompt_tokens=max_prompt_tokens,
857858
metadata=metadata,
858859
model=model,
860+
parallel_tool_calls=parallel_tool_calls,
859861
response_format=response_format,
860862
temperature=temperature,
861863
stream=False,
@@ -908,6 +910,7 @@ def create_and_run_stream(
908910
None,
909911
]
910912
| NotGiven = NOT_GIVEN,
913+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
911914
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
912915
temperature: Optional[float] | NotGiven = NOT_GIVEN,
913916
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -962,6 +965,7 @@ def create_and_run_stream(
962965
None,
963966
]
964967
| NotGiven = NOT_GIVEN,
968+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
965969
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
966970
temperature: Optional[float] | NotGiven = NOT_GIVEN,
967971
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1016,6 +1020,7 @@ def create_and_run_stream(
10161020
None,
10171021
]
10181022
| NotGiven = NOT_GIVEN,
1023+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
10191024
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
10201025
temperature: Optional[float] | NotGiven = NOT_GIVEN,
10211026
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1050,6 +1055,7 @@ def create_and_run_stream(
10501055
"max_prompt_tokens": max_prompt_tokens,
10511056
"metadata": metadata,
10521057
"model": model,
1058+
"parallel_tool_calls": parallel_tool_calls,
10531059
"response_format": response_format,
10541060
"temperature": temperature,
10551061
"tool_choice": tool_choice,
@@ -1838,6 +1844,7 @@ async def create_and_run_poll(
18381844
None,
18391845
]
18401846
| NotGiven = NOT_GIVEN,
1847+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
18411848
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
18421849
temperature: Optional[float] | NotGiven = NOT_GIVEN,
18431850
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1866,6 +1873,7 @@ async def create_and_run_poll(
18661873
max_prompt_tokens=max_prompt_tokens,
18671874
metadata=metadata,
18681875
model=model,
1876+
parallel_tool_calls=parallel_tool_calls,
18691877
response_format=response_format,
18701878
temperature=temperature,
18711879
stream=False,
@@ -1920,6 +1928,7 @@ def create_and_run_stream(
19201928
None,
19211929
]
19221930
| NotGiven = NOT_GIVEN,
1931+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
19231932
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
19241933
temperature: Optional[float] | NotGiven = NOT_GIVEN,
19251934
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -1974,6 +1983,7 @@ def create_and_run_stream(
19741983
None,
19751984
]
19761985
| NotGiven = NOT_GIVEN,
1986+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
19771987
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
19781988
temperature: Optional[float] | NotGiven = NOT_GIVEN,
19791989
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -2028,6 +2038,7 @@ def create_and_run_stream(
20282038
None,
20292039
]
20302040
| NotGiven = NOT_GIVEN,
2041+
parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
20312042
response_format: Optional[AssistantResponseFormatOptionParam] | NotGiven = NOT_GIVEN,
20322043
temperature: Optional[float] | NotGiven = NOT_GIVEN,
20332044
thread: thread_create_and_run_params.Thread | NotGiven = NOT_GIVEN,
@@ -2064,6 +2075,7 @@ def create_and_run_stream(
20642075
"max_prompt_tokens": max_prompt_tokens,
20652076
"metadata": metadata,
20662077
"model": model,
2078+
"parallel_tool_calls": parallel_tool_calls,
20672079
"response_format": response_format,
20682080
"temperature": temperature,
20692081
"tool_choice": tool_choice,

tests/lib/test_assistants.py

+59
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
from __future__ import annotations
2+
3+
import inspect
4+
from typing import Any, Callable
5+
6+
import pytest
7+
8+
from openai import OpenAI, AsyncOpenAI
9+
10+
11+
def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    # NOTE: mutable default is safe here — the set is only read, never mutated.
    exclude_params: set[str] = set(),
) -> None:
    """Assert that `check_func` accepts every parameter of `source_func`.

    Walks the parameters of `source_func` and verifies each one (unless
    listed in `exclude_params`) exists on `check_func` with an identical
    annotation. All problems are collected and reported together.

    Args:
        source_func: the reference callable whose signature is authoritative.
        check_func: the callable being validated against `source_func`.
        exclude_params: parameter names to skip, e.g. ones intentionally
            absent from `check_func` such as `stream`.

    Raises:
        AssertionError: listing every missing or mismatched parameter.
    """
    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, generated_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        # identity check against None — Parameter objects are always truthy,
        # so this is the explicit form of the original intent
        if custom_param is None:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != generated_param.annotation:
            # fixed: original message read "are do not match" and printed the
            # generated annotation twice, hiding the actual custom annotation
            errors.append(
                f"types for the `{name}` param do not match; generated={repr(generated_param.annotation)} custom={repr(custom_param.annotation)}"
            )
            continue

    if errors:
        raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors))
39+
40+
41+
@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
def test_create_and_run_poll_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
    """Verify `create_and_run_poll` mirrors the `create_and_run` signature."""
    if sync:
        checking_client = client
    else:
        checking_client = async_client

    threads = checking_client.beta.threads
    assert_signatures_in_sync(
        threads.create_and_run,
        threads.create_and_run_poll,
        # `stream` is intentionally absent from the polling helper
        exclude_params={"stream"},
    )
50+
51+
@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
def test_create_and_run_stream_method_definition_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
    """Verify `create_and_run_stream` mirrors the `create_and_run` signature."""
    if sync:
        checking_client = client
    else:
        checking_client = async_client

    threads = checking_client.beta.threads
    assert_signatures_in_sync(
        threads.create_and_run,
        threads.create_and_run_stream,
        # `stream` is intentionally absent from the streaming helper
        exclude_params={"stream"},
    )

0 commit comments

Comments
 (0)