
Commit 8d222e1

stainless-app[bot] authored and meorphis committed
feat(api): OpenAPI spec update via Stainless API (#63)
1 parent 9e7efc7 commit 8d222e1

5 files changed: +21 -1 lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-bb0c2179ddf83834412606080e40f875436162cb81210f5af52c5f14aeb9c35b.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-f30a850ec208f2c5cde3101faff73304656182d65e1f101e104fc935f5d32c18.yml

src/prompt_foundry_python_sdk/types/prompt_configuration.py

Lines changed: 2 additions & 0 deletions
@@ -54,6 +54,8 @@ class Parameters(BaseModel):
     api_model_name: str = FieldInfo(alias="modelName")
     """Example: "gpt-3.5-turbo" """

+    parallel_tool_calls: bool = FieldInfo(alias="parallelToolCalls")
+
     presence_penalty: float = FieldInfo(alias="presencePenalty")
     """Example: 0"""
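
After this change, the Parameters model on a prompt configuration exposes the new flag as the snake_case attribute parallel_tool_calls, aliased from the camelCase parallelToolCalls sent over the wire. A minimal reading sketch, not part of this commit: the client constructor picking up PROMPT_FOUNDRY_API_KEY, the prompts.get() method name, the placeholder id, and the parameters attribute on the returned object are assumptions; only the field name and its alias come from the diff above.

from prompt_foundry_python_sdk import PromptFoundry

client = PromptFoundry()  # assumed: reads PROMPT_FOUNDRY_API_KEY from the environment

# `get` and the id are hypothetical placeholders; `parallel_tool_calls` is the field added in this commit.
configuration = client.prompts.get("1212121")
print(configuration.parameters.parallel_tool_calls)  # -> True or False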

src/prompt_foundry_python_sdk/types/prompt_create_params.py

Lines changed: 2 additions & 0 deletions
@@ -64,6 +64,8 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """

+    parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]
+
     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
     """Example: 0"""
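
Because the new key is marked Required in the create params, it now sits alongside the other model parameters in the payload exercised by the prompt create and update tests below (the update params receive the identical change in the next file). A hedged sketch of that payload: the keys mirror this diff and the test hunks further down, the values are placeholders, and any other Required fields of Parameters not shown in this commit would still need to be supplied in a real call.

# Placeholder values; only "parallel_tool_calls" is new in this commit
# (sent over the wire as "parallelToolCalls").
parameters = {
    "model_name": "gpt-3.5-turbo",
    "presence_penalty": 0,
    "seed": 0,
    "tool_choice": "string",
    "stream": True,
    "parallel_tool_calls": True,
}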

src/prompt_foundry_python_sdk/types/prompt_update_params.py

Lines changed: 2 additions & 0 deletions
@@ -64,6 +64,8 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """

+    parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]
+
     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
     """Example: 0"""

tests/api_resources/test_prompts.py

Lines changed: 14 additions & 0 deletions
@@ -132,6 +132,7 @@ def test_method_create(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -247,6 +248,7 @@ def test_raw_response_create(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -366,6 +368,7 @@ def test_streaming_response_create(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 ) as response:
@@ -488,6 +491,7 @@ def test_method_update(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -604,6 +608,7 @@ def test_raw_response_update(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -724,6 +729,7 @@ def test_streaming_response_update(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 ) as response:
@@ -847,6 +853,7 @@ def test_path_params_update(self, client: PromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -1303,6 +1310,7 @@ async def test_method_create(self, async_client: AsyncPromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -1418,6 +1426,7 @@ async def test_raw_response_create(self, async_client: AsyncPromptFoundry) -> No
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -1537,6 +1546,7 @@ async def test_streaming_response_create(self, async_client: AsyncPromptFoundry)
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 ) as response:
@@ -1659,6 +1669,7 @@ async def test_method_update(self, async_client: AsyncPromptFoundry) -> None:
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -1775,6 +1786,7 @@ async def test_raw_response_update(self, async_client: AsyncPromptFoundry) -> No
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )
@@ -1895,6 +1907,7 @@ async def test_streaming_response_update(self, async_client: AsyncPromptFoundry)
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 ) as response:
@@ -2018,6 +2031,7 @@ async def test_path_params_update(self, async_client: AsyncPromptFoundry) -> Non
     "seed": 0,
     "tool_choice": "string",
     "stream": True,
+    "parallel_tool_calls": True,
 },
 tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
 )

0 commit comments
