Commit 7af0da4

feat(api): OpenAPI spec update via Stainless API (#102)

1 parent d1a859c

5 files changed: +24 -1 lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-a9e988c1c50fb5eeb661ffe2eac4481591a946f668695afb91d52cb455adacf3.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-fc846baf5f12d27d0e53b795bb0f0d62b4e0296728ae26e637aef857fcbcc6f9.yml

src/prompt_foundry_python_sdk/types/prompt_configuration.py

Lines changed: 3 additions & 0 deletions

@@ -54,6 +54,9 @@ class Parameters(BaseModel):
     api_model_name: str = FieldInfo(alias="modelName")
     """Example: "gpt-3.5-turbo" """

+    api_model_provider: Literal["OPENAI"] = FieldInfo(alias="modelProvider")
+    """The provider of the provided model."""
+
     parallel_tool_calls: bool = FieldInfo(alias="parallelToolCalls")

     presence_penalty: float = FieldInfo(alias="presencePenalty")
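
The new field follows the same alias pattern as its neighbors: the Python attribute carries an api_ prefix (presumably to sidestep pydantic v2's protected model_ namespace), while the wire key stays camelCase. A minimal, self-contained sketch of that pattern, assuming pydantic v2 and that FieldInfo is pydantic's Field imported under another name:

```python
# Minimal sketch of the alias pattern from prompt_configuration.py, assuming
# pydantic v2 and that FieldInfo is pydantic's Field under another name.
# The class body is trimmed to the two fields shown in the diff.
from typing import Literal

from pydantic import BaseModel, Field as FieldInfo


class Parameters(BaseModel):
    api_model_name: str = FieldInfo(alias="modelName")
    """Example: "gpt-3.5-turbo" """

    api_model_provider: Literal["OPENAI"] = FieldInfo(alias="modelProvider")
    """The provider of the provided model."""


# Validation reads the camelCase wire keys via the aliases.
params = Parameters.model_validate(
    {"modelName": "gpt-3.5-turbo", "modelProvider": "OPENAI"}
)
assert params.api_model_provider == "OPENAI"

# Serializing with by_alias=True restores the camelCase wire names.
print(params.model_dump(by_alias=True))
# {'modelName': 'gpt-3.5-turbo', 'modelProvider': 'OPENAI'}
```

Validating a camelCase payload and dumping with by_alias=True round-trips the wire names, which is all the alias machinery has to guarantee.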

src/prompt_foundry_python_sdk/types/prompt_create_params.py

Lines changed: 3 additions & 0 deletions

@@ -64,6 +64,9 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """

+    model_provider: Required[Annotated[Literal["OPENAI"], PropertyInfo(alias="modelProvider")]]
+    """The provider of the provided model."""
+
     parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]

     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
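
On the request side the same field is a Required TypedDict key whose Annotated metadata carries the SDK's PropertyInfo alias marker. A rough sketch of the caller-facing shape, with a plain string standing in for the SDK-internal PropertyInfo so the snippet runs on its own:

```python
# Rough sketch of the request-side shape from prompt_create_params.py.
# PropertyInfo is internal to the SDK; the Annotated metadata below is a
# plain string placeholder so this snippet is self-contained.
from typing import Literal

from typing_extensions import Annotated, Required, TypedDict


class Parameters(TypedDict, total=False):
    model_name: Required[Annotated[str, "alias: modelName"]]
    model_provider: Required[Annotated[Literal["OPENAI"], "alias: modelProvider"]]
    parallel_tool_calls: Required[Annotated[bool, "alias: parallelToolCalls"]]


# Callers write snake_case keys; the client renames them to the camelCase
# wire names (modelName, modelProvider, ...) when building the request.
params: Parameters = {
    "model_name": "gpt-3.5-turbo",
    "model_provider": "OPENAI",
    "parallel_tool_calls": False,
}
```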

src/prompt_foundry_python_sdk/types/prompt_update_params.py

Lines changed: 3 additions & 0 deletions

@@ -64,6 +64,9 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """

+    model_provider: Required[Annotated[Literal["OPENAI"], PropertyInfo(alias="modelProvider")]]
+    """The provider of the provided model."""
+
     parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]

     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
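
prompt_update_params.py mirrors prompt_create_params.py key for key, so one illustration of the alias translation covers both. The mapping below is a stand-in for the SDK's PropertyInfo-driven serialization, not its real internals:

```python
# Stand-in for the SDK's alias-driven serialization (the real client reads
# PropertyInfo metadata; this explicit mapping is illustrative only).
ALIASES = {
    "model_name": "modelName",
    "model_provider": "modelProvider",
    "parallel_tool_calls": "parallelToolCalls",
}


def to_wire(params: dict) -> dict:
    """Rename snake_case SDK keys to the camelCase names the API expects."""
    return {ALIASES.get(key, key): value for key, value in params.items()}


assert to_wire({"model_provider": "OPENAI"}) == {"modelProvider": "OPENAI"}
```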

tests/api_resources/test_prompts.py

Lines changed: 14 additions & 0 deletions

@@ -122,6 +122,7 @@ def test_method_create(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -238,6 +239,7 @@ def test_raw_response_create(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -358,6 +360,7 @@ def test_streaming_response_create(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -481,6 +484,7 @@ def test_method_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -598,6 +602,7 @@ def test_raw_response_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -719,6 +724,7 @@ def test_streaming_response_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -843,6 +849,7 @@ def test_path_params_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1300,6 +1307,7 @@ async def test_method_create(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1416,6 +1424,7 @@ async def test_raw_response_create(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1536,6 +1545,7 @@ async def test_streaming_response_create(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1659,6 +1669,7 @@ async def test_method_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1776,6 +1787,7 @@ async def test_raw_response_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1897,6 +1909,7 @@ async def test_streaming_response_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -2021,6 +2034,7 @@ async def test_path_params_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
