.stats.yml: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-a9e988c1c50fb5eeb661ffe2eac4481591a946f668695afb91d52cb455adacf3.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-fc846baf5f12d27d0e53b795bb0f0d62b4e0296728ae26e637aef857fcbcc6f9.yml

src/prompt_foundry_python_sdk/types/prompt_configuration.py: 3 additions & 0 deletions

@@ -54,6 +54,9 @@ class Parameters(BaseModel):
     api_model_name: str = FieldInfo(alias="modelName")
     """Example: "gpt-3.5-turbo" """

+    api_model_provider: Literal["OPENAI"] = FieldInfo(alias="modelProvider")
+    """The provider of the provided model."""
+
     parallel_tool_calls: bool = FieldInfo(alias="parallelToolCalls")

     presence_penalty: float = FieldInfo(alias="presencePenalty")
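
The new response field is exposed as api_model_provider but populated from the camelCase modelProvider key in API JSON via a field alias. A minimal standalone sketch of that behavior in plain Pydantic v2 (assuming, as is typical for this generated code, that FieldInfo is simply pydantic's Field re-exported; the class below is a cut-down stand-in, not the SDK's full Parameters model):

from typing import Literal

from pydantic import BaseModel, Field

# Cut-down stand-in for the SDK's Parameters model: one field, same alias pattern.
class Parameters(BaseModel):
    api_model_provider: Literal["OPENAI"] = Field(alias="modelProvider")

# The camelCase wire key populates the snake_case attribute.
p = Parameters.model_validate({"modelProvider": "OPENAI"})
print(p.api_model_provider)  # -> OPENAI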

src/prompt_foundry_python_sdk/types/prompt_create_params.py: 3 additions & 0 deletions

@@ -64,6 +64,9 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """

+    model_provider: Required[Annotated[Literal["OPENAI"], PropertyInfo(alias="modelProvider")]]
+    """The provider of the provided model."""
+
     parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]

     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]

src/prompt_foundry_python_sdk/types/prompt_update_params.py: 3 additions & 0 deletions

@@ -64,6 +64,9 @@ class Parameters(TypedDict, total=False):
     model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
     """Example: "gpt-3.5-turbo" """

+    model_provider: Required[Annotated[Literal["OPENAI"], PropertyInfo(alias="modelProvider")]]
+    """The provider of the provided model."""
+
     parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]

     presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
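
Because the request-side Parameters TypedDicts mark model_provider as Required, existing callers must now pass it explicitly. A minimal sketch of a create call under that change, modeled on the test fixtures below; the PromptFoundry client and prompts.create method are taken from the tests, the environment variable name is an assumption, and the remaining required parameter keys are elided:

from prompt_foundry_python_sdk import PromptFoundry

# Assumes the API key is picked up from the environment
# (e.g. a PROMPT_FOUNDRY_API_KEY variable; the exact name is an assumption).
client = PromptFoundry()

prompt = client.prompts.create(
    messages=[],  # message fixtures elided; see the test file below
    name="example-prompt",
    parameters={
        "model_provider": "OPENAI",  # newly required; "OPENAI" is the only accepted value
        "model_name": "gpt-3.5-turbo",
        "response_format": "JSON",
        "temperature": 0,
        # ...the other required Parameters keys are omitted here for brevity
    },
)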

tests/api_resources/test_prompts.py: 14 additions & 0 deletions

@@ -122,6 +122,7 @@ def test_method_create(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -238,6 +239,7 @@ def test_raw_response_create(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -358,6 +360,7 @@ def test_streaming_response_create(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -481,6 +484,7 @@ def test_method_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -598,6 +602,7 @@ def test_raw_response_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -719,6 +724,7 @@ def test_streaming_response_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -843,6 +849,7 @@ def test_path_params_update(self, client: PromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1300,6 +1307,7 @@ async def test_method_create(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1416,6 +1424,7 @@ async def test_raw_response_create(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1536,6 +1545,7 @@ async def test_streaming_response_create(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1659,6 +1669,7 @@ async def test_method_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1776,6 +1787,7 @@ async def test_raw_response_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -1897,6 +1909,7 @@ async def test_streaming_response_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,
@@ -2021,6 +2034,7 @@ async def test_path_params_update(self, async_client: AsyncPromptFoundry) -> None:
             ],
             name="string",
             parameters={
+                "model_provider": "OPENAI",
                 "model_name": "string",
                 "response_format": "JSON",
                 "temperature": 0,