Skip to content

Commit fedb55e

Browse files
stainless-app[bot]Stainless Bot
authored and committed
feat(api): OpenAPI spec update via Stainless API (#226)
1 parent 415b23b commit fedb55e

13 files changed

+91
-84
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
configured_endpoints: 22
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-9cff8ea13f14bd0899df69243fe78b4f88d4d0172263aa260af1ea66a7d0484e.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-6c87a6d2f0a1447fab78657f8b44e2d1ea2c282d2c9f92458bcd25f543944c6e.yml

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -339,7 +339,7 @@ response = client.completion.with_raw_response.create(
339339
print(response.headers.get('X-My-Header'))
340340

341341
completion = response.parse() # get the object that `completion.create()` would have returned
342-
print(completion.message)
342+
print(completion.provider)
343343
```
344344

345345
These methods return an [`APIResponse`](https://github.com/prompt-foundry/python-sdk/tree/main/src/prompt_foundry_python_sdk/_response.py) object.

src/prompt_foundry_python_sdk/resources/prompts.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -230,9 +230,10 @@ def get_parameters(
230230
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
231231
) -> ModelParameters:
232232
"""
233-
Fetches the model configuration parameters for a specified prompt, including
234-
penalty settings, response format, and the model messages rendered with the
235-
given variables mapped to the set LLM provider.
233+
Fetches the configured model parameters and messages rendered with the provided
234+
variables mapped to the set LLM provider. This endpoint abstracts the need to
235+
handle mapping between different providers, while still allowing direct calls to
236+
the providers.
236237
237238
Args:
238239
append_messages: Appended to the end of the configured prompt messages before running the
@@ -477,9 +478,10 @@ async def get_parameters(
477478
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
478479
) -> ModelParameters:
479480
"""
480-
Fetches the model configuration parameters for a specified prompt, including
481-
penalty settings, response format, and the model messages rendered with the
482-
given variables mapped to the set LLM provider.
481+
Fetches the configured model parameters and messages rendered with the provided
482+
variables mapped to the set LLM provider. This endpoint abstracts the need to
483+
handle mapping between different providers, while still allowing direct calls to
484+
the providers.
483485
484486
Args:
485487
append_messages: Appended to the end of the configured prompt messages before running the

src/prompt_foundry_python_sdk/types/completion_create_params.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,15 +11,15 @@
1111
"CompletionCreateParams",
1212
"AppendMessage",
1313
"AppendMessageContent",
14-
"AppendMessageContentTextContentBlockSchema",
14+
"AppendMessageContentTextContentBlock",
1515
"AppendMessageContentImageBase64ContentBlock",
1616
"AppendMessageContentToolCallContentBlock",
1717
"AppendMessageContentToolCallContentBlockToolCall",
1818
"AppendMessageContentToolCallContentBlockToolCallFunction",
1919
"AppendMessageContentToolResultContentBlock",
2020
"OverrideMessage",
2121
"OverrideMessageContent",
22-
"OverrideMessageContentTextContentBlockSchema",
22+
"OverrideMessageContentTextContentBlock",
2323
"OverrideMessageContentImageBase64ContentBlock",
2424
"OverrideMessageContentToolCallContentBlock",
2525
"OverrideMessageContentToolCallContentBlockToolCall",
@@ -48,7 +48,7 @@ class CompletionCreateParams(TypedDict, total=False):
4848
"""The template variables added to the prompt when executing the prompt."""
4949

5050

51-
class AppendMessageContentTextContentBlockSchema(TypedDict, total=False):
51+
class AppendMessageContentTextContentBlock(TypedDict, total=False):
5252
text: Required[str]
5353

5454
type: Required[Literal["TEXT"]]
@@ -100,7 +100,7 @@ class AppendMessageContentToolResultContentBlock(TypedDict, total=False):
100100

101101

102102
AppendMessageContent: TypeAlias = Union[
103-
AppendMessageContentTextContentBlockSchema,
103+
AppendMessageContentTextContentBlock,
104104
AppendMessageContentImageBase64ContentBlock,
105105
AppendMessageContentToolCallContentBlock,
106106
AppendMessageContentToolResultContentBlock,
@@ -113,7 +113,7 @@ class AppendMessage(TypedDict, total=False):
113113
role: Required[Literal["assistant", "system", "tool", "user"]]
114114

115115

116-
class OverrideMessageContentTextContentBlockSchema(TypedDict, total=False):
116+
class OverrideMessageContentTextContentBlock(TypedDict, total=False):
117117
text: Required[str]
118118

119119
type: Required[Literal["TEXT"]]
@@ -165,7 +165,7 @@ class OverrideMessageContentToolResultContentBlock(TypedDict, total=False):
165165

166166

167167
OverrideMessageContent: TypeAlias = Union[
168-
OverrideMessageContentTextContentBlockSchema,
168+
OverrideMessageContentTextContentBlock,
169169
OverrideMessageContentImageBase64ContentBlock,
170170
OverrideMessageContentToolCallContentBlock,
171171
OverrideMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/completion_create_response.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
"CompletionCreateResponse",
1313
"Message",
1414
"MessageContent",
15-
"MessageContentTextContentBlockSchema",
15+
"MessageContentTextContentBlock",
1616
"MessageContentImageBase64ContentBlock",
1717
"MessageContentToolCallContentBlock",
1818
"MessageContentToolCallContentBlockToolCall",
@@ -22,7 +22,7 @@
2222
]
2323

2424

25-
class MessageContentTextContentBlockSchema(BaseModel):
25+
class MessageContentTextContentBlock(BaseModel):
2626
text: str
2727

2828
type: Literal["TEXT"]
@@ -75,7 +75,7 @@ class MessageContentToolResultContentBlock(BaseModel):
7575

7676
MessageContent: TypeAlias = Annotated[
7777
Union[
78-
MessageContentTextContentBlockSchema,
78+
MessageContentTextContentBlock,
7979
MessageContentImageBase64ContentBlock,
8080
MessageContentToolCallContentBlock,
8181
MessageContentToolResultContentBlock,
@@ -108,4 +108,9 @@ class CompletionCreateResponse(BaseModel):
108108
message: Message
109109
"""The completion message generated by the model."""
110110

111+
name: str
112+
113+
provider: Literal["ANTHROPIC", "OPENAI"]
114+
"""The LLM model provider."""
115+
111116
stats: Stats

src/prompt_foundry_python_sdk/types/evaluation.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
"Evaluation",
1313
"AppendedMessage",
1414
"AppendedMessageContent",
15-
"AppendedMessageContentTextContentBlockSchema",
15+
"AppendedMessageContentTextContentBlock",
1616
"AppendedMessageContentImageBase64ContentBlock",
1717
"AppendedMessageContentToolCallContentBlock",
1818
"AppendedMessageContentToolCallContentBlockToolCall",
@@ -21,7 +21,7 @@
2121
]
2222

2323

24-
class AppendedMessageContentTextContentBlockSchema(BaseModel):
24+
class AppendedMessageContentTextContentBlock(BaseModel):
2525
text: str
2626

2727
type: Literal["TEXT"]
@@ -74,7 +74,7 @@ class AppendedMessageContentToolResultContentBlock(BaseModel):
7474

7575
AppendedMessageContent: TypeAlias = Annotated[
7676
Union[
77-
AppendedMessageContentTextContentBlockSchema,
77+
AppendedMessageContentTextContentBlock,
7878
AppendedMessageContentImageBase64ContentBlock,
7979
AppendedMessageContentToolCallContentBlock,
8080
AppendedMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/evaluation_create_params.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
"EvaluationCreateParams",
1212
"AppendedMessage",
1313
"AppendedMessageContent",
14-
"AppendedMessageContentTextContentBlockSchema",
14+
"AppendedMessageContentTextContentBlock",
1515
"AppendedMessageContentImageBase64ContentBlock",
1616
"AppendedMessageContentToolCallContentBlock",
1717
"AppendedMessageContentToolCallContentBlockToolCall",
@@ -36,7 +36,7 @@ class EvaluationCreateParams(TypedDict, total=False):
3636
"""How heavily to weigh the evaluation within the prompt."""
3737

3838

39-
class AppendedMessageContentTextContentBlockSchema(TypedDict, total=False):
39+
class AppendedMessageContentTextContentBlock(TypedDict, total=False):
4040
text: Required[str]
4141

4242
type: Required[Literal["TEXT"]]
@@ -88,7 +88,7 @@ class AppendedMessageContentToolResultContentBlock(TypedDict, total=False):
8888

8989

9090
AppendedMessageContent: TypeAlias = Union[
91-
AppendedMessageContentTextContentBlockSchema,
91+
AppendedMessageContentTextContentBlock,
9292
AppendedMessageContentImageBase64ContentBlock,
9393
AppendedMessageContentToolCallContentBlock,
9494
AppendedMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/evaluation_update_params.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
"EvaluationUpdateParams",
1212
"AppendedMessage",
1313
"AppendedMessageContent",
14-
"AppendedMessageContentTextContentBlockSchema",
14+
"AppendedMessageContentTextContentBlock",
1515
"AppendedMessageContentImageBase64ContentBlock",
1616
"AppendedMessageContentToolCallContentBlock",
1717
"AppendedMessageContentToolCallContentBlockToolCall",
@@ -36,7 +36,7 @@ class EvaluationUpdateParams(TypedDict, total=False):
3636
"""How heavily to weigh the evaluation within the prompt."""
3737

3838

39-
class AppendedMessageContentTextContentBlockSchema(TypedDict, total=False):
39+
class AppendedMessageContentTextContentBlock(TypedDict, total=False):
4040
text: Required[str]
4141

4242
type: Required[Literal["TEXT"]]
@@ -88,7 +88,7 @@ class AppendedMessageContentToolResultContentBlock(TypedDict, total=False):
8888

8989

9090
AppendedMessageContent: TypeAlias = Union[
91-
AppendedMessageContentTextContentBlockSchema,
91+
AppendedMessageContentTextContentBlock,
9292
AppendedMessageContentImageBase64ContentBlock,
9393
AppendedMessageContentToolCallContentBlock,
9494
AppendedMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/prompt_configuration.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
"PromptConfiguration",
1313
"Message",
1414
"MessageContent",
15-
"MessageContentTextContentBlockSchema",
15+
"MessageContentTextContentBlock",
1616
"MessageContentImageBase64ContentBlock",
1717
"MessageContentToolCallContentBlock",
1818
"MessageContentToolCallContentBlockToolCall",
@@ -23,7 +23,7 @@
2323
]
2424

2525

26-
class MessageContentTextContentBlockSchema(BaseModel):
26+
class MessageContentTextContentBlock(BaseModel):
2727
text: str
2828

2929
type: Literal["TEXT"]
@@ -76,7 +76,7 @@ class MessageContentToolResultContentBlock(BaseModel):
7676

7777
MessageContent: TypeAlias = Annotated[
7878
Union[
79-
MessageContentTextContentBlockSchema,
79+
MessageContentTextContentBlock,
8080
MessageContentImageBase64ContentBlock,
8181
MessageContentToolCallContentBlock,
8282
MessageContentToolResultContentBlock,
@@ -98,17 +98,17 @@ class Parameters(BaseModel):
9898
max_tokens: Optional[float] = FieldInfo(alias="maxTokens", default=None)
9999
"""Example: 100"""
100100

101-
api_model_name: str = FieldInfo(alias="modelName")
102-
"""Example: "gpt-3.5-turbo" """
103-
104-
api_model_provider: Literal["ANTHROPIC", "OPENAI"] = FieldInfo(alias="modelProvider")
105-
"""The provider of the provided model."""
101+
name: str
102+
"""The name of the model for the provider."""
106103

107104
parallel_tool_calls: bool = FieldInfo(alias="parallelToolCalls")
108105

109106
presence_penalty: float = FieldInfo(alias="presencePenalty")
110107
"""Example: 0"""
111108

109+
provider: Literal["ANTHROPIC", "OPENAI"]
110+
"""The LLM model provider."""
111+
112112
response_format: Literal["JSON", "TEXT"] = FieldInfo(alias="responseFormat")
113113
"""Example: PromptResponseFormat.TEXT"""
114114

src/prompt_foundry_python_sdk/types/prompt_create_params.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
"PromptCreateParams",
1212
"Message",
1313
"MessageContent",
14-
"MessageContentTextContentBlockSchema",
14+
"MessageContentTextContentBlock",
1515
"MessageContentImageBase64ContentBlock",
1616
"MessageContentToolCallContentBlock",
1717
"MessageContentToolCallContentBlockToolCall",
@@ -32,7 +32,7 @@ class PromptCreateParams(TypedDict, total=False):
3232
tools: Required[Iterable[Tool]]
3333

3434

35-
class MessageContentTextContentBlockSchema(TypedDict, total=False):
35+
class MessageContentTextContentBlock(TypedDict, total=False):
3636
text: Required[str]
3737

3838
type: Required[Literal["TEXT"]]
@@ -84,7 +84,7 @@ class MessageContentToolResultContentBlock(TypedDict, total=False):
8484

8585

8686
MessageContent: TypeAlias = Union[
87-
MessageContentTextContentBlockSchema,
87+
MessageContentTextContentBlock,
8888
MessageContentImageBase64ContentBlock,
8989
MessageContentToolCallContentBlock,
9090
MessageContentToolResultContentBlock,
@@ -106,17 +106,17 @@ class Parameters(TypedDict, total=False):
106106
max_tokens: Required[Annotated[Optional[float], PropertyInfo(alias="maxTokens")]]
107107
"""Example: 100"""
108108

109-
model_name: Required[Annotated[str, PropertyInfo(alias="modelName")]]
110-
"""Example: "gpt-3.5-turbo" """
111-
112-
model_provider: Required[Annotated[Literal["ANTHROPIC", "OPENAI"], PropertyInfo(alias="modelProvider")]]
113-
"""The provider of the provided model."""
109+
name: Required[str]
110+
"""The name of the model for the provider."""
114111

115112
parallel_tool_calls: Required[Annotated[bool, PropertyInfo(alias="parallelToolCalls")]]
116113

117114
presence_penalty: Required[Annotated[float, PropertyInfo(alias="presencePenalty")]]
118115
"""Example: 0"""
119116

117+
provider: Required[Literal["ANTHROPIC", "OPENAI"]]
118+
"""The LLM model provider."""
119+
120120
response_format: Required[Annotated[Literal["JSON", "TEXT"], PropertyInfo(alias="responseFormat")]]
121121
"""Example: PromptResponseFormat.TEXT"""
122122

0 commit comments

Comments
 (0)