
Commit f4194ed

feat(api): OpenAPI spec update via Stainless API (#167)

Author: Stainless Bot
Parent: c46a704

3 files changed: 23 additions & 23 deletions

.github/workflows/release-doctor.yml

Lines changed: 2 additions & 0 deletions
@@ -1,6 +1,8 @@
 name: Release Doctor
 on:
   pull_request:
+    branches:
+      - main
   workflow_dispatch:

 jobs:

.stats.yml

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-0042044f00457ff0bf65c07207eea291e4df838e2bdab4dfc602eec8d3517c42.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-441451c27073e45d1bdc832c5b66c26d90bd185bd94bd461b91257fbf0987ef2.yml

README.md

Lines changed: 20 additions & 22 deletions
@@ -31,32 +31,30 @@ npm install openai
 Import the OpenAI and Prompt Foundry SDKs
 
 ```js
-import PromptFoundry from "@prompt-foundry/typescript-sdk";
-import { Configuration, OpenAIApi } from "openai";
+import PromptFoundry from '@prompt-foundry/typescript-sdk';
+import { Configuration, OpenAIApi } from 'openai';
 
 // Initialize Prompt Foundry SDK with your API key
 const promptFoundry = new PromptFoundry({
-  apiKey: process.env["PROMPT_FOUNDRY_API_KEY"],
+  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'],
 });
 
 // Initialize OpenAI SDK with your API key
 const configuration = new Configuration({
-  apiKey: process.env["OPENAI_API_KEY"],
+  apiKey: process.env['OPENAI_API_KEY'],
 });
 const openai = new OpenAIApi(configuration);
 
 async function main() {
   // Retrieve model parameters for the prompt
-  const modelParameters = await promptFoundry.prompts.getParameters("1212121", {
-    variables: { hello: "world" },
+  const modelParameters = await promptFoundry.prompts.getParameters('1212121', {
+    variables: { hello: 'world' },
   });
 
   // check if provider is Open AI
-  if (modelParameters.provider === "openai") {
+  if (modelParameters.provider === 'openai') {
     // Use the retrieved parameters to create a chat completion request
-    const modelResponse = await openai.chat.completions.create(
-      modelParameters.parameters
-    );
+    const modelResponse = await openai.chat.completions.create(modelParameters.parameters);
 
     // Print the response from OpenAI
     console.log(modelResponse.data);
@@ -77,27 +75,27 @@ npm install @anthropic-ai/sdk
 Import the Anthropic and Prompt Foundry SDKs
 
 ```js
-import PromptFoundry from "@prompt-foundry/typescript-sdk";
-import Anthropic from "@anthropic-ai/sdk";
+import PromptFoundry from '@prompt-foundry/typescript-sdk';
+import Anthropic from '@anthropic-ai/sdk';
 
 // Initialize Prompt Foundry SDK with your API key
 const promptFoundry = new PromptFoundry({
-  apiKey: process.env["PROMPT_FOUNDRY_API_KEY"],
+  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'],
 });
 
 // Initialize Anthropic SDK with your API key
 const anthropic = new Anthropic({
-  apiKey: process.env["ANTHROPIC_API_KEY"],
+  apiKey: process.env['ANTHROPIC_API_KEY'],
 });
 
 async function main() {
   // Retrieve model parameters for the prompt
-  const modelParameters = await promptFoundry.prompts.getParameters("1212121", {
-    variables: { hello: "world" },
+  const modelParameters = await promptFoundry.prompts.getParameters('1212121', {
+    variables: { hello: 'world' },
   });
 
   // check if provider is Open AI
-  if (modelParameters.provider === "anthropic") {
+  if (modelParameters.provider === 'anthropic') {
     // Use the retrieved parameters to create a chat completion request
     const message = await anthropic.messages.create(modelParameters.parameters);
 
@@ -117,7 +115,7 @@ This library includes TypeScript definitions for all request params and response
 ```ts
 import PromptFoundry from '@prompt-foundry/typescript-sdk';
 
-const promptFoundry = new PromptFoundry({
+const client = new PromptFoundry({
   apiKey: process.env['PROMPT_FOUNDRY_API_KEY'], // This is the default and can be omitted
 });
 
@@ -177,7 +175,7 @@ You can use the `maxRetries` option to configure or disable this:
 <!-- prettier-ignore -->
 ```js
 // Configure the default for all requests:
-const promptFoundry = new PromptFoundry({
+const client = new PromptFoundry({
   maxRetries: 0, // default is 2
 });
 
@@ -194,7 +192,7 @@ Requests time out after 1 minute by default. You can configure this with a `time
 <!-- prettier-ignore -->
 ```ts
 // Configure the default for all requests:
-const promptFoundry = new PromptFoundry({
+const client = new PromptFoundry({
   timeout: 20 * 1000, // 20 seconds (default is 1 minute)
 });
 
@@ -218,7 +216,7 @@ You can also use the `.withResponse()` method to get the raw `Response` along wi
 
 <!-- prettier-ignore -->
 ```ts
-const promptFoundry = new PromptFoundry();
+const client = new PromptFoundry();
 
 const response = await promptFoundry.prompts.getParameters('1212121').asResponse();
 console.log(response.headers.get('X-My-Header'));
@@ -327,7 +325,7 @@ import http from 'http';
 import { HttpsProxyAgent } from 'https-proxy-agent';
 
 // Configure the default for all requests:
-const promptFoundry = new PromptFoundry({
+const client = new PromptFoundry({
   httpAgent: new HttpsProxyAgent(process.env.PROXY_URL),
 });
 
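Taken together, the README hunks switch the example code to single quotes and rename the example client variable from `promptFoundry` to `client`. As a rough sketch only (not part of this commit), the initialization style the updated snippets converge on looks like the following, reusing the environment variable and the example prompt ID shown in the diff:

```ts
import PromptFoundry from '@prompt-foundry/typescript-sdk';

// Single-quoted strings and a `client` variable name, matching the updated README style.
const client = new PromptFoundry({
  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'], // This is the default and can be omitted
});

// Illustrative call, mirroring the prompt ID and variables used throughout the README examples.
const modelParameters = await client.prompts.getParameters('1212121', {
  variables: { hello: 'world' },
});
```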
