
Commit d8983ac

Merge pull request #19 from prompt-foundry/release-please--branches--main--changes--next--components--typescript-sdk

release: 1.3.0
anthonyjgrove authored Jun 13, 2024
2 parents c6cbf8f + 165a9ae commit d8983ac
Showing 8 changed files with 220 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "1.2.0"
".": "1.3.0"
}
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 21
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-265943fe7b3601e5408bb1937caf6d9de132f59dae77150042441ff4896d9a73.yml
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-0b67f25d6d4fdd34057ac5f57434d488db64fe65f0b2c4729b97232c64927b13.yml
10 changes: 10 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,15 @@
# Changelog

## 1.3.0 (2024-06-13)

Full Changelog: [v1.2.0...v1.3.0](https://github.com/prompt-foundry/typescript-sdk/compare/v1.2.0...v1.3.0)

### Features

* **api:** OpenAPI spec update via Stainless API ([#18](https://github.com/prompt-foundry/typescript-sdk/issues/18)) ([8f9f9f4](https://github.com/prompt-foundry/typescript-sdk/commit/8f9f9f443597e62eba64eb22b38d0a998b3224cf))
* **api:** OpenAPI spec update via Stainless API ([#20](https://github.com/prompt-foundry/typescript-sdk/issues/20)) ([44e6687](https://github.com/prompt-foundry/typescript-sdk/commit/44e6687efc7d44caeca0eb279a5f9d22f4e95167))
* **api:** update via SDK Studio ([#21](https://github.com/prompt-foundry/typescript-sdk/issues/21)) ([73e5840](https://github.com/prompt-foundry/typescript-sdk/commit/73e584009e745522f6c749e6841235de4d902022))

## 1.2.0 (2024-06-13)

Full Changelog: [v1.1.0...v1.2.0](https://github.com/prompt-foundry/typescript-sdk/compare/v1.1.0...v1.2.0)
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@prompt-foundry/typescript-sdk",
"version": "1.2.0",
"version": "1.3.0",
"description": "The official TypeScript library for the Prompt Foundry API",
"author": "Prompt Foundry <[email protected]>",
"types": "dist/index.d.ts",
4 changes: 2 additions & 2 deletions src/index.ts
@@ -79,7 +79,7 @@ export class PromptFoundry extends Core.APIClient {
* API Client for interfacing with the Prompt Foundry API.
*
* @param {string | undefined} [opts.apiKey=process.env['PROMPT_FOUNDRY_API_KEY'] ?? undefined]
* @param {string} [opts.baseURL=process.env['PROMPT_FOUNDRY_BASE_URL'] ?? https://api.promptfoundry.ai/sdk/v1] - Override the default base URL for the API.
* @param {string} [opts.baseURL=process.env['PROMPT_FOUNDRY_BASE_URL'] ?? https://api.promptfoundry.ai] - Override the default base URL for the API.
* @param {number} [opts.timeout=1 minute] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
* @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections.
* @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
@@ -101,7 +101,7 @@ export class PromptFoundry extends Core.APIClient {
const options: ClientOptions = {
apiKey,
...opts,
baseURL: baseURL || `https://api.promptfoundry.ai/sdk/v1`,
baseURL: baseURL || `https://api.promptfoundry.ai`,
};

if (Core.isRunningInBrowser()) {
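
For context, a minimal sketch (not part of this diff) of how the new default base URL behaves from a caller's perspective; the default-export import form and the override URL below are assumptions for illustration:

import PromptFoundry from '@prompt-foundry/typescript-sdk'; // import form assumed

// With no baseURL option and no PROMPT_FOUNDRY_BASE_URL env var, the client
// now defaults to https://api.promptfoundry.ai (previously https://api.promptfoundry.ai/sdk/v1).
const client = new PromptFoundry({ apiKey: process.env['PROMPT_FOUNDRY_API_KEY'] });

// An explicit baseURL still takes precedence over the default.
const custom = new PromptFoundry({
  apiKey: process.env['PROMPT_FOUNDRY_API_KEY'],
  baseURL: 'https://proxy.example.com/sdk/v1', // hypothetical override
});
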
215 changes: 202 additions & 13 deletions src/resources/prompts.ts
@@ -70,19 +70,23 @@ export class Prompts extends APIResource {
}

export interface ModelParameters {
parameters: ModelParameters.Parameters;
name: string;

parameters:
| ModelParameters.OpenAICreateCompletionNonStreamingRequest
| ModelParameters.OpenAICreateCompletionStreamingRequest;

provider: 'openai';
}

export namespace ModelParameters {
export interface Parameters {
export interface OpenAICreateCompletionNonStreamingRequest {
messages: Array<
| Parameters.OpenAIChatCompletionRequestSystemMessage
| Parameters.OpenAIChatCompletionRequestUserMessage
| Parameters.OpenAIChatCompletionRequestAssistantMessage
| Parameters.OpenAIChatCompletionRequestToolMessage
| Parameters.OpenAIChatCompletionRequestFunctionMessage
| OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestSystemMessage
| OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestUserMessage
| OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestAssistantMessage
| OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestToolMessage
| OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestFunctionMessage
>;

model: string;
@@ -101,21 +105,206 @@

presence_penalty?: number | null;

response_format?: Parameters.ResponseFormat;
response_format?: OpenAICreateCompletionNonStreamingRequest.ResponseFormat;

seed?: number | null;

stop?: string | Array<string>;

stream?: boolean | null;
stream?: false | null;

stream_options?: unknown | null;

temperature?: number | null;

tool_choice?:
| 'none'
| 'auto'
| 'required'
| OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionNamedToolChoice;

tools?: Array<OpenAICreateCompletionNonStreamingRequest.Tool>;

top_logprobs?: number | null;

top_p?: number | null;

user?: string;
}

export namespace OpenAICreateCompletionNonStreamingRequest {
export interface OpenAIChatCompletionRequestSystemMessage {
content: string;

role: 'system';

name?: string;
}

export interface OpenAIChatCompletionRequestUserMessage {
content:
| string
| Array<
| OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartText
| OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartImage
>;

role: 'user';

name?: string;
}

export namespace OpenAIChatCompletionRequestUserMessage {
export interface OpenAIChatCompletionRequestMessageContentPartText {
text: string;

type: 'text';
}

export interface OpenAIChatCompletionRequestMessageContentPartImage {
image_url: OpenAIChatCompletionRequestMessageContentPartImage.ImageURL;

type: 'image_url';
}

export namespace OpenAIChatCompletionRequestMessageContentPartImage {
export interface ImageURL {
url: string;

detail?: 'auto' | 'low' | 'high';
}
}
}

export interface OpenAIChatCompletionRequestAssistantMessage {
role: 'assistant';

content?: string | null;

function_call?: OpenAIChatCompletionRequestAssistantMessage.FunctionCall | null;

name?: string;

tool_calls?: Array<OpenAIChatCompletionRequestAssistantMessage.ToolCall>;
}

export namespace OpenAIChatCompletionRequestAssistantMessage {
export interface FunctionCall {
arguments: string;

name: string;
}

export interface ToolCall {
id: string;

function: ToolCall.Function;

type: 'function';
}

export namespace ToolCall {
export interface Function {
arguments: string;

name: string;
}
}
}

export interface OpenAIChatCompletionRequestToolMessage {
content: string;

role: 'tool';

tool_call_id: string;
}

export interface OpenAIChatCompletionRequestFunctionMessage {
content: string | null;

name: string;

role: 'function';
}

export interface ResponseFormat {
type?: 'text' | 'json_object';
}

export interface OpenAIChatCompletionNamedToolChoice {
function: OpenAIChatCompletionNamedToolChoice.Function;

type: 'function';
}

export namespace OpenAIChatCompletionNamedToolChoice {
export interface Function {
name: string;
}
}

export interface Tool {
function: Tool.Function;

type: 'function';
}

export namespace Tool {
export interface Function {
name: string;

description?: string;

parameters?: Record<string, unknown>;
}
}
}

export interface OpenAICreateCompletionStreamingRequest {
messages: Array<
| OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestSystemMessage
| OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestUserMessage
| OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestAssistantMessage
| OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestToolMessage
| OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestFunctionMessage
>;

model: string;

stream: true;

frequency_penalty?: number | null;

logit_bias?: Record<string, number> | null;

logprobs?: boolean | null;

max_tokens?: number | null;

n?: number | null;

parallel_tool_calls?: boolean;

presence_penalty?: number | null;

response_format?: OpenAICreateCompletionStreamingRequest.ResponseFormat;

seed?: number | null;

stop?: string | Array<string>;

stream_options?: Parameters.StreamOptions | null;
stream_options?: OpenAICreateCompletionStreamingRequest.StreamOptions | null;

temperature?: number | null;

tool_choice?: 'none' | 'auto' | 'required' | Parameters.OpenAIChatCompletionNamedToolChoice;
tool_choice?:
| 'none'
| 'auto'
| 'required'
| OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionNamedToolChoice;

tools?: Array<Parameters.Tool>;
tools?: Array<OpenAICreateCompletionStreamingRequest.Tool>;

top_logprobs?: number | null;

@@ -124,7 +313,7 @@
user?: string;
}

export namespace Parameters {
export namespace OpenAICreateCompletionStreamingRequest {
export interface OpenAIChatCompletionRequestSystemMessage {
content: string;

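
To illustrate the reshaped prompts types, a sketch (not part of this diff) of a ModelParameters value using the new non-streaming request shape; the import path, prompt name, model id, and message text are placeholders:

import type { ModelParameters } from '@prompt-foundry/typescript-sdk/resources/prompts'; // import path assumed

const params: ModelParameters = {
  name: 'my-prompt', // placeholder
  provider: 'openai',
  parameters: {
    // Matches OpenAICreateCompletionNonStreamingRequest: stream is false or omitted.
    // A streaming payload would instead use OpenAICreateCompletionStreamingRequest with stream: true.
    model: 'gpt-4o', // placeholder model id
    stream: false,
    messages: [
      { role: 'system', content: 'You are a helpful assistant.' },
      { role: 'user', content: 'Hello!' },
    ],
  },
};
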
2 changes: 1 addition & 1 deletion src/version.ts
@@ -1 +1 @@
export const VERSION = '1.2.0'; // x-release-please-version
export const VERSION = '1.3.0'; // x-release-please-version
4 changes: 2 additions & 2 deletions tests/index.test.ts
@@ -157,13 +157,13 @@ describe('instantiate client', () => {
test('empty env variable', () => {
process.env['PROMPT_FOUNDRY_BASE_URL'] = ''; // empty
const client = new PromptFoundry({ apiKey: 'My API Key' });
expect(client.baseURL).toEqual('https://api.promptfoundry.ai/sdk/v1');
expect(client.baseURL).toEqual('https://api.promptfoundry.ai');
});

test('blank env variable', () => {
process.env['PROMPT_FOUNDRY_BASE_URL'] = ' '; // blank
const client = new PromptFoundry({ apiKey: 'My API Key' });
expect(client.baseURL).toEqual('https://api.promptfoundry.ai/sdk/v1');
expect(client.baseURL).toEqual('https://api.promptfoundry.ai');
});
});

