
feat: OpenAPI spec update via Stainless API
stainless-bot committed Mar 1, 2024
1 parent 6c7d674 commit e4c260b
Showing 5 changed files with 15 additions and 57 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -14,10 +14,10 @@ jobs:


steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4

- name: Set up Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
node-version: '18'

8 changes: 5 additions & 3 deletions CONTRIBUTING.md
@@ -3,7 +3,7 @@
This repository uses [`yarn@v1`](https://classic.yarnpkg.com/lang/en/docs/install/#mac-stable).
Other package managers may work but are not officially supported for development.

- To setup the repository, run:
+ To set up the repository, run:

```bash
yarn
@@ -43,6 +43,8 @@ To install via git:

```bash
npm install --save git+ssh://git@github.com:groq/groq-typescript.git
+ # or
+ yarn add git+ssh://git@github.com:groq/groq-typescript.git
```

Alternatively, to link a local copy of the repo:
@@ -65,7 +67,7 @@ pnpm link --global groq-sdk

## Running tests

- Most tests will require you to [setup a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests.
+ Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests.

```bash
npx prism mock path/to/your/openapi.yml
@@ -99,7 +101,7 @@ the changes aren't made through the automated pipeline, you may want to make rel

### Publish with a GitHub workflow

- You can release to package managers by using [the `Publish NPM` GitHub action](https://www.github.com/groq/groq-typescript/actions/workflows/publish-npm.yml). This will require a setup organization or repository secret to be set up.
+ You can release to package managers by using [the `Publish NPM` GitHub action](https://www.github.com/groq/groq-typescript/actions/workflows/publish-npm.yml). This requires a setup organization or repository secret to be set up.

### Publish manually

5 changes: 3 additions & 2 deletions README.md
@@ -9,6 +9,7 @@ The REST API documentation can be found [on console.groq.com](https://console.gr
## Installation

```sh
+ # install from NPM
npm install --save groq-sdk
# or
yarn add groq-sdk
@@ -201,7 +202,7 @@ import Groq from 'groq-sdk';
```

To do the inverse, add `import "groq-sdk/shims/node"` (which does import polyfills).
- This can also be useful if you are getting the wrong TypeScript types for `Response` more details [here](https://github.com/groq/groq-typescript/tree/main/src/_shims#readme).
+ This can also be useful if you are getting the wrong TypeScript types for `Response` ([more details](https://github.com/groq/groq-typescript/tree/main/src/_shims#readme)).

You may also provide a custom `fetch` function when instantiating the client,
which can be used to inspect or alter the `Request` or `Response` before/after each request:
@@ -211,7 +212,7 @@ import { fetch } from 'undici'; // as one example
import Groq from 'groq-sdk';

const client = new Groq({
- fetch: async (url: RequestInfo, init?: RequestInfo): Promise<Response> => {
+ fetch: async (url: RequestInfo, init?: RequestInit): Promise<Response> => {
console.log('About to make a request', url, init);
const response = await fetch(url, init);
console.log('Got response', response);
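Since the diff view truncates the surrounding snippet, here is the complete custom-`fetch` example with the corrected `RequestInit` parameter type; the trailing `return response;` and closing braces are the assumed remainder of the README snippet, not lines shown in this diff:

```ts
import { fetch } from 'undici'; // as one example
import Groq from 'groq-sdk';

const client = new Groq({
  fetch: async (url: RequestInfo, init?: RequestInit): Promise<Response> => {
    // Inspect or alter the outgoing request before it is sent.
    console.log('About to make a request', url, init);
    const response = await fetch(url, init);
    // Inspect or alter the response before the SDK parses it.
    console.log('Got response', response);
    return response;
  },
});
```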
16 changes: 0 additions & 16 deletions src/core.ts
@@ -1,5 +1,4 @@
import { VERSION } from './version';
- import { Stream } from './lib/streaming';
import {
GroqError,
APIError,
@@ -39,19 +38,6 @@ type APIResponseProps = {

async function defaultParseResponse<T>(props: APIResponseProps): Promise<T> {
const { response } = props;
- if (props.options.stream) {
- debug('response', response.status, response.url, response.headers, response.body);
-
- // Note: there is an invariant here that isn't represented in the type system
- // that if you set `stream: true` the response type must also be `Stream<T>`
-
- if (props.options.__streamClass) {
- return props.options.__streamClass.fromSSEResponse(response, props.controller) as any;
- }
-
- return Stream.fromSSEResponse(response, props.controller) as any;
- }

// fetch refuses to read the body when the status code is 204.
if (response.status === 204) {
return null as T;
@@ -750,7 +736,6 @@ export type RequestOptions<Req = unknown | Record<string, unknown> | Readable> =
idempotencyKey?: string;

__binaryResponse?: boolean | undefined;
- __streamClass?: typeof Stream;
};

// This is required so that we can determine if a given object matches the RequestOptions
@@ -771,7 +756,6 @@ const requestOptionsKeys: KeysEnum<RequestOptions> = {
idempotencyKey: true,

__binaryResponse: true,
- __streamClass: true,
};

export const isRequestOptions = (obj: unknown): obj is RequestOptions => {
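For orientation, a rough sketch of what `defaultParseResponse` reduces to once the SSE branch above is removed. Only the 204 check is visible in this diff; the local `APIResponseProps` stand-in and the JSON/text fallback below are assumptions about the elided remainder of the function, not code from this commit:

```ts
// Local stand-in so the sketch is self-contained; the real type lives in src/core.ts.
type APIResponseProps = { response: Response; options: { __binaryResponse?: boolean } };

async function defaultParseResponse<T>(props: APIResponseProps): Promise<T> {
  const { response } = props;

  // fetch refuses to read the body when the status code is 204.
  if (response.status === 204) {
    return null as T;
  }

  // Assumed remainder: parse JSON when the content type says so, otherwise return raw text.
  const contentType = response.headers.get('content-type');
  if (contentType?.includes('application/json')) {
    return (await response.json()) as T;
  }
  return (await response.text()) as unknown as T;
}
```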
39 changes: 5 additions & 34 deletions src/resources/chat/completions.ts
@@ -3,32 +3,13 @@
import * as Core from 'groq-sdk/core';
import { APIResource } from 'groq-sdk/resource';
import * as CompletionsAPI from 'groq-sdk/resources/chat/completions';
- import { Stream } from 'groq-sdk/lib/streaming';
- import { ChatCompletionChunk } from 'groq-sdk/lib/chat_completions_ext';

export class Completions extends APIResource {
/**
* Creates a completion for a chat prompt
*/
- create(
- body: ChatCompletionCreateParamsNonStreaming,
- options?: Core.RequestOptions,
- ): Core.APIPromise<ChatCompletion>;
- create(
- body: ChatCompletionCreateParamsStreaming,
- options?: Core.RequestOptions,
- ): Core.APIPromise<Stream<ChatCompletionChunk>>;
- create(
- body: ChatCompletionCreateParamsBase,
- options?: Core.RequestOptions,
- ): Core.APIPromise<Stream<ChatCompletionChunk> | ChatCompletion>;
- create(
- body: ChatCompletionCreateParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<ChatCompletion> | Core.APIPromise<Stream<ChatCompletionChunk>> {
- return this._client.post('/openai/v1/chat/completions', { body, ...options, stream: body.stream ?? false }) as
- | Core.APIPromise<ChatCompletion>
- | Core.APIPromise<Stream<ChatCompletionChunk>>;
+ create(body: CompletionCreateParams, options?: Core.RequestOptions): Core.APIPromise<ChatCompletion> {
+ return this._client.post('/openai/v1/chat/completions', { body, ...options });
}
}

@@ -122,13 +103,15 @@ export namespace ChatCompletion {

prompt_tokens?: number;

+ queue_time?: number;

total_time?: number;

total_tokens?: number;
}
}

- export interface ChatCompletionCreateParamsBase {
+ export interface CompletionCreateParams {
messages: Array<CompletionCreateParams.Message>;

model: string;
@@ -252,15 +235,3 @@ export namespace Completions {
export import ChatCompletion = CompletionsAPI.ChatCompletion;
export import CompletionCreateParams = CompletionsAPI.CompletionCreateParams;
}

- export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCreateParamsBase {
- stream?: false;
- }
-
- export interface ChatCompletionCreateParamsStreaming extends ChatCompletionCreateParamsBase {
- stream: true;
- }
-
- export type ChatCompletionCreateParams =
- | ChatCompletionCreateParamsNonStreaming
- | ChatCompletionCreateParamsStreaming;

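With the streaming overloads removed, every call now resolves to a plain `ChatCompletion`. A minimal usage sketch of the simplified `create()`; the `GROQ_API_KEY` variable and the model name are placeholders, not values taken from this commit:

```ts
import Groq from 'groq-sdk';

const client = new Groq({ apiKey: process.env['GROQ_API_KEY'] });

async function main() {
  // create() no longer returns Stream<ChatCompletionChunk>; it resolves to a ChatCompletion.
  const completion = await client.chat.completions.create({
    messages: [{ role: 'user', content: 'Hello, world' }],
    model: 'mixtral-8x7b-32768', // placeholder model name
  });

  console.log(completion.choices?.[0]?.message?.content);
}

main();
```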