diff --git a/readme.md b/readme.md
index e18eb0e..ef60d97 100644
--- a/readme.md
+++ b/readme.md
@@ -11,13 +11,13 @@ Unfortunately, the official [openai-node](https://github.com/openai/openai-node)
 - You want a fast and small client that doesn't patch fetch
 - Supports all envs with native fetch: Node 18+, browsers, Deno, Cloudflare Workers, etc
 - Package size: `openai-fetch` is [~14kb](https://bundlephobia.com/package/openai-fetch) and `openai` is [~142kb](https://bundlephobia.com/package/openai)
-- You only need the chat, completions, and embeddings
+- You only need the chat, completions, embeddings, and moderations
 
 ### Use `openai-node` if you need:
 
 - Your runtime doesn't have native fetch support
 - Your app can't handle native ESM code
-- Endpoints other than chat, completions, and embeddings
+- Endpoints other than chat, completions, embeddings, and moderations
 - Aren't concerned with lib size or fetch patching
 
 ## Install
@@ -59,6 +59,9 @@ client.streamCompletion(params: CompletionStreamParams): Promise<CompletionStreamResponse>
+
+// Checks for potentially harmful content
+client.createModeration(params: ModerationParams): Promise<ModerationResponse>
 ```
 
 ### Type Definitions
diff --git a/src/openai-client.ts b/src/openai-client.ts
index 38d02c0..0adfe37 100644
--- a/src/openai-client.ts
+++ b/src/openai-client.ts
@@ -13,6 +13,8 @@ import {
   type CompletionStreamResponse,
   type EmbeddingParams,
   type EmbeddingResponse,
+  type ModerationParams,
+  type ModerationResponse,
 } from './types.js';
 
 export type ConfigOpts = {
@@ -132,4 +134,15 @@ export class OpenAIClient {
       .json();
     return response;
   }
+
+  /** Given some input text, outputs if the model classifies it as potentially harmful across several categories. */
+  async createModeration(
+    params: ModerationParams,
+    opts?: RequestOpts
+  ): Promise<ModerationResponse> {
+    const response: OpenAI.ModerationCreateResponse = await this.getApi(opts)
+      .post('moderations', { json: params })
+      .json();
+    return response;
+  }
 }
diff --git a/src/types.ts b/src/types.ts
index 3f0a748..183915f 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -83,3 +83,6 @@ export type CompletionStreamResponse = ReadableStream;
 
 export type EmbeddingParams = OpenAI.EmbeddingCreateParams;
 export type EmbeddingResponse = OpenAI.CreateEmbeddingResponse;
+
+export type ModerationParams = OpenAI.ModerationCreateParams;
+export type ModerationResponse = OpenAI.ModerationCreateResponse;
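
A minimal usage sketch of the new `createModeration` method, assuming the `OpenAIClient` constructor and `apiKey` option from the existing readme examples; the environment variable and input text are illustrative only.

```ts
import { OpenAIClient } from 'openai-fetch';

// Assumes OPENAI_API_KEY is set in the environment (illustrative).
const client = new OpenAIClient({ apiKey: process.env.OPENAI_API_KEY });

// Classify a piece of text across the moderation categories.
const response = await client.createModeration({
  input: 'Some text to check for potentially harmful content',
});

// Each result reports an overall `flagged` boolean plus per-category flags and scores.
const [result] = response.results;
console.log(result.flagged, result.categories, result.category_scores);
```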