From 7f069507c7f8e6815fd02235d4d527633e02cd06 Mon Sep 17 00:00:00 2001 From: Rohit Khanduri Date: Sat, 11 May 2024 23:43:53 +0200 Subject: [PATCH] feat: constants Added constants and removed dangling strings for better code comprehensibility. --- README.md | 2 ++ src/browser.ts | 24 ++++++++++++++---------- src/constants/index.ts | 31 +++++++++++++++++++++++++++++++ src/index.ts | 30 +++++++++++++++++++----------- src/utils.ts | 15 ++++++++------- tsconfig.json | 6 +----- 6 files changed, 75 insertions(+), 33 deletions(-) create mode 100644 src/constants/index.ts diff --git a/README.md b/README.md index 76b688a..fa9b70d 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,9 @@ console.log(response.message.content) ``` ### Browser Usage + To use the library without node, import the browser module. + ```javascript import ollama from 'ollama/browser' ``` diff --git a/src/browser.ts b/src/browser.ts index 5e14536..7aba8d1 100644 --- a/src/browser.ts +++ b/src/browser.ts @@ -22,6 +22,7 @@ import type { ShowResponse, StatusResponse, } from './interfaces.js' +import { EMPTY_STRING, MESSAGES, OLLAMA_LOCAL_URL, REQUEST_CONSTANTS } from './constants/index.js' export class Ollama { protected readonly config: Config @@ -30,10 +31,10 @@ export class Ollama { constructor(config?: Partial) { this.config = { - host: '', + host: EMPTY_STRING, } if (!config?.proxy) { - this.config.host = utils.formatHost(config?.host ?? 'http://127.0.0.1:11434') + this.config.host = utils.formatHost(config?.host ?? 
OLLAMA_LOCAL_URL) } this.fetch = fetch @@ -76,7 +77,7 @@ export class Ollama { ) if (!response.body) { - throw new Error('Missing body') + throw new Error(MESSAGES.MISSING_BODY) } const itr = utils.parseJSON(response.body) @@ -90,7 +91,7 @@ export class Ollama { yield message // message will be done in the case of chat and generate // message will be success in the case of a progress response (pull, push, create) - if ((message as any).done || (message as any).status === 'success') { + if ((message as any).done || (message as any).status === MESSAGES.SUCCESS) { return } } @@ -98,7 +99,7 @@ export class Ollama { })() } else { const message = await itr.next() - if (!message.value.done && (message.value as any).status !== 'success') { + if (!message.value.done && (message.value as any).status !== MESSAGES.SUCCESS) { throw new Error('Expected a completed response.') } return message.value @@ -137,7 +138,10 @@ export class Ollama { if (request.images) { request.images = await Promise.all(request.images.map(this.encodeImage.bind(this))) } - return this.processStreamableRequest('generate', request) + return this.processStreamableRequest( + REQUEST_CONSTANTS.GENERATE, + request, + ) } chat(request: ChatRequest & { stream: true }): Promise> @@ -175,7 +179,7 @@ export class Ollama { async create( request: CreateRequest, ): Promise> { - return this.processStreamableRequest('create', { + return this.processStreamableRequest(REQUEST_CONSTANTS.CREATE, { name: request.model, stream: request.stream, modelfile: request.modelfile, @@ -213,7 +217,7 @@ export class Ollama { async push( request: PushRequest, ): Promise> { - return this.processStreamableRequest('push', { + return this.processStreamableRequest(REQUEST_CONSTANTS.PUSH, { name: request.model, stream: request.stream, insecure: request.insecure, @@ -230,7 +234,7 @@ export class Ollama { await utils.del(this.fetch, `${this.config.host}/api/delete`, { name: request.model, }) - return { status: 'success' } + return { status: 
MESSAGES.SUCCESS } } /** @@ -241,7 +245,7 @@ */ async copy(request: CopyRequest): Promise { await utils.post(this.fetch, `${this.config.host}/api/copy`, { ...request }) - return { status: 'success' } + return { status: MESSAGES.SUCCESS } } /** diff --git a/src/constants/index.ts b/src/constants/index.ts new file mode 100644 index 0000000..2d2bf38 --- /dev/null +++ b/src/constants/index.ts @@ -0,0 +1,31 @@ +const EMPTY_STRING = '' +const MESSAGES = { + MISSING_BODY: 'Missing body', + SUCCESS: 'success', + FETCHING_TEXT: 'Getting text from response', + ERROR_FETCHING_TEXT: 'Failed to get text from error response', + ERROR_NO_MODEL_FILE: 'Must provide either path or modelfile to create a model', + ERROR_JSON_PARSE: 'Failed to parse error response as JSON', +} as const +const REQUEST_CONSTANTS = { + GENERATE: 'generate', + CREATE: 'create', + PUSH: 'push', +} as const +const MODEL_FILE_COMMANDS = ['FROM', 'ADAPTER'] +const OLLAMA_LOCAL_URL = 'http://127.0.0.1:11434' +const SHA256 = 'sha256' +const ENCODING = { + HEX: 'hex', + BASE64: 'base64', + UTF8: 'utf8', +} as const +export { + EMPTY_STRING, + MESSAGES, + REQUEST_CONSTANTS, + MODEL_FILE_COMMANDS, + OLLAMA_LOCAL_URL, + SHA256, + ENCODING, +} diff --git a/src/index.ts b/src/index.ts index b00882e..e062203 100644 --- a/src/index.ts +++ b/src/index.ts @@ -6,18 +6,25 @@ import { homedir } from 'os' import { Ollama as OllamaBrowser } from './browser.js' import type { CreateRequest, ProgressResponse } from './interfaces.js' +import { + EMPTY_STRING, + ENCODING, + MESSAGES, + MODEL_FILE_COMMANDS, + SHA256, +} from './constants/index.js' export class Ollama extends OllamaBrowser { async encodeImage(image: Uint8Array | Buffer | string): Promise { if (typeof image !== 'string') { // image is Uint8Array or Buffer, convert it to base64 - return Buffer.from(image).toString('base64') + return Buffer.from(image).toString(ENCODING.BASE64) } try { if (fs.existsSync(image)) { // this is a filepath, read the file and 
convert it to base64 const fileBuffer = await promises.readFile(resolve(image)) - return Buffer.from(fileBuffer).toString('base64') + return Buffer.from(fileBuffer).toString(ENCODING.BASE64) } } catch { // continue @@ -40,7 +47,7 @@ export class Ollama extends OllamaBrowser { const lines = modelfile.split('\n') for (const line of lines) { const [command, args] = line.split(' ', 2) - if (['FROM', 'ADAPTER'].includes(command.toUpperCase())) { + if (MODEL_FILE_COMMANDS.includes(command.toUpperCase())) { const path = this.resolvePath(args.trim(), mfDir) if (await this.fileExists(path)) { out.push(`${command} @${await this.createBlob(path)}`) @@ -94,13 +101,13 @@ export class Ollama extends OllamaBrowser { // Compute the SHA256 digest const sha256sum = await new Promise((resolve, reject) => { - const hash = createHash('sha256') + const hash = createHash(SHA256) fileStream.on('data', (data) => hash.update(data)) - fileStream.on('end', () => resolve(hash.digest('hex'))) + fileStream.on('end', () => resolve(hash.digest(ENCODING.HEX))) fileStream.on('error', reject) }) - const digest = `sha256:${sha256sum}` + const digest = `${SHA256}:${sha256sum}` try { await utils.head(this.fetch, `${this.config.host}/api/blobs/${digest}`) @@ -144,9 +151,11 @@ export class Ollama extends OllamaBrowser { async create( request: CreateRequest, ): Promise> { - let modelfileContent = '' + let modelfileContent = EMPTY_STRING if (request.path) { - modelfileContent = await promises.readFile(request.path, { encoding: 'utf8' }) + modelfileContent = await promises.readFile(request.path, { + encoding: ENCODING.UTF8, + }) modelfileContent = await this.parseModelfile( modelfileContent, dirname(request.path), @@ -154,16 +163,15 @@ export class Ollama extends OllamaBrowser { } else if (request.modelfile) { modelfileContent = await this.parseModelfile(request.modelfile) } else { - throw new Error('Must provide either path or modelfile to create a model') + throw new Error(MESSAGES.ERROR_NO_MODEL_FILE) } 
request.modelfile = modelfileContent // check stream here so that typescript knows which overload to use if (request.stream) { return super.create(request as CreateRequest & { stream: true }) - } else { - return super.create(request as CreateRequest & { stream: false }) } + return super.create(request as CreateRequest & { stream: false }) } } diff --git a/src/utils.ts b/src/utils.ts index b4235a5..d17a916 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,5 +1,6 @@ import { version } from './version.js' import type { Fetch, ErrorResponse } from './interfaces.js' +import { EMPTY_STRING, ENCODING, MESSAGES, OLLAMA_LOCAL_URL } from './constants/index.js' /** * An error class for response errors. @@ -36,15 +37,15 @@ const checkOk = async (response: Response): Promise => { errorData = (await response.json()) as ErrorResponse message = errorData.error || message } catch (error) { - console.log('Failed to parse error response as JSON') + console.log(MESSAGES.ERROR_JSON_PARSE) } } else { try { - console.log('Getting text from response') + console.log(MESSAGES.FETCHING_TEXT) const textResponse = await response.text() message = textResponse || message } catch (error) { - console.log('Failed to get text from error response') + console.log(MESSAGES.ERROR_FETCHING_TEXT) } } @@ -181,8 +182,8 @@ export const del = async ( export const parseJSON = async function* ( itr: ReadableStream, ): AsyncGenerator { - const decoder = new TextDecoder('utf-8') - let buffer = '' + const decoder = new TextDecoder(ENCODING.UTF8) + let buffer = EMPTY_STRING const reader = itr.getReader() @@ -197,7 +198,7 @@ const parts = buffer.split('\n') - buffer = parts.pop() ?? 
EMPTY_STRING for (const part of parts) { try { @@ -223,7 +224,7 @@ export const parseJSON = async function* ( */ export const formatHost = (host: string): string => { if (!host) { - return 'http://127.0.0.1:11434' + return OLLAMA_LOCAL_URL } let isExplicitProtocol = host.includes('://') diff --git a/tsconfig.json b/tsconfig.json index 327ed39..667d872 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -13,11 +13,7 @@ "module": "ES2022", "outDir": "./dist", "target": "ES6", - "lib": [ - "es6", - "es2018.asyncgenerator", - "dom" - ] + "lib": ["es6", "es2018.asyncgenerator", "dom"], }, "ts-node": {