chore: refactoring
- created a new constants file to gradually remove stray string literals from the code for better readability
- cleaned up the browser.ts file
- cleaned up the index.ts file for better readability
- created new utility functions for I/O operations (sketched below, after the changed-files summary)
rohit1901 committed May 24, 2024
1 parent 7f06950 commit 8ac2d83
Showing 4 changed files with 188 additions and 108 deletions.
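
The diff for the fourth changed file (presumably src/utils.ts, the new I/O helpers imported in src/index.ts as `fileExists` and `isFilePath`) is not expanded on this page. A minimal sketch of what those helpers could look like, assuming `fileExists` keeps the `promises.access` check that previously lived in src/index.ts and that `isFilePath` is a simple prefix-plus-existence heuristic; the actual implementation may differ:

import { promises } from 'fs'

// Resolves to true when the given path is accessible on disk.
export async function fileExists(path: string): Promise<boolean> {
  try {
    await promises.access(path)
    return true
  } catch {
    return false
  }
}

// Heuristic sketch: treat the string as a file path when it looks like one
// (absolute, relative, or home-rooted) and actually exists on disk.
export async function isFilePath(value: string): Promise<boolean> {
  const looksLikePath =
    value.startsWith('/') || value.startsWith('./') || value.startsWith('../') || value.startsWith('~')
  return looksLikePath && (await fileExists(value))
}
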
30 changes: 15 additions & 15 deletions src/browser.ts
@@ -38,7 +38,8 @@ export class Ollama {
}

this.fetch = fetch
if (config?.fetch != null) {
// NOTE: fetch could either be undefined or an instance of Fetch
if (config?.fetch) {
this.fetch = config.fetch
}

@@ -97,13 +98,12 @@ export class Ollama {
}
throw new Error('Did not receive done or success response in stream.')
})()
} else {
const message = await itr.next()
if (!message.value.done && (message.value as any).status !== MESSAGES.SUCCESS) {
throw new Error('Expected a completed response.')
}
return message.value
}
const message = await itr.next()
if (!message.value.done && (message.value as any).status !== MESSAGES.SUCCESS) {
throw new Error('Expected a completed response.')
}
return message.value
}

/**
@@ -112,14 +112,14 @@ export class Ollama {
* @returns {Promise<string>} - The base64 encoded image.
*/
async encodeImage(image: Uint8Array | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array convert it to base64
const uint8Array = new Uint8Array(image)
const numberArray = Array.from(uint8Array)
return btoa(String.fromCharCode.apply(null, numberArray))
if (typeof image === 'string') {
// image is already base64 encoded
return image
}
// the string may be base64 encoded
return image
// image is Uint8Array convert it to base64
const uint8Array = new Uint8Array(image)
const numberArray = Array.from(uint8Array)
return btoa(String.fromCharCode.apply(null, numberArray))
}

generate(
@@ -198,7 +198,7 @@ export class Ollama {
async pull(
request: PullRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
return this.processStreamableRequest<ProgressResponse>('pull', {
return this.processStreamableRequest<ProgressResponse>(REQUEST_CONSTANTS.PULL, {
name: request.model,
stream: request.stream,
insecure: request.insecure,
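
For context on the reworked browser-side encodeImage above, a minimal usage sketch (assuming the package's browser entry point is importable as 'ollama/browser'; the byte values are illustrative):

import ollama from 'ollama/browser'

async function demo() {
  // A string input is assumed to already be base64 encoded and is returned unchanged.
  const fromString = await ollama.encodeImage('aGVsbG8=')

  // A Uint8Array is converted to base64 via String.fromCharCode and btoa.
  const fromBytes = await ollama.encodeImage(new Uint8Array([104, 101, 108, 108, 111]))

  console.log(fromString === fromBytes) // true: both encode the bytes of "hello"
}

demo()
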
21 changes: 21 additions & 0 deletions src/constants/index.ts
@@ -1,16 +1,33 @@
const EMPTY_STRING = ''
const CODE_404 = '404'
const PROTOCOLS = {
HTTP: 'http',
HTTPS: 'https',
} as const
const PORTS = {
HTTP: '80',
HTTPS: '443',
} as const
const MESSAGES = {
MISSING_BODY: 'Missing body',
SUCCESS: 'Success',
FETCHING_TEXT: 'Getting text from response',
ERROR_FETCHING_TEXT: 'Failed to get text from error response',
ERROR_NO_MODEL_FILE: 'Must provide either path or modelfile to create a model',
ERROR_JSON_PARSE: 'Failed to parse error response as JSON',
STREAMING_UPLOADS_NOT_SUPPORTED:
'Streaming uploads are not supported in this environment.',
} as const
const REQUEST_CONSTANTS = {
GENERATE: 'generate',
CREATE: 'create',
PUSH: 'push',
PULL: 'pull',
} as const
const STREAMING_EVENTS = {
DATA: 'data',
END: 'end',
ERROR: 'error',
} as const
const MODEL_FILE_COMMANDS = ['FROM', 'ADAPTER']
const OLLAMA_LOCAL_URL = 'http://127.0.0.1:11434'
@@ -22,8 +39,12 @@ const ENCODING = {
} as const
export {
EMPTY_STRING,
CODE_404,
PROTOCOLS,
PORTS,
MESSAGES,
REQUEST_CONSTANTS,
STREAMING_EVENTS,
MODEL_FILE_COMMANDS,
OLLAMA_LOCAL_URL,
SHA256,
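
A short note on the `as const` assertions used throughout this constants file: they make the lookup objects readonly and narrow their values to string-literal types, which is what keeps call sites such as `processStreamableRequest(REQUEST_CONSTANTS.PULL, ...)` strictly typed. An illustrative sketch (the `RequestEndpoint` alias is hypothetical and not part of the commit):

const REQUEST_CONSTANTS = {
  GENERATE: 'generate',
  CREATE: 'create',
  PUSH: 'push',
  PULL: 'pull',
} as const

// Inferred as the literal union 'generate' | 'create' | 'push' | 'pull', not plain string,
// so misspelled endpoint names are rejected at compile time.
type RequestEndpoint = (typeof REQUEST_CONSTANTS)[keyof typeof REQUEST_CONSTANTS]

const endpoint: RequestEndpoint = REQUEST_CONSTANTS.PULL // OK
// const bad: RequestEndpoint = 'remove' // type error
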
205 changes: 114 additions & 91 deletions src/index.ts
@@ -1,36 +1,69 @@
import * as utils from './utils.js'
import fs, { createReadStream, promises } from 'fs'
import { fileExists, isFilePath } from './utils.js'
import { createReadStream, promises } from 'fs'
import { dirname, join, resolve } from 'path'
import { createHash } from 'crypto'
import { homedir } from 'os'
import { Ollama as OllamaBrowser } from './browser.js'

import type { CreateRequest, ProgressResponse } from './interfaces.js'
import {
EMPTY_STRING,
CODE_404,
ENCODING,
MESSAGES,
MODEL_FILE_COMMANDS,
SHA256,
STREAMING_EVENTS,
} from './constants'

export class Ollama extends OllamaBrowser {
private async encodeImageFromString(image: string): Promise<string> {
const isPath = await isFilePath(image)
if (isPath) {
return this.encodeImageFromFile(image)
}
return image
}

private async encodeImageFromBuffer(image: Uint8Array | Buffer): Promise<string> {
return Buffer.from(image).toString(ENCODING.BASE64)
}

private async encodeImageFromFile(path: string): Promise<string> {
const fileBuffer = await promises.readFile(resolve(path))
return Buffer.from(fileBuffer).toString(ENCODING.BASE64)
}

/**
* Encode an image to base64.
* @param image {Uint8Array | Buffer | string} - The image to encode
* @returns {Promise<string>} - The base64 encoded image
*/
async encodeImage(image: Uint8Array | Buffer | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array or Buffer, convert it to base64
return Buffer.from(image).toString(ENCODING.BASE64)
if (typeof image === 'string') {
return this.encodeImageFromString(image)
}
try {
if (fs.existsSync(image)) {
// this is a filepath, read the file and convert it to base64
const fileBuffer = await promises.readFile(resolve(image))
return Buffer.from(fileBuffer).toString(ENCODING.BASE64)
}
} catch {
// continue
return this.encodeImageFromBuffer(image)
}

private async parseLine(line: string, mfDir: string): Promise<string> {
const [command, args] = line.split(' ', 2)
if (MODEL_FILE_COMMANDS.includes(command.toUpperCase())) {
return this.parseCommand(command, args.trim(), mfDir)
}
// the string may be base64 encoded
return image
return line
}

private async parseCommand(
command: string,
args: string,
mfDir: string,
): Promise<string> {
const path = this.resolvePath(args, mfDir)
if (await fileExists(path)) {
return `${command} @${await this.createBlob(path)}`
}
return `${command} ${args}`
}

/**
@@ -43,22 +76,11 @@ export class Ollama extends OllamaBrowser {
modelfile: string,
mfDir: string = process.cwd(),
): Promise<string> {
const out: string[] = []
const lines = modelfile.split('\n')
for (const line of lines) {
const [command, args] = line.split(' ', 2)
if (MODEL_FILE_COMMANDS.includes(command.toUpperCase())) {
const path = this.resolvePath(args.trim(), mfDir)
if (await this.fileExists(path)) {
out.push(`${command} @${await this.createBlob(path)}`)
} else {
out.push(`${command} ${args}`)
}
} else {
out.push(line)
}
}
return out.join('\n')
const parsedLines = await Promise.all(
lines.map((line) => this.parseLine(line, mfDir)),
)
return parsedLines.join('\n')
}

/**
@@ -67,69 +89,59 @@ export class Ollama extends OllamaBrowser {
* @param mfDir {string} - The directory of the modelfile
* @private @internal
*/
private resolvePath(inputPath, mfDir) {
private resolvePath(inputPath: string, mfDir: string) {
if (inputPath.startsWith('~')) {
return join(homedir(), inputPath.slice(1))
}
return resolve(mfDir, inputPath)
}

private async computeSha256(path: string): Promise<string> {
return new Promise<string>((resolve, reject) => {
const fileStream = createReadStream(path)
const hash = createHash(SHA256)
fileStream.on('data', (data) => hash.update(data))
fileStream.on('end', () => resolve(hash.digest(ENCODING.HEX)))
fileStream.on('error', reject)
})
}

private createReadableStream(path: string): ReadableStream {
const fileStream = createReadStream(path)
return new ReadableStream({
start(controller) {
fileStream.on(STREAMING_EVENTS.DATA, (chunk) => {
controller.enqueue(chunk)
})

fileStream.on(STREAMING_EVENTS.END, () => {
controller.close()
})

fileStream.on(STREAMING_EVENTS.ERROR, (err) => {
controller.error(err)
})
},
})
}
/**
* checks if a file exists
* Create a blob from a file.
* @param path {string} - The path to the file
* @private @internal
* @returns {Promise<boolean>} - Whether the file exists or not
* @returns {Promise<string>} - The digest of the blob
*/
private async fileExists(path: string): Promise<boolean> {
try {
await promises.access(path)
return true
} catch {
return false
}
}

private async createBlob(path: string): Promise<string> {
if (typeof ReadableStream === 'undefined') {
// Not all fetch implementations support streaming
// TODO: support non-streaming uploads
throw new Error('Streaming uploads are not supported in this environment.')
throw new Error(MESSAGES.STREAMING_UPLOADS_NOT_SUPPORTED)
}

// Create a stream for reading the file
const fileStream = createReadStream(path)

// Compute the SHA256 digest
const sha256sum = await new Promise<string>((resolve, reject) => {
const hash = createHash(SHA256)
fileStream.on('data', (data) => hash.update(data))
fileStream.on('end', () => resolve(hash.digest(ENCODING.HEX)))
fileStream.on('error', reject)
})

const sha256sum = await this.computeSha256(path)
const digest = `${SHA256}:${sha256sum}`

try {
await utils.head(this.fetch, `${this.config.host}/api/blobs/${digest}`)
} catch (e) {
if (e instanceof Error && e.message.includes('404')) {
// Create a new readable stream for the fetch request
const readableStream = new ReadableStream({
start(controller) {
fileStream.on('data', (chunk) => {
controller.enqueue(chunk) // Enqueue the chunk directly
})

fileStream.on('end', () => {
controller.close() // Close the stream when the file ends
})

fileStream.on('error', (err) => {
controller.error(err) // Propagate errors to the stream
})
},
})

if (e instanceof Error && e.message.includes(CODE_404)) {
const readableStream = this.createReadableStream(path)
await utils.post(
this.fetch,
`${this.config.host}/api/blobs/${digest}`,
@@ -148,31 +160,42 @@ export class Ollama extends OllamaBrowser {
): Promise<AsyncGenerator<ProgressResponse>>
create(request: CreateRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Create a model.
* @param request {CreateRequest} - The request object
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The progress response
*/
async create(
request: CreateRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
let modelfileContent = EMPTY_STRING
if (request.path) {
modelfileContent = await promises.readFile(request.path, {
encoding: ENCODING.UTF8,
})
modelfileContent = await this.parseModelfile(
modelfileContent,
dirname(request.path),
)
} else if (request.modelfile) {
modelfileContent = await this.parseModelfile(request.modelfile)
} else {
throw new Error(MESSAGES.ERROR_NO_MODEL_FILE)
}
request.modelfile = modelfileContent
request.modelfile = await this.getModelfileContent(request)

// check stream here so that typescript knows which overload to use
if (request.stream) {
return super.create(request as CreateRequest & { stream: true })
}
return super.create(request as CreateRequest & { stream: false })
}

private async getModelfileContentFromPath(path: string): Promise<string> {
const modelfileContent = await promises.readFile(path, {
encoding: ENCODING.UTF8,
})
return this.parseModelfile(modelfileContent, dirname(path))
}
/**
* Get the content of the modelfile.
* @param request {CreateRequest} - The request object
* @returns {Promise<string>} - The content of the modelfile
*/
private async getModelfileContent(request: CreateRequest): Promise<string> {
if (request.path) {
return this.getModelfileContentFromPath(request.path)
} else if (request.modelfile) {
return this.parseModelfile(request.modelfile)
} else {
throw new Error(MESSAGES.ERROR_NO_MODEL_FILE)
}
}
}

export default new Ollama()
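
To see the refactored create flow in src/index.ts end to end, a hedged usage sketch of the Node client (model names, the Modelfile path, and the inline modelfile content are made up; the `model` field name is assumed from the library's other request types):

import ollama from 'ollama'

async function demo() {
  // getModelfileContent() reads and parses the file at `path`,
  // resolving FROM/ADAPTER lines that point at local files into @<sha256> blob digests.
  const fromPath = await ollama.create({
    model: 'my-model-from-path',
    path: './Modelfile',
  })
  console.log(fromPath.status)

  // Alternatively, pass the modelfile contents inline; lines whose arguments are not
  // existing files pass through parseModelfile() unchanged.
  const inline = await ollama.create({
    model: 'my-model-inline',
    modelfile: 'FROM llama3\nPARAMETER temperature 0.7',
  })
  console.log(inline.status)

  // With stream: true the same call resolves to an AsyncGenerator of ProgressResponse.
  for await (const progress of await ollama.create({
    model: 'my-model-stream',
    modelfile: 'FROM llama3',
    stream: true,
  })) {
    console.log(progress.status)
  }
}

demo()
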