diff --git a/README.md b/README.md index 63799f3..dbcd230 100644 --- a/README.md +++ b/README.md @@ -163,15 +163,6 @@ aipick -m "Why is the sky blue?" aipick --message # or -m ``` -##### `--generate` or `-g` -- Number of responses to generate (Warning: generating multiple costs more) (default: **1**) - -```sh -aipick --generate # or -g -``` - -> Warning: this uses more tokens, meaning it costs more. - ##### `--systemPrompt` or `-s` - System prompt to let users fine-tune prompt @@ -242,7 +233,6 @@ aipick config set OPENAI.key= GEMINI.temperature=3 | `OLLAMA_HOST` | `http://localhost:11434` | The Ollama Host | | `OLLAMA_TIMEOUT` | `100_000` ms | Request timeout for the Ollama | | `locale` | `en` | Locale for the generated commit messages | -| `generate` | `1` | Number of commit messages to generate | | `type` | `conventional` | Type of commit message to generate | | `proxy` | N/A | Set a HTTP/HTTPS proxy to use for requests(only **OpenAI**) | | `timeout` | `10_000` ms | Network request timeout | diff --git a/package.json b/package.json index 1a84ef0..d4f5013 100644 --- a/package.json +++ b/package.json @@ -72,7 +72,7 @@ "formdata-node": "^6.0.3", "groq-sdk": "^0.4.0", "inquirer": "9.2.8", - "inquirer-reactive-list-prompt": "^1.0.8", + "inquirer-reactive-list-prompt": "^1.0.9", "ollama": "^0.5.6", "ora": "^8.0.1", "readline": "^1.3.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 30018cd..e4f571f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -45,8 +45,8 @@ dependencies: specifier: 9.2.8 version: 9.2.8 inquirer-reactive-list-prompt: - specifier: ^1.0.8 - version: 1.0.8(inquirer@9.2.8) + specifier: ^1.0.9 + version: 1.0.9(inquirer@9.2.8) ollama: specifier: ^0.5.6 version: 0.5.6 @@ -4891,8 +4891,8 @@ packages: engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} dev: true - /inquirer-reactive-list-prompt@1.0.8(inquirer@9.2.8): - resolution: {integrity: sha512-4lOxr5uJiZVQ7aMNJhcrnExFrNxb16FlPcuysKYJBR4xYGihNHzn34P3YfAqZ1HFF78elH6tBWGJ7IbUIMHC2A==} + /inquirer-reactive-list-prompt@1.0.9(inquirer@9.2.8): + resolution: {integrity: sha512-+mgpjugb3uFSDRRdGl60VMFBskYYoI4nRVDlaspNEqbAxHrkjy4dfnAT0Z5rWeSqWXqYhC1yeuyajGg+F1SqaQ==} peerDependencies: inquirer: 9.2.8 dependencies: diff --git a/src/cli.ts b/src/cli.ts index ac2d7fc..2dc1bb0 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -12,11 +12,6 @@ cli( name: 'aipick', version, flags: { - generate: { - type: Number, - description: 'Number of responses to generate (Warning: generating multiple costs more) (default: 1)', - alias: 'g', - }, message: { type: String, description: 'Message to ask to AI', @@ -38,7 +33,7 @@ cli( ignoreArgv: type => type === 'unknown-flag' || type === 'argument', }, argv => { - aipick(argv.flags.generate, argv.flags.message, argv.flags.systemPrompt, rawArgv); + aipick(argv.flags.message, argv.flags.systemPrompt, rawArgv); }, rawArgv ); diff --git a/src/commands/aipick.ts b/src/commands/aipick.ts index 1456867..270e6d5 100644 --- a/src/commands/aipick.ts +++ b/src/commands/aipick.ts @@ -11,7 +11,7 @@ import { KnownError, handleCliError } from '../utils/error.js'; const consoleManager = new ConsoleManager(); -export default async (generate: number | undefined, message: string | undefined, systemPrompt: string | undefined, rawArgv: string[]) => +export default async (message: string | undefined, systemPrompt: string | undefined, rawArgv: string[]) => (async () => { consoleManager.printTitle(); @@ -21,7 +21,6 @@ export default async (generate: number | undefined, message: string | undefined, const config = await getConfig( { - 
generate: generate as number, systemPrompt: systemPrompt?.toString() as string, }, rawArgv diff --git a/src/managers/reactive-prompt.manager.ts b/src/managers/reactive-prompt.manager.ts index 100f53e..4728961 100644 --- a/src/managers/reactive-prompt.manager.ts +++ b/src/managers/reactive-prompt.manager.ts @@ -30,9 +30,11 @@ export class ReactivePromptManager { emptyMessage: `⚠ ${emptyResponses}`, loop: false, showDescription, - descPageSize: 10, + descPageSize: 15, choices$: this.choices$, loader$: this.loader$, + // @ts-ignore ignore + pickKey: 'short', }); } diff --git a/src/services/ai/ai.service.ts b/src/services/ai/ai.service.ts index ad45a24..e20db96 100644 --- a/src/services/ai/ai.service.ts +++ b/src/services/ai/ai.service.ts @@ -2,17 +2,13 @@ import { ReactiveListChoice } from 'inquirer-reactive-list-prompt'; import { Observable, of } from 'rxjs'; import { ModelConfig, ModelName } from '../../utils/config.js'; +import { getFirstWordsFrom } from '../../utils/utils.js'; export interface AIResponse { title: string; value: string; } -export interface RawAIResponse { - summary: string; - description?: string; -} - export interface AIServiceParams { config: ModelConfig; userMessage: string; @@ -56,57 +52,13 @@ export abstract class AIService { }); }; - protected sanitizeResponse(generatedText: string, maxCount: number, ignoreBody: boolean): AIResponse[] { + protected sanitizeResponse(generatedText: string, ignoreBody: boolean): AIResponse[] { try { - const rawResponses: RawAIResponse[] = JSON.parse(generatedText); - const filtedResponses = rawResponses.map((data: RawAIResponse) => { - if (ignoreBody) { - return { - title: `${data.summary}`, - value: `${data.summary}`, - }; - } - return { - title: `${data.summary}`, - value: `${data.summary}${data.description ? `\n\n${data.description}` : ''}`, - }; - }); - - if (filtedResponses.length > maxCount) { - return filtedResponses.slice(0, maxCount); - } - return filtedResponses; + const title = `${getFirstWordsFrom(generatedText)}...`; + const value = generatedText; + return [{ title, value }]; } catch (error) { - const jsonPattern = /\[[\s\S]*?\]/; - try { - const jsonMatch = generatedText.match(jsonPattern); - if (!jsonMatch) { - // No valid JSON array found in the response - return []; - } - const jsonStr = jsonMatch[0]; - const rawResponses: RawAIResponse[] = JSON.parse(jsonStr); - const filtedResponses = rawResponses.map((data: RawAIResponse) => { - if (ignoreBody) { - return { - title: `${data.summary}`, - value: `${data.summary}`, - }; - } - return { - title: `${data.summary}`, - value: `${data.summary}${data.description ? 
`\n\n${data.description}` : ''}`, - }; - }); - - if (filtedResponses.length > maxCount) { - return filtedResponses.slice(0, maxCount); - } - return filtedResponses; - } catch (e) { - // Error parsing JSON - return []; - } + return []; } } } diff --git a/src/services/ai/anthropic.service.ts b/src/services/ai/anthropic.service.ts index 3ca46a8..8057317 100644 --- a/src/services/ai/anthropic.service.ts +++ b/src/services/ai/anthropic.service.ts @@ -36,6 +36,7 @@ export class AnthropicService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -47,11 +48,10 @@ export class AnthropicService extends AIService { private async generateResponses(): Promise { try { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -73,7 +73,7 @@ export class AnthropicService extends AIService { const result: Anthropic.Message = await this.anthropic.messages.create(params); const completion = result.content.map(({ text }) => text).join(''); logging && createLogResponse('Anthropic', userMessage, generatedSystemPrompt, completion); - return this.sanitizeResponse(completion, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(completion, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny.code === 'ENOTFOUND') { diff --git a/src/services/ai/codestral.service.ts b/src/services/ai/codestral.service.ts index 4fd2e4e..26e60ec 100644 --- a/src/services/ai/codestral.service.ts +++ b/src/services/ai/codestral.service.ts @@ -33,6 +33,7 @@ export class CodestralService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -43,10 +44,9 @@ export class CodestralService extends AIService { private async generateResponses(): Promise { try { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -55,7 +55,7 @@ export class CodestralService extends AIService { this.checkAvailableModels(); const chatResponse = await this.createChatCompletions(generatedSystemPrompt, userMessage); logging && createLogResponse('Codestral', userMessage, generatedSystemPrompt, chatResponse); - return this.sanitizeResponse(chatResponse, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(chatResponse, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny.code === 'ENOTFOUND') { diff --git a/src/services/ai/cohere.service.ts b/src/services/ai/cohere.service.ts index 63104e6..6b6877f 100644 --- a/src/services/ai/cohere.service.ts +++ b/src/services/ai/cohere.service.ts @@ -30,6 +30,7 @@ export class CohereService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: 
data.title, value: data.value, description: data.value, isError: false, @@ -41,10 +42,9 @@ export class CohereService extends AIService { private async generateResponses(): Promise { try { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -62,7 +62,7 @@ export class CohereService extends AIService { }); logging && createLogResponse('Cohere', userMessage, generatedSystemPrompt, prediction.text); - return this.sanitizeResponse(prediction.text, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(prediction.text, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny instanceof CohereTimeoutError) { diff --git a/src/services/ai/gemini.service.ts b/src/services/ai/gemini.service.ts index d3e6a92..cc76105 100644 --- a/src/services/ai/gemini.service.ts +++ b/src/services/ai/gemini.service.ts @@ -28,6 +28,7 @@ export class GeminiService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -39,11 +40,10 @@ export class GeminiService extends AIService { private async generateResponses(): Promise { try { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const maxTokens = this.params.config['max-tokens']; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -63,7 +63,7 @@ export class GeminiService extends AIService { const completion = response.text(); logging && createLogResponse('Gemini', userMessage, generatedSystemPrompt, completion); - return this.sanitizeResponse(completion, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(completion, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny.code === 'ENOTFOUND') { diff --git a/src/services/ai/groq.service.ts b/src/services/ai/groq.service.ts index 3d325ab..08b4b23 100644 --- a/src/services/ai/groq.service.ts +++ b/src/services/ai/groq.service.ts @@ -28,6 +28,7 @@ export class GroqService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -39,11 +40,10 @@ export class GroqService extends AIService { private async generateResponses(): Promise { try { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const maxTokens = this.params.config['max-tokens']; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -73,7 +73,7 @@ export class GroqService extends AIService { const result = chatCompletion.choices[0].message.content || ''; logging && createLogResponse('Groq', userMessage, generatedSystemPrompt, result); - return 
this.sanitizeResponse(result, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(result, this.params.config.ignoreBody); } catch (error) { throw error as any; } diff --git a/src/services/ai/hugging-face.service.ts b/src/services/ai/hugging-face.service.ts index bea92ba..0a47b9a 100644 --- a/src/services/ai/hugging-face.service.ts +++ b/src/services/ai/hugging-face.service.ts @@ -81,6 +81,7 @@ export class HuggingFaceService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -94,10 +95,9 @@ export class HuggingFaceService extends AIService { await this.intialize(); const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -110,7 +110,7 @@ export class HuggingFaceService extends AIService { // await this.deleteConversation(conversation.id); logging && createLogResponse('HuggingFace', userMessage, generatedSystemPrompt, response); - return this.sanitizeResponse(response, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(response, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny.code === 'ENOTFOUND') { diff --git a/src/services/ai/mistral.service.ts b/src/services/ai/mistral.service.ts index 5639cec..808a5e0 100644 --- a/src/services/ai/mistral.service.ts +++ b/src/services/ai/mistral.service.ts @@ -63,6 +63,7 @@ export class MistralService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -74,10 +75,9 @@ export class MistralService extends AIService { private async generateMessages(): Promise { try { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -87,7 +87,7 @@ export class MistralService extends AIService { await this.checkAvailableModels(); const chatResponse = await this.createChatCompletions(generatedSystemPrompt, userMessage); logging && createLogResponse('MistralAI', userMessage, generatedSystemPrompt, chatResponse); - return this.sanitizeResponse(chatResponse, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(chatResponse, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny.code === 'ENOTFOUND') { diff --git a/src/services/ai/ollama.service.ts b/src/services/ai/ollama.service.ts index 21bb55f..4a3c547 100644 --- a/src/services/ai/ollama.service.ts +++ b/src/services/ai/ollama.service.ts @@ -40,6 +40,7 @@ export class OllamaService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -69,10 +70,9 @@ export class OllamaService extends AIService { private async generateMessages(): Promise { try 
{ const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -82,7 +82,7 @@ export class OllamaService extends AIService { await this.checkIsAvailableOllama(); const chatResponse = await this.createChatCompletions(generatedSystemPrompt, userMessage); logging && createLogResponse(`Ollama_${this.model}`, userMessage, generatedSystemPrompt, chatResponse); - return this.sanitizeResponse(chatResponse, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(chatResponse, this.params.config.ignoreBody); } catch (error) { const errorAsAny = error as any; if (errorAsAny.code === 'ENOTFOUND') { diff --git a/src/services/ai/openai.service.ts b/src/services/ai/openai.service.ts index f6eab7c..97f4674 100644 --- a/src/services/ai/openai.service.ts +++ b/src/services/ai/openai.service.ts @@ -23,6 +23,7 @@ export class OpenAIService extends AIService { concatMap(messages => from(messages)), map(data => ({ name: `${this.serviceName} ${data.title}`, + short: data.title, value: data.value, description: data.value, isError: false, @@ -61,10 +62,9 @@ export class OpenAIService extends AIService { private async generateMessages(): Promise { const userMessage = this.params.userMessage; - const { generate, systemPrompt, systemPromptPath, logging, temperature } = this.params.config; + const { systemPrompt, systemPromptPath, logging, temperature } = this.params.config; const promptOptions: PromptOptions = { ...DEFAULT_PROMPT_OPTIONS, - generate, userMessage, systemPrompt, systemPromptPath, @@ -85,6 +85,6 @@ export class OpenAIService extends AIService { this.params.config.proxy ); - return this.sanitizeResponse(response, generate, this.params.config.ignoreBody); + return this.sanitizeResponse(response, this.params.config.ignoreBody); } } diff --git a/src/utils/config.ts b/src/utils/config.ts index 6ac2527..d453ef0 100644 --- a/src/utils/config.ts +++ b/src/utils/config.ts @@ -10,7 +10,6 @@ import { flattenArray } from './utils.js'; import type { TiktokenModel } from '@dqbd/tiktoken'; - export const DEFAULT_OLLMA_HOST = 'http://localhost:11434'; const { hasOwnProperty } = Object.prototype; @@ -38,19 +37,6 @@ const generalConfigParsers = { } return systemPromptPath; }, - generate(count?: string) { - if (!count) { - return 1; - } - - parseAssert('generate', /^\d+$/.test(count), 'Must be an integer'); - - const parsed = Number(count); - parseAssert('generate', parsed > 0, 'Must be greater than 0'); - parseAssert('generate', parsed <= 5, 'Must be less or equal to 5'); - - return parsed; - }, timeout(timeout?: string) { if (!timeout) { return 10_000; @@ -122,7 +108,6 @@ const modelConfigParsers: Record any>> proxy: (proxy?: string) => proxy || '', systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -146,7 +131,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: 
generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -175,7 +159,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, logging: generalConfigParsers.logging, ignoreBody: generalConfigParsers.ignoreBody, }, @@ -191,7 +174,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -217,7 +199,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -252,7 +233,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -272,7 +252,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -291,7 +270,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], @@ -310,7 +288,6 @@ const modelConfigParsers: Record any>> }, systemPrompt: generalConfigParsers.systemPrompt, systemPromptPath: generalConfigParsers.systemPromptPath, - generate: generalConfigParsers.generate, timeout: generalConfigParsers.timeout, temperature: generalConfigParsers.temperature, 'max-tokens': generalConfigParsers['max-tokens'], diff --git a/src/utils/log.ts b/src/utils/log.ts index 58874f3..4c39c6a 100644 --- a/src/utils/log.ts +++ b/src/utils/log.ts @@ -4,8 +4,6 @@ import path from 'path'; import { xxh64 } from '@pacote/xxhash'; -import { removeTextAfterPhrase } from './utils.js'; - export const logPath = path.join(os.homedir(), '.aipick_log'); const now = new Date(); @@ -19,14 +17,7 @@ export const createLogResponse = (aiName: string, userMessage: string, prompt: s writeFileSyncRecursive(fullPath, `${title}\n${response}\n\n${originData}`); return; } - const removedPrompt = removeTextAfterPhrase( - prompt, - 'Example response format:\n[\n {\n "subject": "string",\n "body": "string",\n },\n ...\n]' - ); - writeFileSyncRecursive( - fullPath, - `${title}\n${response}\n\n\n[AIPick System Prompt]\n${removedPrompt}\n\n\n[User Prompt]\n${userMessage}` - ); + writeFileSyncRecursive(fullPath, `${title}\n${response}\n\n\n[AIPick Prompt]\n${prompt}\n\n\n[User Prompt]\n${userMessage}`); }; export const generateLogFileName = (date: Date, diff: string) => { diff --git a/src/utils/prompt.ts 
b/src/utils/prompt.ts index b4992b5..d58b358 100644 --- a/src/utils/prompt.ts +++ b/src/utils/prompt.ts @@ -23,51 +23,25 @@ const parseTemplate = (template: string, options: PromptOptions): string => { }); }; -const defaultPrompt = (promptOptions: PromptOptions) => { +const finalPrompt = (): string => { // TODO: add something - const { generate } = promptOptions; return [].filter(Boolean).join('\n'); }; -const finalPrompt = (generate: number): string => { - return [ - `Generate exactly ${generate} response${generate !== 1 ? 's' : ''} based on the user message.`, - `Provide your response as a JSON array containing exactly ${generate} object${generate !== 1 ? 's' : ''}, each with "summary" and "description" keys.`, - `The array must always contain ${generate} element${generate !== 1 ? 's' : ''}, no more and no less.`, - `Example response format: - [ - ${Array(generate) - .fill(null) - .map( - (_, index) => `{ - "summary": "Brief summary of response ${index + 1}", - "description": "Detailed description of response ${index + 1}" - }` - ) - .join(',\n ')} - ]`, - `Ensure that the JSON array always contains exactly ${generate} element${generate !== 1 ? 's' : ''}, even if you need to provide similar or slightly varied responses to meet this requirement.`, - `The "summary" should be a concise overview, while the "description" should provide more detailed information.`, - `The response should be valid JSON that can be parsed without errors.`, - ] - .filter(Boolean) - .join('\n'); -}; - export const generatePrompt = (promptOptions: PromptOptions) => { - const { systemPrompt, systemPromptPath, generate } = promptOptions; + const { systemPrompt, systemPromptPath } = promptOptions; if (systemPrompt) { - return `${systemPrompt}\n${finalPrompt(generate)}`; + return `${systemPrompt}\n${finalPrompt()}`; } if (!systemPromptPath) { - return `${finalPrompt(generate)}`; + return `${finalPrompt()}`; } try { const systemPromptTemplate = fs.readFileSync(path.resolve(systemPromptPath), 'utf-8'); - return `${parseTemplate(systemPromptTemplate, promptOptions)}\n${finalPrompt(generate)}`; + return `${parseTemplate(systemPromptTemplate, promptOptions)}\n${finalPrompt()}`; } catch (error) { - return `${finalPrompt(generate)}`; + return `${finalPrompt()}`; } }; diff --git a/src/utils/utils.ts b/src/utils/utils.ts index f39dc27..8a7aa57 100644 --- a/src/utils/utils.ts +++ b/src/utils/utils.ts @@ -54,3 +54,9 @@ export const flattenArray = (arr: any[]): string[] => { return flat.concat(Array.isArray(toFlatten) ? flattenArray(toFlatten) : toFlatten); }, []); }; + +export const getFirstWordsFrom = (s: string, wordCount: number = 4): string => { + const words = s.split(' '); + const firstFiveWords = words.slice(0, wordCount); + return firstFiveWords.join(' '); +};
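
Taken together, the service-layer changes replace the old JSON-array contract (`RawAIResponse[]` with `summary`/`description` keys) with a single plain-text response per service. Below is a minimal, self-contained sketch of the new flow, assembled from the added code in `src/services/ai/ai.service.ts` and `src/utils/utils.ts`; the `demoSanitize` wrapper name is illustrative only and not an identifier from the codebase.

```ts
interface AIResponse {
    title: string;
    value: string;
}

// Mirrors the helper added in src/utils/utils.ts: take the first few
// space-separated words (default 4) of a string to use as a short title.
const getFirstWordsFrom = (s: string, wordCount: number = 4): string =>
    s.split(' ').slice(0, wordCount).join(' ');

// Illustrative stand-in for the simplified sanitizeResponse(): the raw
// completion text becomes a single choice, titled with its first words.
const demoSanitize = (generatedText: string): AIResponse[] => {
    const title = `${getFirstWordsFrom(generatedText)}...`;
    return [{ title, value: generatedText }];
};

console.log(demoSanitize('Rayleigh scattering makes the sky appear blue.'));
// → [{ title: 'Rayleigh scattering makes the...',
//      value: 'Rayleigh scattering makes the sky appear blue.' }]
```

Note that every service still passes `ignoreBody` through to `sanitizeResponse`, but with the JSON parsing removed the flag no longer affects the sanitized output.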
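
The other change repeated across every service is the new `short: data.title` field on each list entry, paired with `pickKey: 'short'` (and a larger `descPageSize`) in `ReactivePromptManager`, so the prompt can display the short title for a picked entry. A rough sketch of that mapping follows; the `Choice` interface and `buildChoice` helper are local stand-ins for `ReactiveListChoice` and the RxJS `map()` step used in the diff, not identifiers from the codebase, and the service name string is just an example value.

```ts
// Local stand-in for ReactiveListChoice from inquirer-reactive-list-prompt;
// field names follow the objects built in each service's choice pipeline.
interface Choice {
    name: string;        // "<service name> <title>" shown in the list
    short: string;       // short title, selected via pickKey: 'short'
    value: string;       // full response text returned on selection
    description: string; // full response text shown in the description pane
    isError: boolean;
}

// Illustrative equivalent of the map() step each service applies to a response.
const buildChoice = (serviceName: string, title: string, value: string): Choice => ({
    name: `${serviceName} ${title}`,
    short: title,
    value,
    description: value,
    isError: false,
});

const choice = buildChoice(
    'OpenAI',
    'Rayleigh scattering makes the...',
    'Rayleigh scattering makes the sky appear blue.'
);
console.log(choice.short); // 'Rayleigh scattering makes the...'
```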