diff --git a/packages/tasks/src/local-apps.ts b/packages/tasks/src/local-apps.ts
index bc81e598f..cd7e5f1ea 100644
--- a/packages/tasks/src/local-apps.ts
+++ b/packages/tasks/src/local-apps.ts
@@ -89,7 +89,7 @@ const snippetLlamacpp = (model: ModelData, filepath?: string): LocalAppSnippet[]
 		`${binary} \\`,
 		` --hf-repo "${model.id}" \\`,
 		` --hf-file ${filepath ?? "{{GGUF_FILE}}"} \\`,
-		` -p "${model.tags.includes("conversational") ? "You are a helpful assistant" : "Once upon a time "}"`,
+		` -p "${model.tags.includes("conversational") ? "You are a helpful assistant" : "Once upon a time"}"`,
 	].join("\n");
 	if (model.tags.includes("conversational")) {
 		snippet += " \\\n --conversation";
@@ -164,7 +164,7 @@ const snippetVllm = (model: ModelData): LocalAppSnippet[] => {
 			`	-H "Content-Type: application/json" \\`,
 			`	--data '{`,
 			`		"model": "${model.id}",`,
-			`		"prompt": "Once upon a time ",`,
+			`		"prompt": "Once upon a time",`,
 			`		"max_tokens": 512,`,
 			`		"temperature": 0.5`,
 			`	}'`,
diff --git a/packages/tasks/src/model-libraries-snippets.ts b/packages/tasks/src/model-libraries-snippets.ts
index cbfefacc9..ff74cf676 100644
--- a/packages/tasks/src/model-libraries-snippets.ts
+++ b/packages/tasks/src/model-libraries-snippets.ts
@@ -381,7 +381,7 @@ llm = Llama.from_pretrained(
 )`;
 	} else {
 		snippet += `output = llm(
-	"Once upon a time ",
+	"Once upon a time",
 	max_tokens=512,
 	echo=True
 )