diff --git a/docs/templates.md b/docs/templates.md index 69121597..e38645c0 100644 --- a/docs/templates.md +++ b/docs/templates.md @@ -26,6 +26,13 @@ You can also save default parameters: llm --system 'Summarize this text in the voice of $voice' \ --model gpt-4 -p voice GlaDOS --save summarize ``` + +Add `--schema` to bake a {ref}`schema ` into your template: + +```bash +llm --schema dog.schema.json 'invent a dog' --save dog +``` + If you add `--extract` the setting to {ref}`extract the first fenced code block ` will be persisted in the template. ```bash llm --system 'write a Python function' --extract --save python-function @@ -68,15 +75,18 @@ This will open the system default editor. :::{tip} You can control which editor will be used here using the `EDITOR` environment variable - for example, to use VS Code: - - export EDITOR="code -w" - +```bash +export EDITOR="code -w" +``` Add that to your `~/.zshrc` or `~/.bashrc` file depending on which shell you use (`zsh` is the default on macOS since macOS Catalina in 2019). ::: You can also create a file called `summary.yaml` in the folder shown by running `llm templates path`, for example: ```bash -$ llm templates path +llm templates path +``` +Example output: +``` /Users/simon/Library/Application Support/io.datasette.llm/templates ``` @@ -120,6 +130,26 @@ You can combine system and regular prompts like so: system: You speak like an excitable Victorian adventurer prompt: 'Summarize this: $input' ``` +### Schemas + +Use the `schema:` key to embed a JSON schema (as YAML) in your template. The easiest way to create these is with the `llm --schema ... 
--save name-of-template` command - the result should look something like this:

```yaml
name: dogs
schema_object:
  properties:
    dogs:
      items:
        properties:
          bio:
            type: string
          name:
            type: string
        type: object
      type: array
  type: object
```

### Additional template variables diff --git a/docs/usage.md b/docs/usage.md index 1669cbe0..e3392f6d 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -152,7 +152,20 @@ llm --schema '{ } }' -m gpt-4o-mini 'invent two dogs' ``` -The JSON returned from the model should match that schema. +LLM will pass this to the model, which should result in JSON returned from the model matching that schema. + +You can also save the JSON schema to a file and reference the filename using `--schema`: + +```bash +llm --schema dogs.schema.json 'invent two dogs' +``` +Or save your schema {ref}`to a template ` like this: + +```bash +llm --schema dogs.schema.json --save dogs +# Then to use it: +llm -t dogs 'invent two dogs' +``` Be warned that different models may support different dialects of the JSON schema specification. diff --git a/llm/cli.py b/llm/cli.py index 73d508a3..09eba956 100644 --- a/llm/cli.py +++ b/llm/cli.py @@ -299,7 +299,7 @@ def prompt( model_aliases = get_model_aliases() def read_prompt(): - nonlocal prompt + nonlocal prompt, schema # Is there extra prompt available on stdin? 
stdin_prompt = None @@ -318,6 +318,7 @@ def read_prompt(): and sys.stdin.isatty() and not attachments and not attachment_types + and not schema ): # Hang waiting for input to stdin (unless --save) prompt = sys.stdin.read() @@ -356,6 +357,8 @@ def read_prompt(): to_save["extract"] = True if extract_last: to_save["extract_last"] = True + if schema: + to_save["schema_object"] = schema path.write_text( yaml.dump( to_save, @@ -374,6 +377,8 @@ def read_prompt(): template_obj = load_template(template) extract = template_obj.extract extract_last = template_obj.extract_last + if template_obj.schema_object: + schema = template_obj.schema_object prompt = read_prompt() try: prompt, system = template_obj.evaluate(prompt, params) diff --git a/llm/templates.py b/llm/templates.py index ca477195..502007f5 100644 --- a/llm/templates.py +++ b/llm/templates.py @@ -12,6 +12,7 @@ class Template(BaseModel): # Should a fenced code block be extracted? extract: Optional[bool] = None extract_last: Optional[bool] = None + schema_object: Optional[dict] = None model_config = ConfigDict(extra="forbid") diff --git a/tests/test_templates.py b/tests/test_templates.py index e66005c4..381fb422 100644 --- a/tests/test_templates.py +++ b/tests/test_templates.py @@ -97,6 +97,12 @@ def test_templates_list(templates_path, args): {"system": "write python", "extract": True}, None, ), + # So should schemas + ( + ["--schema", '{"properties": {"name": {"type": "string"}}}'], + {"schema_object": {"properties": {"name": {"type": "string"}}}}, + None, + ), ), ) def test_templates_prompt_save(templates_path, args, expected_prompt, expected_error):