From 2a4033416a86915f4d59709a4f60e60baf7ee994 Mon Sep 17 00:00:00 2001
From: Kunal Vaishnavi
Date: Fri, 31 Jan 2025 23:04:52 +0000
Subject: [PATCH] Revert Python example changes

---
 examples/python/model-chat.py | 6 +++++-
 examples/python/model-qa.py   | 6 +++++-
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/examples/python/model-chat.py b/examples/python/model-chat.py
index c00d8ade4..12fb18a44 100644
--- a/examples/python/model-chat.py
+++ b/examples/python/model-chat.py
@@ -79,7 +79,11 @@ def main(args):
 
         if args.timings: started_timestamp = time.time()
 
-        prompt = f'{args.chat_template.format(input=text)}'
+        # If there is a chat template, use it
+        prompt = text
+        if args.chat_template:
+            prompt = f'{args.chat_template.format(input=text)}'
+
         input_tokens = tokenizer.encode(prompt)
 
         generator.append_tokens(input_tokens)
diff --git a/examples/python/model-qa.py b/examples/python/model-qa.py
index 1fde98468..5e639ef2b 100644
--- a/examples/python/model-qa.py
+++ b/examples/python/model-qa.py
@@ -71,7 +71,11 @@ def main(args):
 
         if args.timings: started_timestamp = time.time()
 
-        prompt = f'{args.chat_template.format(input=text)}'
+        # If there is a chat template, use it
+        prompt = text
+        if args.chat_template:
+            prompt = f'{args.chat_template.format(input=text)}'
+
         input_tokens = tokenizer.encode(prompt)
 
         generator.append_tokens(input_tokens)
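
For reference, the logic restored by this revert falls back to the raw user input when no chat template is supplied. The stand-alone sketch below reproduces that behavior outside the example scripts; the template string and input text are hypothetical values chosen for illustration, not taken from the scripts' argument parsing.

# Minimal sketch of the restored prompt-building behavior.
# chat_template and text are illustrative stand-ins for args.chat_template and the user input.
chat_template = '<|user|>\n{input} <|end|>\n<|assistant|>'
text = 'What is the capital of France?'

# If there is a chat template, use it; otherwise pass the raw text through unchanged.
prompt = text
if chat_template:
    prompt = f'{chat_template.format(input=text)}'

print(prompt)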