Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

switch to chat api #17

Merged
merged 1 commit into from
May 21, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 19 additions & 6 deletions src/comai/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import typer
from termcolor import colored
import getch
from typing import List, Tuple, Optional
from typing import List, Optional
from typing_extensions import Annotated
from time import sleep
from threading import Thread, Lock
Expand Down Expand Up @@ -48,7 +48,10 @@ def start_wait_prompt():
t.start()
return print_mutex

def print_answer(command: str, print_mutex):
def print_answer(command_chunks: str, print_mutex):

first_chunk = next(command_chunks)

print_mutex.acquire()
characters_to_remove = len(initial_prompt) + num_dots
print(LEFT * characters_to_remove, end='')
Expand All @@ -57,19 +60,26 @@ def print_answer(command: str, print_mutex):
print(LEFT * characters_to_remove, end='')

print(colored(answer_prompt, 'green'), end='', flush=True)
print_words_sequentially(command)
print(first_chunk, end='', flush=True)
for chunk in command_chunks:
print(chunk, end='', flush=True)

def version_callback(value: bool):
    """Typer ``--version`` callback: print the version and stop processing.

    Typer invokes this eagerly when the flag is present; raising
    ``typer.Exit`` prevents the main command body from running.
    """
    if not value:
        return
    print(f"comai version: {__version__}")
    raise typer.Exit()

def save_command(command_chunks, command: list):
    """Tee the streamed chunks into *command* while re-yielding them.

    Lets the caller both print chunks as they arrive and keep the full
    command text (``''.join(command)``) once the stream is exhausted.
    """
    record = command.append
    for piece in command_chunks:
        record(piece)
        yield piece

@app.command()
def main(
instructions: List[str],
version: Annotated[Optional[bool], typer.Option("--version", callback=version_callback)] = None
):
input_text = ' '.join(instructions[1:])
input_text = ' '.join(instructions)

api_key = config.load_api_key()
if not api_key:
Expand All @@ -78,8 +88,11 @@ def main(
config.save_api_key(api_key)

print_mutex = start_wait_prompt()
command = translation.translate_to_command(input_text, api_key)
print_answer(command, print_mutex)
command_chunks = translation.translate_to_command(input_text, api_key)
command = []
command_chunks = save_command(command_chunks, command)
print_answer(command_chunks, print_mutex)
command = ''.join(command)

char = getch.getch()
print()
Expand Down
20 changes: 13 additions & 7 deletions src/comai/translation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,21 @@

def translate_to_command(nl_description, openai_api_key):
    """Translate a natural-language instruction into a UNIX command.

    Streams the answer from the OpenAI chat API, yielding content chunks
    as they arrive so the caller can print them incrementally.

    NOTE: the scraped diff interleaved the removed `openai.Completion`
    call with the added `openai.ChatCompletion` one; this is the merged
    (post-PR) version.

    :param nl_description: plain-English description of the desired command
    :param openai_api_key: OpenAI API key to authenticate the request
    :yields: str fragments of the generated command, in order
    """
    openai.api_key = openai_api_key
    response = openai.ChatCompletion.create(
        model='gpt-3.5-turbo',
        max_tokens=60,
        n=1,
        stop=None,
        temperature=0.2,  # low temperature: deterministic, command-like output
        stream=True,      # yield chunks as they arrive instead of blocking
        messages=[
            {'role': 'system', 'content': 'You are a bot translating natural language instructions into UNIX commands. Your output is just an executable command'},
            # One-shot example to anchor the expected terse output format.
            {'role': 'user', 'content': 'generate random UUID'},
            {'role': 'assistant', 'content': 'uuidgen'},
            {'role': 'user', 'content': nl_description},
        ]
    )

    for chunk in response:
        # The final streamed chunk carries no 'content' key, only a finish
        # reason, so guard before yielding.
        if 'content' in chunk.choices[0].delta:
            yield chunk.choices[0].delta.content
4 changes: 2 additions & 2 deletions tests/test_comai.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,5 +36,5 @@ def test_missing_instruction():
assert result.exit_code != 0

def test_translation():
    """End-to-end check: a streamed translation joins to the expected command.

    The scraped diff mixed the pre-PR lines (string return compared with
    ``==``) with the post-PR ones (chunk generator joined first); this is
    the merged (post-PR) version.
    """
    command = translation.translate_to_command("show files", api_key)
    # translate_to_command now streams chunks, so join before comparing.
    assert ''.join(command) == 'ls'