Skip to content

Commit

Permalink
feat: Shorten command names to CChat for legacy command and improve error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
jellydn committed Mar 2, 2024
1 parent eb31065 commit 5422646
Show file tree
Hide file tree
Showing 4 changed files with 62 additions and 109 deletions.
26 changes: 8 additions & 18 deletions rplugin/python3/CopilotChat/copilot_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@
from CopilotChat.handlers.vsplit_chat_handler import VSplitChatHandler
from CopilotChat.mypynvim.core.nvim import MyNvim

# Names of the internal helper commands this plugin registers with Neovim
# for keymap and autocmd dispatch. (Renamed from the long-form
# "CopilotChat*" prefixes to the shorter "CChat*" prefixes.)
PLUGIN_MAPPING_CMD = "CChatMapping"
PLUGIN_AUTOCMD_CMD = "CChatAutocmd"


# @pynvim.plugin
@pynvim.plugin
class CopilotPlugin(object):
def __init__(self, nvim: pynvim.Nvim):
self.nvim: MyNvim = MyNvim(nvim, PLUGIN_MAPPING_CMD, PLUGIN_AUTOCMD_CMD)
Expand All @@ -18,13 +18,13 @@ def init_vsplit_chat_handler(self):
if self.vsplit_chat_handler is None:
self.vsplit_chat_handler = VSplitChatHandler(self.nvim)

@pynvim.command("CChatVsplitToggle")
def copilot_chat_toggle_cmd(self):
    """Toggle the vertical-split chat window (:CChatVsplitToggle).

    Lazily creates the vsplit chat handler on first use, then shows or
    hides the chat split.
    """
    self.init_vsplit_chat_handler()
    if self.vsplit_chat_handler:
        self.vsplit_chat_handler.toggle_vsplit()

@pynvim.command("CopilotChatBuffer", nargs="1")
@pynvim.command("CChatBuffer", nargs="1")
def copilot_agent_buffer_cmd(self, args: list[str]):
self.init_vsplit_chat_handler()
current_buffer = self.nvim.current.buffer
Expand All @@ -36,7 +36,7 @@ def copilot_agent_buffer_cmd(self, args: list[str]):
self.vsplit_chat_handler.vsplit()
self.vsplit_chat_handler.chat(args[0], file_type, code)

@pynvim.command("CopilotChat", nargs="1")
@pynvim.command("CChat", nargs="1")
def copilot_agent_cmd(self, args: list[str]):
self.init_vsplit_chat_handler()
if self.vsplit_chat_handler:
Expand All @@ -46,21 +46,11 @@ def copilot_agent_cmd(self, args: list[str]):
code = self.nvim.eval("getreg('\"')")
self.vsplit_chat_handler.chat(args[0], file_type, code)

@pynvim.command("CChatReset")
def copilot_agent_reset_cmd(self):
    """Clear the chat buffer (:CChatReset).

    No-op when the vsplit chat handler was never created.
    """
    if self.vsplit_chat_handler:
        self.vsplit_chat_handler.reset_buffer()

@pynvim.command("CopilotChatVisual", nargs="1", range="")
def copilot_agent_visual_cmd(self, args: list[str], range: list[int]):
    """Chat about the current visual selection (:CopilotChatVisual).

    ``args[0]`` is the user prompt; ``range`` is the 1-based
    ``[start, end]`` line pair Neovim passes for the selection.
    NOTE(review): the parameter name shadows the builtin ``range`` but is
    kept because pynvim supplies it under this name.
    """
    self.init_vsplit_chat_handler()
    if self.vsplit_chat_handler:
        file_type = self.nvim.current.buffer.options["filetype"]
        # Selection lines are 1-based inclusive; buffer indexing is 0-based.
        code_lines = self.nvim.current.buffer[range[0] - 1 : range[1]]
        code = "\n".join(code_lines)
        self.vsplit_chat_handler.vsplit()
        self.vsplit_chat_handler.chat(args[0], file_type, code)

def init_inplace_chat_handler(self):
    """Create the in-place chat handler the first time it is needed."""
    if self.inplace_chat_handler is not None:
        return
    self.inplace_chat_handler = InPlaceChatHandler(self.nvim)
Expand All @@ -76,7 +66,7 @@ def plugin_autocmd_cmd(self, args):
event, id, bufnr = args
self.nvim.autocmd_mapper.execute(event, id, bufnr)

@pynvim.command("CopilotChatInPlace", nargs="*", range="")
@pynvim.command("CChatInPlace", nargs="*", range="")
def inplace_cmd(self, args: list[str], range: list[int]):
self.init_inplace_chat_handler()
if self.inplace_chat_handler:
Expand Down
81 changes: 32 additions & 49 deletions rplugin/python3/CopilotChat/handlers/chat_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,10 @@ def __init__(self, nvim: MyNvim, buffer: MyBuffer):
self.copilot: Copilot = None
self.buffer: MyBuffer = buffer
self.proxy: str = os.getenv("HTTPS_PROXY") or os.getenv("ALL_PROXY") or ""
self.language = self.nvim.eval("g:copilot_chat_language")
try:
self.language = self.nvim.eval("g:copilot_chat_language")
except Exception:
self.language = ""

# public

Expand All @@ -47,9 +50,12 @@ def chat(
"""Disable vim diagnostics on the chat buffer"""
self.nvim.command(":lua vim.diagnostic.disable()")

disable_separators = (
self.nvim.eval("g:copilot_chat_disable_separators") == "yes"
)
try:
disable_separators = (
self.nvim.eval("g:copilot_chat_disable_separators") == "yes"
)
except Exception:
disable_separators = False

# Validate and set temperature
temperature = self._get_temperature()
Expand All @@ -59,12 +65,6 @@ def chat(

if system_prompt is None:
system_prompt = self._construct_system_prompt(prompt)
# Start the spinner
self.nvim.exec_lua('require("CopilotChat.spinner").show()')

self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"Chatting with {model} model"
)

if not disable_start_separator:
self._add_start_separator(
Expand All @@ -75,20 +75,23 @@ def chat(
system_prompt, prompt, code, filetype, model, temperature=temperature
)

# Stop the spinner
self.nvim.exec_lua('require("CopilotChat.spinner").hide()')

if not disable_end_separator:
self._add_end_separator(model, disable_separators)

# private
def _set_proxy(self):
self.proxy = self.nvim.eval("g:copilot_chat_proxy")
try:
self.proxy = self.nvim.eval("g:copilot_chat_proxy")
except Exception:
self.proxy = ""
if "://" not in self.proxy:
self.proxy = None

def _get_temperature(self):
temperature = self.nvim.eval("g:copilot_chat_temperature")
try:
temperature = self.nvim.eval("g:copilot_chat_temperature")
except Exception:
temperature = DEFAULT_TEMPERATURE
try:
temperature = float(temperature)
if not 0 <= temperature <= 1:
Expand Down Expand Up @@ -146,9 +149,12 @@ def _add_regular_start_separator(
winnr: int,
no_annoyance: bool = False,
):
hide_system_prompt = (
self.nvim.eval("g:copilot_chat_hide_system_prompt") == "yes"
)
try:
hide_system_prompt = (
self.nvim.eval("g:copilot_chat_hide_system_prompt") == "yes"
)
except Exception:
hide_system_prompt = True

if hide_system_prompt:
system_prompt = "...System prompt hidden..."
Expand Down Expand Up @@ -193,9 +199,13 @@ def _add_start_separator_with_token_count(

encoding = tiktoken.encoding_for_model("gpt-4")

hide_system_prompt = (
self.nvim.eval("g:copilot_chat_hide_system_prompt") == "yes"
)
try:
hide_system_prompt = (
self.nvim.eval("g:copilot_chat_hide_system_prompt") == "yes"
)
except Exception:
hide_system_prompt = True

num_total_tokens = len(encoding.encode(f"{system_prompt}\n{prompt}\n{code}"))
num_system_tokens = len(encoding.encode(system_prompt))
num_prompt_tokens = len(encoding.encode(prompt))
Expand Down Expand Up @@ -282,28 +292,7 @@ def _add_chat_messages(
self.copilot.authenticate()

last_line_col = 0
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)',
f"System prompt: {system_prompt}",
)
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"Prompt: {prompt}"
)
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"Code: {code}"
)
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"File type: {file_type}"
)
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"Model: {model}"
)
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"Temperature: {temperature}"
)
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', "Asking Copilot"
)

# TODO: Abort request if the user closes the layout
for token in self.copilot.ask(
system_prompt,
Expand All @@ -313,9 +302,6 @@ def _add_chat_messages(
model=model,
temperature=temperature,
):
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', f"Token: {token}"
)
buffer_lines = cast(list[str], self.buffer.lines())
last_line_row = len(buffer_lines) - 1
self.nvim.api.buf_set_text(
Expand All @@ -329,9 +315,6 @@ def _add_chat_messages(
last_line_col += len(token.encode("utf-8"))
if "\n" in token:
last_line_col = 0
self.nvim.exec_lua(
'require("CopilotChat.utils").log_info(...)', "Copilot answered"
)

""" Enable vim diagnostics on the chat buffer after the chat is complete """
self.nvim.command(":lua vim.diagnostic.enable()")
Expand Down
49 changes: 11 additions & 38 deletions rplugin/python3/CopilotChat/handlers/inplace_chat_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,10 @@ def __init__(self, nvim: MyNvim):
self.diff_mode: bool = False
self.model: str = MODEL_GPT4
self.system_prompt: str = "SENIOR_DEVELOPER_PROMPT"
self.language = self.nvim.eval("g:copilot_chat_language")

# Add user prompts collection
self.user_prompts = self.nvim.eval("g:copilot_chat_user_prompts")
self.current_user_prompt = 0
try:
self.language = self.nvim.eval("g:copilot_chat_language")
except Exception:
self.language = ""

# Initialize popups
self.original_popup = PopUp(nvim, title="Original")
Expand All @@ -45,7 +44,13 @@ def __init__(self, nvim: MyNvim):
]

# Initialize layout base on help text option
self.help_popup_visible = self.nvim.eval("g:copilot_chat_show_help") == "yes"
try:
self.help_popup_visible = (
self.nvim.eval("g:copilot_chat_show_help") == "yes"
)
except Exception:
self.help_popup_visible = False

if self.help_popup_visible:
self.layout = self._create_layout()
self.popups.append(self.help_popup)
Expand Down Expand Up @@ -174,20 +179,6 @@ def _chat(self):
def _set_prompt(self, prompt: str):
self.prompt_popup.buffer.lines(prompt)

def _set_next_user_prompt(self):
self.current_user_prompt = (self.current_user_prompt + 1) % len(
self.user_prompts
)
prompt = list(self.user_prompts.keys())[self.current_user_prompt]
self.prompt_popup.buffer.lines(self.user_prompts[prompt])

def _set_previous_user_prompt(self):
self.current_user_prompt = (self.current_user_prompt - 1) % len(
self.user_prompts
)
prompt = list(self.user_prompts.keys())[self.current_user_prompt]
self.prompt_popup.buffer.lines(self.user_prompts[prompt])

def _toggle_model(self):
if self.model == MODEL_GPT4:
self.model = MODEL_GPT35_TURBO
Expand Down Expand Up @@ -252,18 +243,6 @@ def _set_keymaps(self):
"i", "<C-s>", lambda: (self.nvim.feed("<Esc>"), self._chat())
)

self.prompt_popup.map(
"n",
"<C-n>",
lambda: self._set_next_user_prompt(),
)

self.prompt_popup.map(
"n",
"<C-p>",
lambda: self._set_previous_user_prompt(),
)

for i, popup in enumerate(self.popups):
popup.buffer.map("n", "q", lambda: self.layout.unmount())
popup.buffer.map("n", "<C-l>", lambda: self._clear_chat_history())
Expand Down Expand Up @@ -298,19 +277,13 @@ def _set_help_content(self):
"Prompt Binding:",
" ': Set prompt to SIMPLE_DOCSTRING",
" s: Set prompt to SEPARATE",
" <C-p>: Get the previous user prompt",
" <C-n>: Set prompt to next item in user prompts",
"",
"Model:",
" <C-g>: Toggle AI model",
" <C-m>: Set system prompt to next item in system prompts",
"",
"User prompts:",
]

for prompt in self.user_prompts:
help_content.append(f" {prompt}: {self.user_prompts[prompt]}")

self.help_popup.buffer.lines(help_content)

def _toggle_help(self):
Expand Down
15 changes: 11 additions & 4 deletions rplugin/python3/CopilotChat/handlers/vsplit_chat_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,17 @@ def __init__(self, nvim: MyNvim):
"filetype": "copilot-chat",
},
)
self.language = self.nvim.eval("g:copilot_chat_language")
self.clear_chat_on_new_prompt = (
self.nvim.eval("g:copilot_chat_clear_chat_on_new_prompt") == "yes"
)
try:
self.language = self.nvim.eval("g:copilot_chat_language")
except Exception:
self.language = ""

try:
self.clear_chat_on_new_prompt = (
self.nvim.eval("g:copilot_chat_clear_chat_on_new_prompt") == "yes"
)
except Exception:
self.clear_chat_on_new_prompt = False

def vsplit(self):
self.buffer.option("filetype", "copilot-chat")
Expand Down

0 comments on commit 5422646

Please sign in to comment.