
Commit

enhanced
ParisNeo committed Oct 1, 2024
1 parent 2fcdb88 commit 9050181
Showing 1 changed file with 9 additions and 3 deletions.
lollms/personality.py (12 changes: 9 additions & 3 deletions)
@@ -2401,19 +2401,22 @@ def generate_codes(self, prompt, max_size = None, temperature = None, top_k = No
         codes = self.extract_code_blocks(response)
         return codes
 
-    def generate_code(self, prompt, images=[], max_size = None, temperature = None, top_k = None, top_p=None, repeat_penalty=None, repeat_last_n=None, callback=None, debug=False ):
+    def generate_code(self, prompt, images=[], max_size = None, temperature = None, top_k = None, top_p=None, repeat_penalty=None, repeat_last_n=None, callback=None, debug=False, return_full_generated_code=False ):
+        response_full = ""
         if len(self.personality.image_files)>0:
             response = self.personality.generate_with_images(self.system_custom_header("Generation infos")+ "Generated code must be put inside the adequate markdown code tag. Use this template:\n```language name\nCode\n```\nMake sure only a single code tag is generated at each dialogue turn." + self.separator_template + prompt, self.personality.image_files, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
         elif len(images)>0:
             response = self.personality.generate_with_images(self.system_custom_header("Generation infos")+ "Generated code must be put inside the adequate markdown code tag. Use this template:\n```language name\nCode\n```\nMake sure only a single code tag is generated at each dialogue turn." + self.separator_template + prompt, images, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
         else:
             response = self.personality.generate(self.system_custom_header("Generation infos")+ "Generated code must be put inside the adequate markdown code tag. Use this template:\n```language name\nCode\n```\nMake sure only a single code tag is generated at each dialogue turn." + self.separator_template + prompt, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
+        response_full += response
         codes = self.extract_code_blocks(response)
         if len(codes)>0:
             if not codes[-1]["is_complete"]:
                 code = "\n".join(codes[-1]["content"].split("\n")[:-1])
                 while not codes[-1]["is_complete"]:
                     response = self.personality.generate(prompt+code+self.user_full_header+"continue the code. Start from last line and continue the code. Put the code inside a markdown code tag."+self.separator_template+self.ai_full_header, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
+                    response_full += response
                     codes = self.extract_code_blocks(response)
                     if len(codes)==0:
                         break
@@ -2424,8 +2427,11 @@ def generate_code(self, prompt, images=[], max_size = None, temperature = None,
                         code +="\n"+ "\n".join(codes[-1]["content"].split("\n"))
             else:
                 code = codes[-1]["content"]
-
-            return code
+
+            if return_full_generated_code:
+                return code, response_full
+            else:
+                return code
         else:
             return None
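
For orientation, below is a minimal caller-side sketch of the new return_full_generated_code flag. It is not part of the commit; it assumes that script is an APScript-style object exposing the patched generate_code method, and that the prompt text is purely illustrative. Note that when no code block can be extracted the method still returns a bare None, so the tuple should only be unpacked on success.

    # Hypothetical usage sketch (not from the commit); `script` is assumed to be
    # an APScript-like object that exposes the patched generate_code method.
    prompt = "Write a Python function that reverses a string."

    # Default behaviour is unchanged: only the extracted code block is returned.
    code = script.generate_code(prompt)

    # With the new flag, the extracted code and the accumulated raw responses
    # (including any continuation turns) are returned together.
    result = script.generate_code(prompt, return_full_generated_code=True)
    if result is None:
        print("No code block could be extracted from the model's response.")
    else:
        code, response_full = result
        print(code)           # the merged code block
        print(response_full)  # every raw response produced during generation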

