
Commit

Update personality.py
ParisNeo authored Sep 4, 2024
1 parent add8b43 · commit 86238fe
Showing 1 changed file with 3 additions and 3 deletions.
lollms/personality.py: 6 changes (3 additions & 3 deletions)
@@ -749,7 +749,7 @@ def generate_with_images(self, prompt, images, max_size, temperature = None, top
top_p=self.model_top_p if top_p is None else top_p,
repeat_penalty=self.model_repeat_penalty if repeat_penalty is None else repeat_penalty,
repeat_last_n = self.model_repeat_last_n if repeat_last_n is None else repeat_last_n
-            ).strip()
+            )
return self.bot_says

def generate(self, prompt, max_size = None, temperature = None, top_k = None, top_p=None, repeat_penalty=None, repeat_last_n=None, callback=None, debug=False, show_progress=False ):
@@ -769,7 +769,7 @@ def generate(self, prompt, max_size = None, temperature = None, top_k = None, to
top_p=self.model_top_p if top_p is None else top_p,
repeat_penalty=self.model_repeat_penalty if repeat_penalty is None else repeat_penalty,
repeat_last_n = self.model_repeat_last_n if repeat_last_n is None else repeat_last_n,
-            ).strip()
+            )
if debug:
self.print_prompt("prompt", prompt+self.bot_says)
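
Note: in both hunks above, the trailing .strip() call is removed, so the generated text is now kept with any leading or trailing whitespace the model produced instead of being trimmed before it is returned. A minimal, illustrative sketch of the difference (the sample string below is made up, not taken from the repository):

    # Illustrative only: the effect of dropping .strip() on the generated text.
    raw_output = "  Sure, here is the answer.\n"

    # Before this commit, surrounding whitespace was removed:
    print(repr(raw_output.strip()))   # 'Sure, here is the answer.'

    # After this commit, the text is kept exactly as generated:
    print(repr(raw_output))           # '  Sure, here is the answer.\n'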

@@ -3935,7 +3935,7 @@ def mix_it_up(self, prompt: str, models, master_model, nb_rounds=2, max_generati

return rounds_info

-    def answer(self, context_details, callback=None, send_full=True, custom_entries = ""):
+    def answer(self, context_details, custom_entries = "", send_full=True, callback=None):
if context_details["is_continue"]:
full_prompt = self.build_prompt_from_context_details(context_details, custom_entries=custom_entries, suppress= ["ai_prefix"])
else:
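
Note: the final visible change reorders the optional parameters of answer while keeping their default values. Callers that pass these arguments by keyword are unaffected; only positional callers would bind them differently. A small illustrative sketch of that Python behaviour (the toy functions below are stand-ins, not code from the repository):

    # Toy stand-ins for the old and new signatures (illustrative only):
    def answer_old(context_details, callback=None, send_full=True, custom_entries=""):
        return callback, send_full, custom_entries

    def answer_new(context_details, custom_entries="", send_full=True, callback=None):
        return callback, send_full, custom_entries

    # Keyword arguments bind identically under both signatures:
    assert answer_old({}, callback=print) == answer_new({}, callback=print)

    # A positional second argument changes meaning:
    answer_old({}, print)   # binds print to callback
    answer_new({}, print)   # binds print to custom_entries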
