Skip to content

Commit

Permalink
refine markdown prompt (infiniflow#2551)
Browse files Browse the repository at this point in the history
### What problem does this PR solve?

Markdown requires a blank line before and after a `------` separator and before a `###` heading for them to render correctly. This change surrounds the knowledge-chunk separator and the "Elapsed" heading with blank lines (`\n\n`) so the generated prompt and timing footer display as intended instead of running into the adjacent text.
### Type of change

- [x] Performance Improvement
  • Loading branch information
KevinHuSh authored Sep 24, 2024
1 parent b4ae429 commit 79c64ed
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions api/db/services/dialog_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ def chat(dialog, messages, stream=True, **kwargs):
yield {"answer": empty_res, "reference": kbinfos, "audio_binary": tts(tts_mdl, empty_res)}
return {"answer": prompt_config["empty_response"], "reference": kbinfos}

kwargs["knowledge"] = "\n------\n".join(knowledges)
kwargs["knowledge"] = "\n\n------\n\n".join(knowledges)
gen_conf = dialog.llm_setting

msg = [{"role": "system", "content": prompt_config["system"].format(**kwargs)}]
Expand Down Expand Up @@ -221,7 +221,7 @@ def decorate_answer(answer):
if answer.lower().find("invalid key") >= 0 or answer.lower().find("invalid api") >= 0:
answer += " Please set LLM API-Key in 'User Setting -> Model Providers -> API-Key'"
done_tm = timer()
prompt += "\n### Elapsed\n - Retrieval: %.1f ms\n - LLM: %.1f ms"%((retrieval_tm-st)*1000, (done_tm-st)*1000)
prompt += "\n\n### Elapsed\n - Retrieval: %.1f ms\n - LLM: %.1f ms"%((retrieval_tm-st)*1000, (done_tm-st)*1000)
return {"answer": answer, "reference": refs, "prompt": prompt}

if stream:
Expand Down

0 comments on commit 79c64ed

Please sign in to comment.