Skip to content

Commit

Permalink
llama : C++20 compatibility for u8 strings (#8408)
Browse files Browse the repository at this point in the history
  • Loading branch information
iboB authored Jul 10, 2024
1 parent 7a80710 commit cc61948
Showing 1 changed file with 9 additions and 3 deletions.
12 changes: 9 additions & 3 deletions src/llama.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,12 @@
#include <io.h>
#endif

// In C++20, u8"..." literals changed type from const char[] to const char8_t[],
// which no longer converts to const char* / std::string. LU8() casts them back
// so the UTF-8 chat-template strings below keep compiling in both modes.
// Detect char8_t via its feature-test macro rather than __cplusplus alone:
// MSVC keeps __cplusplus at 199711L unless /Zc:__cplusplus is passed, so a pure
// version check would skip the cast under /std:c++20 and break the build; the
// __cplusplus comparison is kept as a fallback for conforming compilers.
#if defined(__cpp_char8_t) || __cplusplus >= 202000L
#define LU8(x) (const char*)(u8##x)
#else
#define LU8(x) u8##x
#endif

#include <algorithm>
#include <array>
#include <cassert>
Expand Down Expand Up @@ -21511,12 +21517,12 @@ static int32_t llama_chat_apply_template_internal(
if (add_ass) {
ss << "<|assistant|>";
}
} else if (tmpl == "minicpm" || tmpl_contains(u8"<用户>")) {
} else if (tmpl == "minicpm" || tmpl_contains(LU8("<用户>"))) {
// MiniCPM-3B-OpenHermes-2.5-v2-GGUF
for (auto message : chat) {
std::string role(message->role);
if (role == "user") {
ss << u8"<用户>";
ss << LU8("<用户>");
ss << trim(message->content);
ss << "<AI>";
} else {
Expand All @@ -21532,7 +21538,7 @@ static int32_t llama_chat_apply_template_internal(
} else if (role == "user") {
ss << "User: " << message->content << "\n\n";
} else if (role == "assistant") {
ss << "Assistant: " << message->content << u8"<|end▁of▁sentence|>";
ss << "Assistant: " << message->content << LU8("<|end▁of▁sentence|>");
}
}
if (add_ass) {
Expand Down

0 comments on commit cc61948

Please sign in to comment.