Skip to content

Commit 5bf9b75

Browse files
iboB authored and Nexesenex committed
llama : C++20 compatibility for u8 strings (ggml-org#8408)
1 parent adaa364 commit 5bf9b75

File tree

1 file changed

+9
-3
lines changed

1 file changed

+9
-3
lines changed

llama.cpp

Lines changed: 9 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -62,6 +62,12 @@
6262
#include <io.h>
6363
#endif
6464

65+
#if __cplusplus >= 202000L
66+
#define LU8(x) (const char*)(u8##x)
67+
#else
68+
#define LU8(x) u8##x
69+
#endif
70+
6571
#include <algorithm>
6672
#include <array>
6773
#include <cassert>
@@ -24173,12 +24179,12 @@ static int32_t llama_chat_apply_template_internal(
2417324179
if (add_ass) {
2417424180
ss << "<|assistant|>";
2417524181
}
24176-
} else if (tmpl == "minicpm" || tmpl_contains(u8"<用户>")) {
24182+
} else if (tmpl == "minicpm" || tmpl_contains(LU8("<用户>"))) {
2417724183
// MiniCPM-3B-OpenHermes-2.5-v2-GGUF
2417824184
for (auto message : chat) {
2417924185
std::string role(message->role);
2418024186
if (role == "user") {
24181-
ss << u8"<用户>";
24187+
ss << LU8("<用户>");
2418224188
ss << trim(message->content);
2418324189
ss << "<AI>";
2418424190
} else {
@@ -24194,7 +24200,7 @@ static int32_t llama_chat_apply_template_internal(
2419424200
} else if (role == "user") {
2419524201
ss << "User: " << message->content << "\n\n";
2419624202
} else if (role == "assistant") {
24197-
ss << "Assistant: " << message->content << u8"<|end▁of▁sentence|>";
24203+
ss << "Assistant: " << message->content << LU8("<|end▁of▁sentence|>");
2419824204
}
2419924205
}
2420024206
if (add_ass) {

0 commit comments

Comments (0)