File tree — 1 file changed: +9 −3 lines changed
lines changed Original file line number Diff line number Diff line change 62
62
#include <io.h>
63
63
#endif
64
64
65
+ #if __cplusplus >= 202000L
66
+ #define LU8(x) (const char*)(u8##x)
67
+ #else
68
+ #define LU8(x) u8##x
69
+ #endif
70
+
65
71
#include <algorithm>
66
72
#include <array>
67
73
#include <cassert>
@@ -24173,12 +24179,12 @@ static int32_t llama_chat_apply_template_internal(
24173
24179
if (add_ass) {
24174
24180
ss << "<|assistant|>";
24175
24181
}
24176
- } else if (tmpl == "minicpm" || tmpl_contains(u8 "<用户>")) {
24182
+ } else if (tmpl == "minicpm" || tmpl_contains(LU8( "<用户>") )) {
24177
24183
// MiniCPM-3B-OpenHermes-2.5-v2-GGUF
24178
24184
for (auto message : chat) {
24179
24185
std::string role(message->role);
24180
24186
if (role == "user") {
24181
- ss << u8"<用户>";
24187
+ ss << LU8("<用户>");
24182
24188
ss << trim(message->content);
24183
24189
ss << "<AI>";
24184
24190
} else {
@@ -24194,7 +24200,7 @@ static int32_t llama_chat_apply_template_internal(
24194
24200
} else if (role == "user") {
24195
24201
ss << "User: " << message->content << "\n\n";
24196
24202
} else if (role == "assistant") {
24197
- ss << "Assistant: " << message->content << u8"<|end▁of▁sentence|>";
24203
+ ss << "Assistant: " << message->content << LU8("<|end▁of▁sentence|>");
24198
24204
}
24199
24205
}
24200
24206
if (add_ass) {
You can’t perform that action at this time.
0 commit comments