Skip to content

Commit fed89b9

Browse files
Fixing errors. [skip ci]
1 parent 8f71d0f commit fed89b9

File tree

4 files changed

+460
-514
lines changed

4 files changed

+460
-514
lines changed

CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
cmake_minimum_required(VERSION 3.14) # for add_link_options and implicit target directories.
22
project("llama.cpp" C CXX)
33
include(CheckIncludeFileCXX)
4-
4+
set(CMAKE_CXX_STANDARD 23)
55
#set(CMAKE_WARN_DEPRECATED YES)
66
set(CMAKE_WARN_UNUSED_CLI YES)
77

src/llama-chat.cpp

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,7 @@
77
#include <algorithm>
88

99
#if __cplusplus >= 202000L
10-
#define LU8(x) (const char*)(u8##x)
1110
#else
12-
#define LU8(x) u8##x
1311
#endif
1412

1513
// trim whitespace from the beginning and end of a string
@@ -158,12 +156,12 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
158156
} else if (tmpl_contains("[gMASK]sop")) {
159157
// chatglm3-6b
160158
return LLM_CHAT_TEMPLATE_CHATGLM_3;
161-
} else if (tmpl_contains(LU8("<用户>"))) {
159+
} else if (tmpl_contains(("<用户>"))) {
162160
// MiniCPM-3B-OpenHermes-2.5-v2-GGUF
163161
return LLM_CHAT_TEMPLATE_MINICPM;
164162
} else if (tmpl_contains("'Assistant: ' + message['content'] + eos_token")) {
165163
return LLM_CHAT_TEMPLATE_DEEPSEEK_2;
166-
} else if (tmpl_contains(LU8("<|Assistant|>")) && tmpl_contains(LU8("<|User|>")) && tmpl_contains(LU8("<|end▁of▁sentence|>"))) {
164+
} else if (tmpl_contains(("<|Assistant|>")) && tmpl_contains(("<|User|>")) && tmpl_contains(("<|end▁of▁sentence|>"))) {
167165
return LLM_CHAT_TEMPLATE_DEEPSEEK_3;
168166
} else if (tmpl_contains("[|system|]") && tmpl_contains("[|assistant|]") && tmpl_contains("[|endofturn|]")) {
169167
// ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
@@ -473,7 +471,7 @@ int32_t llm_chat_apply_template(
473471
for (auto message : chat) {
474472
std::string role(message->role);
475473
if (role == "user") {
476-
ss << LU8("<用户>");
474+
ss << ("<用户>");
477475
ss << trim(message->content);
478476
ss << "<AI>";
479477
} else {
@@ -489,7 +487,7 @@ int32_t llm_chat_apply_template(
489487
} else if (role == "user") {
490488
ss << "User: " << message->content << "\n\n";
491489
} else if (role == "assistant") {
492-
ss << "Assistant: " << message->content << LU8("<|end▁of▁sentence|>");
490+
ss << "Assistant: " << message->content << ("<|end▁of▁sentence|>");
493491
}
494492
}
495493
if (add_ass) {
@@ -502,13 +500,13 @@ int32_t llm_chat_apply_template(
502500
if (role == "system") {
503501
ss << message->content << "\n\n";
504502
} else if (role == "user") {
505-
ss << LU8("<|User|>") << message->content;
503+
ss << ("<|User|>") << message->content;
506504
} else if (role == "assistant") {
507-
ss << LU8("<|Assistant|>") << message->content << LU8("<|end▁of▁sentence|>");
505+
ss << ("<|Assistant|>") << message->content << ("<|end▁of▁sentence|>");
508506
}
509507
}
510508
if (add_ass) {
511-
ss << LU8("<|Assistant|>");
509+
ss << ("<|Assistant|>");
512510
}
513511
} else if (tmpl == LLM_CHAT_TEMPLATE_EXAONE_3) {
514512
// ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb

0 commit comments

Comments (0)