1
0
Fork 0
mirror of https://gitlab.com/niansa/discord_llama.git synced 2025-03-06 20:48:25 +01:00

Implemented no_instruct_prompt and used it for mpt-7b-chat

This commit is contained in:
niansa/tuxifan 2023-05-17 11:46:22 +02:00
parent 095acfc7ae
commit dc37d6d763
4 changed files with 6 additions and 2 deletions

View file

@@ -57,6 +57,8 @@ void Configuration::Model::fill(const Configuration& cfg, std::unordered_map<std
emits_eos = parse_bool(value);
} else if (key == "no_translate") {
no_translate = parse_bool(value);
} else if (key == "no_instruct_prompt") {
no_instruct_prompt = parse_bool(value);
} else if (!ignore_extra) {
throw Exception("Error: Failed to parse model configuration file: Unknown key: "+key);
}

View file

@@ -42,7 +42,8 @@ public:
user_prompt,
bot_prompt;
bool emits_eos = false,
no_translate = false;
no_translate = false,
no_instruct_prompt = false;
enum class InstructModePolicy {
Allow = 0b11,
Force = 0b10,

View file

@@ -3,3 +3,4 @@ instruct_mode_policy force
user_prompt <|im_start|>user
bot_prompt <|im_end|>
emits_eos true
no_instruct_prompt true

View file

@@ -260,7 +260,7 @@ private:
auto llm = LM::Inference::construct(model_config.weights_path, llm_get_params());
// Add initial context
std::string prompt;
if (config.instruct_prompt_file != "none") {
if (config.instruct_prompt_file != "none" && !model_config.no_instruct_prompt) {
// Read whole file
std::ifstream f(config.instruct_prompt_file);
if (!f) {