1
0
Fork 0
mirror of https://gitlab.com/niansa/libjustchat.git synced 2025-03-06 20:48:31 +01:00

Added generate() with custom prompt config

This commit is contained in:
niansa 2023-06-10 17:06:44 +02:00
parent 1cac1b2a75
commit d0ef967a20
2 changed files with 6 additions and 3 deletions

View file

@@ -78,12 +78,14 @@ LM_SCHEDULABLE(bool) Inference::append(const std::string &message, const Evaluat
LM_CORETURN LM_COAWAIT append(config.prompt, message, on_evaluate);
}
LM_SCHEDULABLE(std::string) LM::Chat::Inference::generate(const GenerateCallback &on_generate) {
// Run inference
LM_CORETURN LM_COAWAIT inference->run(config.prompt.prefix, [on_generate] (const char *token) {
/// @brief Runs generation using an explicitly supplied prompt configuration.
/// @param promptConfig Prompt configuration whose `prefix` is fed to the backend as the run prompt.
/// @param on_generate Callback invoked once per generated token; its return value is
///        forwarded to the backend (presumably to allow aborting generation — TODO confirm).
/// @return The generated text, as produced by the backend's run().
LM_SCHEDULABLE(std::string) LM::Chat::Inference::generate(const ModelConfig::Prompt &promptConfig, const GenerateCallback &on_generate) {
// Capture the callback by copy so it remains valid for the lifetime of the
// (possibly scheduled/suspended) coroutine frame created by LM_COAWAIT.
LM_CORETURN LM_COAWAIT inference->run(promptConfig.prefix, [on_generate] (const char *token) {
return on_generate(token);
});
}
/// @brief Convenience overload: runs generation with this instance's default
///        prompt configuration (`config.prompt`).
/// @param on_generate Per-token callback, forwarded unchanged to the configurable overload.
/// @return The generated text.
LM_SCHEDULABLE(std::string) LM::Chat::Inference::generate(const GenerateCallback &on_generate) {
// Delegate to the overload taking an explicit prompt config.
LM_CORETURN LM_COAWAIT generate(config.prompt, on_generate);
}
LM_SCHEDULABLE(std::string) Inference::prompt(const ModelConfig::Prompt &promptConfig, const std::string &message, const GenerateCallback &on_generate, const EvaluateCallback &on_evaluate) {
if (!LM_COAWAIT append(promptConfig, message, on_evaluate)) LM_CORETURN "";

View file

@@ -52,6 +52,7 @@ public:
/// Resets the inference state.
LM_SCHEDULABLE(bool) reset();
/// Appends a message using an explicit prompt configuration; `on_evaluate`
/// (optional) is invoked during evaluation of the appended text.
LM_SCHEDULABLE(bool) append(const ModelConfig::Prompt& promptConfig, const std::string& message, const EvaluateCallback& on_evaluate = nullptr);
/// Convenience overload: appends using the instance's default prompt configuration.
LM_SCHEDULABLE(bool) append(const std::string& message, const EvaluateCallback& on_evaluate = nullptr);
/// Generates a response using an explicit prompt configuration; `on_generate`
/// (optional) receives each generated token.
LM_SCHEDULABLE(std::string) generate(const ModelConfig::Prompt& promptConfig, const GenerateCallback& on_generate = nullptr);
/// Convenience overload: generates using the instance's default prompt configuration.
LM_SCHEDULABLE(std::string) generate(const GenerateCallback& on_generate = nullptr);
/// Appends `message` and then generates a reply, using the default prompt configuration.
LM_SCHEDULABLE(std::string) prompt(const std::string& message, const GenerateCallback& on_generate = nullptr, const EvaluateCallback& on_evaluate = nullptr);
/// Appends `message` and then generates a reply, using an explicit prompt configuration.
LM_SCHEDULABLE(std::string) prompt(const ModelConfig::Prompt& promptConfig, const std::string& message, const GenerateCallback& on_generate = nullptr, const EvaluateCallback& on_evaluate = nullptr);