1
0
Fork 0
mirror of https://gitlab.com/niansa/discord_llama.git synced 2025-03-06 20:48:25 +01:00

Updated example configs

This commit is contained in:
niansa/tuxifan 2024-03-25 01:56:21 +01:00
parent 66999b6041
commit 72f4a63691
4 changed files with 0 additions and 10 deletions

View file

@ -3,13 +3,11 @@ token MTA0MDYxMTQzNjUwNzk1OTMyNw.Gl_iMU.jVVM3bRqBJVi8ORVpWHquOivlASGJpRySt8qFg
# The following parameters are set to their defaults here and can be omitted
models_dir models
texts_file none
language EN
threads_only true
random_response_chance 0
live_edit false
default_inference_model 13b-vanilla
translation_model none
prompt_file none
instruct_prompt_file none

View file

@ -3,4 +3,3 @@ instruct_mode_policy force
user_prompt USER:
bot_prompt ASSISTANT:
emits_eos true
no_translate true

View file

@ -3,4 +3,3 @@ instruct_mode_policy allow
user_prompt ### Instruction:
bot_prompt ### Response:
emits_eos true
no_translate true

View file

@ -8,9 +8,6 @@ models_dir models
# File containing status texts. For example, see example_texts.txt
texts_file none
# Language everything is translated to (will be disabled if set to "EN" anyways)
language EN
# Whether the bot should respond to pings outside threads. Disabling this may increase load by a LOT
threads_only true
@ -23,9 +20,6 @@ live_edit false
# Model to use outside threads
default_inference_model 13b-vanilla
# Model to be used for translation
translation_model none
# Few-shot prompt for non-instruct-mode. See example_prompt.txt
prompt_file none