text-generation-webui/settings-template.json
oobabooga 185587a33e Add a history size parameter to the chat
Including too many past messages in the prompt makes the
model noticeably slower, so it is useful to be able to
cap how much history is used.
2023-01-20 17:03:09 -03:00

20 lines
725 B
JSON

{
"max_new_tokens": 200,
"max_new_tokens_min": 1,
"max_new_tokens_max": 2000,
"preset": "NovelAI-Sphinx Moth",
"name1": "Person 1",
"name2": "Person 2",
"context": "This is a conversation between two people.",
"prompt": "Common sense questions and answers\n\nQuestion: \nFactual answer:",
"prompt_gpt4chan": "-----\n--- 865467536\nInput text\n--- 865467537\n",
"stop_at_newline": true,
"history_size": 8,
"history_size_min": 0,
"history_size_max": 64,
"preset_pygmalion": "Pygmalion",
"name1_pygmalion": "You",
"name2_pygmalion": "Kawaii",
"context_pygmalion": "This is a conversation between two people.\n<START>",
"stop_at_newline_pygmalion": false
}