text-generation-webui/settings-template.json

{
    "autoload_model": true,
    "max_new_tokens": 200,
    "max_new_tokens_min": 1,
    "max_new_tokens_max": 2000,
    "seed": -1,
    "character": "None",
    "name1": "You",
    "name2": "Assistant",
    "context": "This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.",
    "greeting": "",
    "turn_template": "",
    "custom_stopping_strings": "",
    "stop_at_newline": false,
    "add_bos_token": true,
    "ban_eos_token": false,
    "skip_special_tokens": true,
    "truncation_length": 2048,
    "truncation_length_min": 0,
    "truncation_length_max": 8192,
    "mode": "chat",
    "chat_style": "cai-chat",
    "instruction_template": "None",
    "chat_prompt_size": 2048,
    "chat_prompt_size_min": 0,
    "chat_prompt_size_max": 2048,
    "chat_generation_attempts": 1,
    "chat_generation_attempts_min": 1,
    "chat_generation_attempts_max": 5,
    "default_extensions": [],
    "chat_default_extensions": [
        "gallery"
    ],
    "presets": {
        "default": "Default",
        ".*(alpaca|llama|llava)": "LLaMA-Precise",
        ".*pygmalion": "NovelAI-Storywriter",
        ".*RWKV": "Naive"
    },
    "prompts": {
        "default": "QA",
        ".*(gpt4chan|gpt-4chan|4chan)": "GPT-4chan",
        ".*(oasst|stablelm-7b-sft-v7-epoch-3)": "Open Assistant",
        ".*(alpac|dolly)": "Alpaca",
        "(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable).*vicuna": "Vicuna v0",
        ".*vicuna.*v0": "Vicuna v0",
        ".*vicuna.*(1.1|1_1)": "Vicuna v1.1",
        ".*stable.*vicuna": "StableVicuna",
        ".*guanaco": "Guanaco-Chat",
        ".*koala": "Koala",
        ".*stablelm-tuned": "StableLM",
        ".*wizardlm": "WizardLM",
        ".*galactica.*finetuned": "Galactica Finetuned",
        ".*galactica.*-v2": "Galactica v2",
        "(?!.*finetuned)(?!.*-v2).*galactica": "Galactica",
        ".*baize": "Baize",
        ".*mpt-.*instruct": "Alpaca",
        ".*mpt-.*chat": "MPT-Chat",
        "(?!.*-flan-)(?!.*-t5-).*lamini-": "Alpaca",
        ".*incite.*chat": "INCITE-Chat",
        ".*incite.*instruct": "INCITE-Instruct"
    },
    "lora_prompts": {
        "default": "QA",
        ".*(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)": "Alpaca",
        ".*baize": "Baize"
    }
}
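
These values act as defaults for text-generation-webui; users typically copy this template to a settings.json file so their overrides survive updates. Below is a minimal, hypothetical sketch in Python of that kind of template-plus-overrides merge, assuming a shallow merge where user keys win; it is not the project's actual loading code, and the file names are the conventional ones rather than guaranteed paths.

import json
from pathlib import Path

def load_settings(template_path="settings-template.json", user_path="settings.json"):
    # Start from the template defaults.
    with open(template_path, encoding="utf-8") as f:
        settings = json.load(f)
    # Apply user overrides if a settings.json is present (shallow merge: user keys win).
    user_file = Path(user_path)
    if user_file.exists():
        with user_file.open(encoding="utf-8") as f:
            settings.update(json.load(f))
    return settings

if __name__ == "__main__":
    s = load_settings()
    print(s["mode"], s["max_new_tokens"], s["presets"]["default"])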