Load more than one LoRA with --lora, fix a bug

This commit is contained in:
oobabooga 2023-04-25 22:58:48 -03:00
parent 15940e762e
commit f39c99fa14
4 changed files with 4 additions and 4 deletions

View file

@@ -190,7 +190,7 @@ Optionally, you can use the following command-line flags:
 | `--chat` | Launch the web UI in chat mode. |
 | `--character CHARACTER` | The name of the character to load in chat mode by default. |
 | `--model MODEL` | Name of the model to load by default. |
-| `--lora LORA` | Name of the LoRA to apply to the model by default. |
+| `--lora LORA [LORA ...]` | The list of LoRAs to load. If you want to load more than one LoRA, write the names separated by spaces. |
 | `--model-dir MODEL_DIR` | Path to directory with all the models. |
 | `--lora-dir LORA_DIR` | Path to directory with all the loras. |
 | `--model-menu` | Show a model menu in the terminal when the web UI is first launched. |

View file

@@ -7,10 +7,10 @@ import modules.shared as shared
 def add_lora_to_model(lora_names):
-    shared.lora_names = list(lora_names)
     prior_set = set(shared.lora_names)
     added_set = set(lora_names) - prior_set
     removed_set = prior_set - set(lora_names)
+    shared.lora_names = list(lora_names)
     # If no LoRA needs to be added or removed, exit
     if len(added_set) == 0 and len(removed_set) == 0:

View file

@@ -96,7 +96,7 @@ parser.add_argument('--chat', action='store_true', help='Launch the web UI in chat mode.')
 parser.add_argument('--cai-chat', action='store_true', help='DEPRECATED: use --chat instead.')
 parser.add_argument('--character', type=str, help='The name of the character to load in chat mode by default.')
 parser.add_argument('--model', type=str, help='Name of the model to load by default.')
-parser.add_argument('--lora', type=str, help='Name of the LoRA to apply to the model by default.')
+parser.add_argument('--lora', type=str, nargs="+", help='The list of LoRAs to load. If you want to load more than one LoRA, write the names separated by spaces.')
 parser.add_argument("--model-dir", type=str, default='models/', help="Path to directory with all the models")
 parser.add_argument("--lora-dir", type=str, default='loras/', help="Path to directory with all the loras")
 parser.add_argument('--model-menu', action='store_true', help='Show a model menu in the terminal when the web UI is first launched.')

View file

@@ -913,7 +913,7 @@ if __name__ == "__main__":
     # Load the model
     shared.model, shared.tokenizer = load_model(shared.model_name)
     if shared.args.lora:
-        add_lora_to_model([shared.args.lora])
+        add_lora_to_model(shared.args.lora)
     # Force a character to be loaded
     if shared.is_chat():