From 3e70bce576926b6c9e1a9b2fcefeab79749af1a1 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Thu, 3 Aug 2023 06:57:21 -0700
Subject: [PATCH] Properly format exceptions in the UI

---
 modules/training.py | 4 ++--
 server.py           | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/modules/training.py b/modules/training.py
index c98fded2..ef833679 100644
--- a/modules/training.py
+++ b/modules/training.py
@@ -483,7 +483,7 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
                 exc = traceback.format_exc()
                 logger.error('Failed to reload the model.')
                 print(exc)
-                return exc
+                return exc.replace('\n', '\n\n')

     # == Start prepping the model itself ==
     if not hasattr(shared.model, 'lm_head') or hasattr(shared.model.lm_head, 'weight'):
@@ -518,7 +518,7 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
             state_dict_peft = torch.load(f"{lora_file_path}/adapter_model.bin")
             set_peft_model_state_dict(lora_model, state_dict_peft)
     except:
-        yield traceback.format_exc()
+        yield traceback.format_exc().replace('\n', '\n\n')
         return

     if shared.args.monkey_patch:
diff --git a/server.py b/server.py
index 679c9e93..601ae33f 100644
--- a/server.py
+++ b/server.py
@@ -75,7 +75,7 @@ def load_model_wrapper(selected_model, loader, autoload=False):
             exc = traceback.format_exc()
             logger.error('Failed to load the model.')
             print(exc)
-            yield exc
+            yield exc.replace('\n', '\n\n')


 def load_lora_wrapper(selected_loras):
@@ -159,7 +159,7 @@ def download_model_wrapper(repo_id, progress=gr.Progress()):
         yield ("Done!")
     except:
         progress(1.0)
-        yield traceback.format_exc()
+        yield traceback.format_exc().replace('\n', '\n\n')


 def create_model_menus():
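
Why the doubled newlines help: these returned/yielded strings are presumably
rendered by Gradio as Markdown, where a single "\n" is folded into the
surrounding text and a raw traceback collapses into one run-on block; replacing
"\n" with "\n\n" gives each traceback line its own paragraph. Below is a
minimal standalone sketch of the same transformation, assuming a hypothetical
helper name format_exc_for_ui (not defined anywhere in this patch):

    import traceback

    def format_exc_for_ui() -> str:
        # Double the newlines so a Markdown renderer keeps one traceback line
        # per paragraph, mirroring the .replace('\n', '\n\n') calls in the patch.
        return traceback.format_exc().replace('\n', '\n\n')

    try:
        raise ValueError("example failure")
    except Exception:
        print(format_exc_for_ui())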