From b4662bf4afec95c9fda7b8a0447543ea836460f7 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 29 May 2023 16:12:54 -0300
Subject: [PATCH] Download gptq_model*.py using download-model.py

---
 download-model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/download-model.py b/download-model.py
index 44b87a8c..7a027fe7 100644
--- a/download-model.py
+++ b/download-model.py
@@ -111,7 +111,7 @@ def get_download_links_from_huggingface(model, branch, text_only=False):
             if not is_lora and fname.endswith(('adapter_config.json', 'adapter_model.bin')):
                 is_lora = True
 
-            is_pytorch = re.match("(pytorch|adapter)_model.*\.bin", fname)
+            is_pytorch = re.match("(pytorch|adapter|gptq)_model.*\.bin", fname)
             is_safetensors = re.match(".*\.safetensors", fname)
             is_pt = re.match(".*\.pt", fname)
             is_ggml = re.match(".*ggml.*\.bin", fname)
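
Note: as a quick sketch of what the widened pattern accepts, the snippet below runs the
post-patch regex against some illustrative filenames (the GPTQ name is a typical example,
not a filename taken from any particular repository):

    import re

    # Pattern as it reads after this patch.
    pattern = r"(pytorch|adapter|gptq)_model.*\.bin"

    for fname in [
        "pytorch_model-00001-of-00002.bin",  # matched before and after this change
        "adapter_model.bin",                 # matched before and after this change
        "gptq_model-4bit-128g.bin",          # newly matched by this change
        "model.safetensors",                 # still handled by the is_safetensors check
    ]:
        print(fname, bool(re.match(pattern, fname)))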