From ff453170329ad9f5414aa1e396130e82472f0b1b Mon Sep 17 00:00:00 2001
From: "Xiaojian \"JJ\" Deng"
Date: Wed, 5 Jul 2023 20:40:43 -0400
Subject: [PATCH] Update models.py (#3020)

Hopefully fixed error with "ValueError: Tokenizer class GPTNeoXTokenizer
does not exist or is not currently imported."
---
 modules/models.py | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/modules/models.py b/modules/models.py
index 160ca9e3..26a4880d 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -95,11 +95,18 @@ def load_tokenizer(model_name, model):
     if any(s in model_name.lower() for s in ['gpt-4chan', 'gpt4chan']) and Path(f"{shared.args.model_dir}/gpt-j-6B/").exists():
         tokenizer = AutoTokenizer.from_pretrained(Path(f"{shared.args.model_dir}/gpt-j-6B/"))
     elif path_to_model.exists():
-        tokenizer = AutoTokenizer.from_pretrained(
-            path_to_model,
-            trust_remote_code=shared.args.trust_remote_code,
-            use_fast=False
-        )
+        try:
+            tokenizer = AutoTokenizer.from_pretrained(
+                path_to_model,
+                trust_remote_code=shared.args.trust_remote_code,
+                use_fast=False
+            )
+        except ValueError:
+            tokenizer = AutoTokenizer.from_pretrained(
+                path_to_model,
+                trust_remote_code=shared.args.trust_remote_code,
+                use_fast=True
+            )
 
     if tokenizer.__class__.__name__ == 'LlamaTokenizer':
         pairs = [
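
Note (not part of the patch): a minimal standalone sketch of the same fallback, assuming a GPT-NeoX-style checkpoint; the model name below is only an example. Such models ship only a fast tokenizer, so requesting the slow one with use_fast=False raises the ValueError quoted in the commit message, and retrying with use_fast=True succeeds.

    from transformers import AutoTokenizer

    model_path = "EleutherAI/pythia-70m"  # example checkpoint; any GPT-NeoX-style model behaves the same

    try:
        # Ask for the slow tokenizer first, mirroring the original code path.
        tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False)
    except ValueError:
        # No slow GPTNeoXTokenizer implementation exists, so fall back to the fast one.
        tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=True)

    print(tokenizer.__class__.__name__)  # e.g. GPTNeoXTokenizerFast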