From a36fa7307100744a5ee1b2a8b66be186d47db249 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Wed, 12 Jun 2024 19:00:21 -0700
Subject: [PATCH] Lint

---
 modules/models.py         | 4 +---
 modules/sampler_hijack.py | 4 ++--
 2 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/modules/models.py b/modules/models.py
index b1d0b1cb..cb32a3da 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -1,5 +1,4 @@
 import gc
-import logging
 import os
 import pprint
 import re
@@ -29,7 +28,6 @@ import modules.shared as shared
 from modules import RoPE, sampler_hijack
 from modules.logging_colors import logger
 from modules.models_settings import get_model_metadata
-from modules.relative_imports import RelativeImport

 transformers.logging.set_verbosity_error()

@@ -267,7 +265,7 @@ def llamacpp_loader(model_name):
     if path.is_file():
         model_file = path
     else:
-        model_file = sorted(Path(f'{shared.args.model_dir}/{model_name}').glob('*.gguf'))[0]
+        model_file = sorted(Path(f'{shared.args.model_dir}/{model_name}').glob('*.gguf'))[0]

     logger.info(f"llama.cpp weights detected: \"{model_file}\"")
     model, tokenizer = LlamaCppModel.from_pretrained(model_file)
diff --git a/modules/sampler_hijack.py b/modules/sampler_hijack.py
index 55d1997b..bf8ecf3a 100644
--- a/modules/sampler_hijack.py
+++ b/modules/sampler_hijack.py
@@ -218,7 +218,7 @@ class DRYLogitsProcessor(LogitsProcessor):
             match_lengths = {}

             for i in match_indices:
-                next_token = input_ids_row[i+1].item()
+                next_token = input_ids_row[i + 1].item()

                 if next_token in self.sequence_breakers:
                     continue
@@ -234,7 +234,7 @@ class DRYLogitsProcessor(LogitsProcessor):
                         # Start of input reached.
                         break

-                    previous_token = input_ids_row[-(match_length+1)].item()
+                    previous_token = input_ids_row[-(match_length + 1)].item()
                     if input_ids_row[j] != previous_token:
                         # Start of match reached.
                         break