Sort the imports

This commit is contained in:
oobabooga 2023-03-17 11:42:25 -03:00
parent 7d97287e69
commit a717fd709d
4 changed files with 5 additions and 4 deletions

View file

@@ -7,6 +7,7 @@ import transformers
 import modules.shared as shared

 # Copied from https://github.com/PygmalionAI/gradio-ui/
 class _SentinelTokenStoppingCriteria(transformers.StoppingCriteria):

View file

@@ -12,7 +12,8 @@ import modules.extensions as extensions_module
 import modules.shared as shared
 from modules.extensions import apply_extensions
 from modules.html_generator import generate_chat_html
-from modules.text_generation import encode, generate_reply, get_max_prompt_length
+from modules.text_generation import (encode, generate_reply,
+                                     get_max_prompt_length)

 # This gets the new line characters right.

View file

@@ -8,11 +8,10 @@ import numpy as np
 import torch
 import transformers
 from accelerate import infer_auto_device_map, init_empty_weights
+from peft import PeftModel
 from transformers import (AutoConfig, AutoModelForCausalLM, AutoTokenizer,
                           BitsAndBytesConfig)
-from peft import PeftModel

 import modules.shared as shared

 transformers.logging.set_verbosity_error()

View file

@@ -15,9 +15,9 @@ import modules.extensions as extensions_module
 import modules.shared as shared
 import modules.ui as ui
 from modules.html_generator import generate_chat_html
+from modules.LoRA import add_lora_to_model
 from modules.models import load_model, load_soft_prompt
 from modules.text_generation import generate_reply
-from modules.LoRA import add_lora_to_model

 # Loading custom settings
 settings_file = None