From 76d3d7ddb37a920e9f0bc35f3b0289e301e7a063 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Fri, 10 Feb 2023 15:57:55 -0300
Subject: [PATCH] Reorder the imports here too

---
 convert-to-torch.py       | 9 +++++----
 modules/html_generator.py | 2 +-
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/convert-to-torch.py b/convert-to-torch.py
index 8159b67f..3eeaec73 100644
--- a/convert-to-torch.py
+++ b/convert-to-torch.py
@@ -6,11 +6,12 @@ python convert-to-torch.py models/opt-1.3b
 
 The output will be written to torch-dumps/name-of-the-model.pt
 '''
-
-from transformers import AutoModelForCausalLM
-import torch
-from sys import argv
+
 from pathlib import Path
+from sys import argv
+
+import torch
+from transformers import AutoModelForCausalLM
 
 path = Path(argv[1])
 model_name = path.name
diff --git a/modules/html_generator.py b/modules/html_generator.py
index 3ede366c..1bd34466 100644
--- a/modules/html_generator.py
+++ b/modules/html_generator.py
@@ -4,9 +4,9 @@ This is a library for formatting GPT-4chan and chat outputs as nice HTML.
 
 '''
 
+import copy
 import re
 from pathlib import Path
-import copy
 
 def generate_basic_html(s):
     s = '\n'.join([f'<p>{line}</p>' for line in s.split('\n')])