Merge pull request #6203 from oobabooga/dev

Merge dev branch
This commit is contained in:
oobabooga 2024-07-05 07:37:19 -03:00 committed by GitHub
commit e813b322cf
WARNING! Although there is a key with this ID in the database it does not verify this commit! This commit is SUSPICIOUS.
GPG key ID: B5690EEEBB952194
2 changed files with 7 additions and 2 deletions

View file

@ -577,7 +577,7 @@ def find_all_histories_with_first_prompts(state):
data = json.load(f)
first_prompt = ""
-        if 'visible' in data and len(data['visible']) > 0:
+        if data and 'visible' in data and len(data['visible']) > 0:
if data['internal'][0][0] == '<|BEGIN-VISIBLE-CHAT|>':
if len(data['visible']) > 1:
first_prompt = html.unescape(data['visible'][1][0])

View file

@ -100,9 +100,11 @@ def eval_with_progress(self, tokens: Sequence[int]):
def monkey_patch_llama_cpp_python(lib):
if getattr(lib.Llama, '_is_patched', False):
# If the patch is already applied, do nothing
return
def my_generate(self, *args, **kwargs):
if shared.args.streaming_llm:
new_sequence = args[0]
past_sequence = self._input_ids
@ -116,3 +118,6 @@ def monkey_patch_llama_cpp_python(lib):
lib.Llama.eval = eval_with_progress
lib.Llama.original_generate = lib.Llama.generate
lib.Llama.generate = my_generate
# Set the flag to indicate that the patch has been applied
lib.Llama._is_patched = True