# text-generation-webui/modules/chat.py

import base64
import copy
import io
import json
import re
from datetime import datetime
from pathlib import Path

from PIL import Image

import modules.extensions as extensions_module
import modules.shared as shared
from modules.extensions import apply_extensions
from modules.html_generator import fix_newlines, generate_chat_html
from modules.text_generation import (encode, generate_reply,
                                     get_max_prompt_length)

def generate_chat_output(history, name1, name2, character):
    if shared.args.cai_chat:
        return generate_chat_html(history, name1, name2, character)
    else:
        return history

def generate_chat_prompt(user_input, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=False):
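    """Build the prompt: the character context first, then as many past
    exchanges as fit within the chat_prompt_size/max_new_tokens budget,
    dropping the oldest rows first when the budget is exceeded. With
    impersonate=True the prompt ends with the user's prefix so the model
    writes the user's next message instead of the bot's."""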
    user_input = fix_newlines(user_input)
    rows = [f"{context.strip()}\n"]

    if shared.soft_prompt:
        chat_prompt_size -= shared.soft_prompt_tensor.shape[1]
    max_length = min(get_max_prompt_length(max_new_tokens), chat_prompt_size)

    i = len(shared.history['internal'])-1
    while i >= 0 and len(encode(''.join(rows), max_new_tokens)[0]) < max_length:
        rows.insert(1, f"{name2}: {shared.history['internal'][i][1].strip()}\n")
        if not (shared.history['internal'][i][0] == '<|BEGIN-VISIBLE-CHAT|>'):
            rows.insert(1, f"{name1}: {shared.history['internal'][i][0].strip()}\n")
        i -= 1

    if not impersonate:
        rows.append(f"{name1}: {user_input}\n")
        rows.append(apply_extensions(f"{name2}:", "bot_prefix"))
        limit = 3
    else:
        rows.append(f"{name1}:")
        limit = 2

    while len(rows) > limit and len(encode(''.join(rows), max_new_tokens)[0]) >= max_length:
        rows.pop(1)

    prompt = ''.join(rows)
    return prompt

def extract_message_from_reply(reply, name1, name2, check):
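    """Trim the raw generation down to a single message. In check mode only the
    first line is kept; otherwise the reply is cut at the first occurrence of
    either speaker's "Name:" prefix, and a partially generated prefix at the
    end is trimmed off. Returns the cleaned reply and whether a next-speaker
    marker was found."""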
    next_character_found = False

    if check:
        lines = reply.split('\n')
        reply = lines[0].strip()
        if len(lines) > 1:
            next_character_found = True
    else:
        for string in [f"\n{name1}:", f"\n{name2}:"]:
            idx = reply.find(string)
            if idx != -1:
                reply = reply[:idx]
                next_character_found = True

        # If something like "\nYo" is generated just before "\nYou:"
        # is completed, trim it
        if not next_character_found:
            for string in [f"\n{name1}:", f"\n{name2}:"]:
                for j in range(len(string)-1, 0, -1):
                    if reply[-j:] == string[:j]:
                        reply = reply[:-j]
                        break

    reply = fix_newlines(reply)
    return reply, next_character_found

def stop_everything_event():
    shared.stop_everything = True

def chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1, regenerate=False):
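    """Main chat generator. Lets extensions hijack the input or supply a custom
    prompt builder, then streams the growing reply by yielding updated copies
    of shared.history['visible']. Runs up to chat_generation_attempts passes,
    continuing the reply when no next-speaker marker was found."""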
    shared.stop_everything = False
    just_started = True
    eos_token = '\n' if check else None
    name1_original = name1
    if 'pygmalion' in shared.model_name.lower():
        name1 = "You"

    # Check if any extension wants to hijack this function call
    visible_text = None
    custom_generate_chat_prompt = None
    for extension, _ in extensions_module.iterator():
        if hasattr(extension, 'input_hijack') and extension.input_hijack['state'] == True:
            extension.input_hijack['state'] = False
            text, visible_text = extension.input_hijack['value']
        if custom_generate_chat_prompt is None and hasattr(extension, 'custom_generate_chat_prompt'):
            custom_generate_chat_prompt = extension.custom_generate_chat_prompt

    if visible_text is None:
        visible_text = text
        if shared.args.chat:
            visible_text = visible_text.replace('\n', '<br>')
    text = apply_extensions(text, "input")

    if custom_generate_chat_prompt is None:
        prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size)
    else:
        prompt = custom_generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size)

    # Yield *Is typing...*
    if not regenerate:
        yield shared.history['visible']+[[visible_text, shared.processing_message]]

    # Generate
    reply = ''
    for i in range(chat_generation_attempts):
        for reply in generate_reply(f"{prompt}{' ' if len(reply) > 0 else ''}{reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):

            # Extracting the reply
            reply, next_character_found = extract_message_from_reply(reply, name1, name2, check)
            visible_reply = re.sub("(<USER>|<user>|{{user}})", name1_original, reply)
            visible_reply = apply_extensions(visible_reply, "output")
            if shared.args.chat:
                visible_reply = visible_reply.replace('\n', '<br>')

            # We need this global variable to handle the Stop event,
            # otherwise gradio gets confused
            if shared.stop_everything:
                return shared.history['visible']
            if just_started:
                just_started = False
                shared.history['internal'].append(['', ''])
                shared.history['visible'].append(['', ''])

            shared.history['internal'][-1] = [text, reply]
            shared.history['visible'][-1] = [visible_text, visible_reply]
            if not shared.args.no_stream:
                yield shared.history['visible']

        if next_character_found:
            break

    yield shared.history['visible']

def impersonate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1):
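    """Generate a message written as the user (name1) rather than the bot,
    yielding the partial text as it streams."""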
    eos_token = '\n' if check else None

    if 'pygmalion' in shared.model_name.lower():
        name1 = "You"

    prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=True)

    reply = ''
    # Yield *Is typing...*
    yield shared.processing_message
    for i in range(chat_generation_attempts):
        for reply in generate_reply(prompt+reply, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
            reply, next_character_found = extract_message_from_reply(reply, name1, name2, check)
            yield reply
        if next_character_found:
            break

    yield reply

def cai_chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1):
    for _history in chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts):
        yield generate_chat_html(_history, name1, name2, shared.character)

def regenerate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1):
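    """Drop the last exchange from the history and re-run chatbot_wrapper on
    the same user input, yielding the refreshed chat output."""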
    if (shared.character != 'None' and len(shared.history['visible']) == 1) or len(shared.history['internal']) == 0:
        yield generate_chat_output(shared.history['visible'], name1, name2, shared.character)
    else:
        last_visible = shared.history['visible'].pop()
        last_internal = shared.history['internal'].pop()
        # Yield '*Is typing...*'
        yield generate_chat_output(shared.history['visible']+[[last_visible[0], shared.processing_message]], name1, name2, shared.character)
        for _history in chatbot_wrapper(last_internal[0], max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts, regenerate=True):
            if shared.args.cai_chat:
                shared.history['visible'][-1] = [last_visible[0], _history[-1][1]]
            else:
                shared.history['visible'][-1] = (last_visible[0], _history[-1][1])
            yield generate_chat_output(shared.history['visible'], name1, name2, shared.character)

def remove_last_message(name1, name2):
    if len(shared.history['visible']) > 0 and not shared.history['internal'][-1][0] == '<|BEGIN-VISIBLE-CHAT|>':
        last = shared.history['visible'].pop()
        shared.history['internal'].pop()
    else:
        last = ['', '']

    if shared.args.cai_chat:
        return generate_chat_html(shared.history['visible'], name1, name2, shared.character), last[0]
    else:
        return shared.history['visible'], last[0]

def send_last_reply_to_input():
    if len(shared.history['internal']) > 0:
        return shared.history['internal'][-1][1]
    else:
        return ''

def replace_last_reply(text, name1, name2):
    if len(shared.history['visible']) > 0:
        if shared.args.cai_chat:
            shared.history['visible'][-1][1] = text
        else:
            shared.history['visible'][-1] = (shared.history['visible'][-1][0], text)
        shared.history['internal'][-1][1] = apply_extensions(text, "input")

    return generate_chat_output(shared.history['visible'], name1, name2, shared.character)

def clear_html():
    return generate_chat_html([], "", "", shared.character)

def clear_chat_log(name1, name2):
    if shared.character != 'None':
        found = False
        for i in range(len(shared.history['internal'])):
            if '<|BEGIN-VISIBLE-CHAT|>' in shared.history['internal'][i][0]:
                shared.history['visible'] = [['', apply_extensions(shared.history['internal'][i][1], "output")]]
                shared.history['internal'] = [shared.history['internal'][i]]
                found = True
                break

        if not found:
            shared.history['visible'] = []
            shared.history['internal'] = []
    else:
        shared.history['internal'] = []
        shared.history['visible'] = []

    return generate_chat_output(shared.history['visible'], name1, name2, shared.character)

def redraw_html(name1, name2):
    return generate_chat_html(shared.history['visible'], name1, name2, shared.character)

def tokenize_dialogue(dialogue, name1, name2):
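    """Parse a raw example-dialogue string into [user_message, bot_message]
    pairs based on the "name1:"/"name2:" prefixes, printing the result to the
    console for inspection."""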
    _history = []

    dialogue = re.sub('<START>', '', dialogue)
    dialogue = re.sub('<start>', '', dialogue)
    dialogue = re.sub('(\n|^)[Aa]non:', '\\1You:', dialogue)
    dialogue = re.sub('(\n|^)\[CHARACTER\]:', f'\\g<1>{name2}:', dialogue)
    idx = [m.start() for m in re.finditer(f"(^|\n)({re.escape(name1)}|{re.escape(name2)}):", dialogue)]
    if len(idx) == 0:
        return _history

    messages = []
    for i in range(len(idx)-1):
        messages.append(dialogue[idx[i]:idx[i+1]].strip())
    messages.append(dialogue[idx[-1]:].strip())

    entry = ['', '']
    for i in messages:
        if i.startswith(f'{name1}:'):
            entry[0] = i[len(f'{name1}:'):].strip()
        elif i.startswith(f'{name2}:'):
            entry[1] = i[len(f'{name2}:'):].strip()
            if not (len(entry[0]) == 0 and len(entry[1]) == 0):
                _history.append(entry)
            entry = ['', '']

    print("\033[1;32;1m\nDialogue tokenized to:\033[0;37;0m\n", end='')
    for row in _history:
        for column in row:
            print("\n")
            for line in column.strip().split('\n'):
                print("| "+line+"\n")
            print("|\n")
        print("------------------------------")

    return _history

def save_history(timestamp=True):
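    """Write shared.history to a JSON file under logs/, prefixed with the
    character name (if any) and either timestamped or named *persistent.json.
    Returns the path of the saved file."""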
    prefix = '' if shared.character == 'None' else f"{shared.character}_"
    if timestamp:
        fname = f"{prefix}{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
    else:
        fname = f"{prefix}persistent.json"
    if not Path('logs').exists():
        Path('logs').mkdir()
    with open(Path(f'logs/{fname}'), 'w', encoding='utf-8') as f:
        f.write(json.dumps({'data': shared.history['internal'], 'data_visible': shared.history['visible']}, indent=2))
    return Path(f'logs/{fname}')

def load_history(file, name1, name2):
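    """Restore shared.history from an uploaded JSON file. Supports this UI's
    own format ('data'/'data_visible'), the official Pygmalion web UI format
    ('chat'), and falls back to parsing the file as plain-text dialogue."""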
    file = file.decode('utf-8')
    try:
        j = json.loads(file)
        if 'data' in j:
            shared.history['internal'] = j['data']
            if 'data_visible' in j:
                shared.history['visible'] = j['data_visible']
            else:
                shared.history['visible'] = copy.deepcopy(shared.history['internal'])
        # Compatibility with Pygmalion AI's official web UI
        elif 'chat' in j:
            shared.history['internal'] = [':'.join(x.split(':')[1:]).strip() for x in j['chat']]
            if len(j['chat']) > 0 and j['chat'][0].startswith(f'{name2}:'):
                shared.history['internal'] = [['<|BEGIN-VISIBLE-CHAT|>', shared.history['internal'][0]]] + [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(1, len(shared.history['internal'])-1, 2)]
                shared.history['visible'] = copy.deepcopy(shared.history['internal'])
                shared.history['visible'][0][0] = ''
            else:
                shared.history['internal'] = [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(0, len(shared.history['internal'])-1, 2)]
                shared.history['visible'] = copy.deepcopy(shared.history['internal'])
    except:
        shared.history['internal'] = tokenize_dialogue(file, name1, name2)
        shared.history['visible'] = copy.deepcopy(shared.history['internal'])

def load_default_history(name1, name2):
    if Path('logs/persistent.json').exists():
        load_history(open(Path('logs/persistent.json'), 'rb').read(), name1, name2)
    else:
        shared.history['internal'] = []
        shared.history['visible'] = []

def load_character(_character, name1, name2):
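    """Load a character JSON from characters/, assemble its context string
    (persona, scenario, example dialogue), seed the history with the greeting
    message, and reload that character's persistent log if one exists."""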
    context = ""
    shared.history['internal'] = []
    shared.history['visible'] = []
    if _character != 'None':
        shared.character = _character
        data = json.loads(open(Path(f'characters/{_character}.json'), 'r', encoding='utf-8').read())
        name2 = data['char_name']
        if 'char_persona' in data and data['char_persona'] != '':
            context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
        if 'world_scenario' in data and data['world_scenario'] != '':
            context += f"Scenario: {data['world_scenario']}\n"
        context = f"{context.strip()}\n<START>\n"
        if 'example_dialogue' in data and data['example_dialogue'] != '':
            data['example_dialogue'] = data['example_dialogue'].replace('{{user}}', name1).replace('{{char}}', name2)
            data['example_dialogue'] = data['example_dialogue'].replace('<USER>', name1).replace('<BOT>', name2)
            context += f"{data['example_dialogue'].strip()}\n"
        if 'char_greeting' in data and len(data['char_greeting'].strip()) > 0:
            shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', data['char_greeting']]]
            shared.history['visible'] += [['', apply_extensions(data['char_greeting'], "output")]]
        else:
            shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', "Hello there!"]]
            shared.history['visible'] += [['', "Hello there!"]]
    else:
        shared.character = None
        context = shared.settings['context_pygmalion']
        name2 = shared.settings['name2_pygmalion']

    if Path(f'logs/{shared.character}_persistent.json').exists():
        load_history(open(Path(f'logs/{shared.character}_persistent.json'), 'rb').read(), name1, name2)

    if shared.args.cai_chat:
        return name2, context, generate_chat_html(shared.history['visible'], name1, name2, shared.character)
    else:
        return name2, context, shared.history['visible']

def upload_character(json_file, img, tavern=False):
    json_file = json_file if type(json_file) == str else json_file.decode('utf-8')
    data = json.loads(json_file)
    outfile_name = data["char_name"]
    i = 1
    while Path(f'characters/{outfile_name}.json').exists():
        outfile_name = f'{data["char_name"]}_{i:03d}'
        i += 1
    if tavern:
        outfile_name = f'TavernAI-{outfile_name}'
    with open(Path(f'characters/{outfile_name}.json'), 'w', encoding='utf-8') as f:
        f.write(json_file)
    if img is not None:
        img = Image.open(io.BytesIO(img))
        img.save(Path(f'characters/{outfile_name}.png'))
    print(f'New character saved to "characters/{outfile_name}.json".')
    return outfile_name

def upload_tavern_character(img, name1, name2):
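    """Extract the base64-encoded character JSON stored in a TavernAI PNG's
    'chara' metadata field and save it through upload_character()."""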
    _img = Image.open(io.BytesIO(img))
    _img.getexif()
    decoded_string = base64.b64decode(_img.info['chara'])
    _json = json.loads(decoded_string)
    _json = {"char_name": _json['name'], "char_persona": _json['description'], "char_greeting": _json["first_mes"], "example_dialogue": _json['mes_example'], "world_scenario": _json['scenario']}
    return upload_character(json.dumps(_json), img, tavern=True)

def upload_your_profile_picture(img):
    img = Image.open(io.BytesIO(img))
    img.save(Path('img_me.png'))
    print('Profile picture saved to "img_me.png"')