UI: make the markdown LRU cache infinite (for really long conversations)

oobabooga 2024-07-24 11:54:26 -07:00
parent 3b2c23dfb5
commit 947016d010
2 changed files with 18 additions and 3 deletions
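
The idea behind the change: convert_to_markdown is memoized with functools.lru_cache, and instead of capping the cache at 4096 entries (which forces re-rendering once a conversation grows past that many messages), the cache is made unbounded and explicitly cleared whenever the active history changes. A minimal sketch of the same pattern, using an illustrative render function rather than the project's actual one:

import functools

@functools.lru_cache(maxsize=None)  # unbounded: each distinct message is converted only once
def render_markdown(text):
    # stand-in for the real markdown-to-HTML conversion
    return f"<p>{text}</p>"

def switch_history(messages):
    # Clearing on every history switch keeps the unbounded cache
    # from accumulating entries across conversations.
    render_markdown.cache_clear()
    return [render_markdown(m) for m in messages]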

modules/chat.py

@@ -17,7 +17,11 @@ from PIL import Image
 import modules.shared as shared
 from modules import utils
 from modules.extensions import apply_extensions
-from modules.html_generator import chat_html_wrapper, make_thumbnail
+from modules.html_generator import (
+    chat_html_wrapper,
+    convert_to_markdown,
+    make_thumbnail
+)
 from modules.logging_colors import logger
 from modules.text_generation import (
     generate_reply,
@@ -368,7 +372,6 @@ def chatbot_wrapper(text, state, regenerate=False, _continue=False, loading_mess
 def impersonate_wrapper(text, state):
     static_output = chat_html_wrapper(state['history'], state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
     prompt = generate_chat_prompt('', state, impersonate=True)
@@ -1044,6 +1047,8 @@ def handle_unique_id_select(state):
     history = load_history(state['unique_id'], state['character_menu'], state['mode'])
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
+    convert_to_markdown.cache_clear()
     return [history, html]
@@ -1052,6 +1057,8 @@ def handle_start_new_chat_click(state):
     histories = find_all_histories_with_first_prompts(state)
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
+    convert_to_markdown.cache_clear()
     return [history, html, gr.update(choices=histories, value=histories[0][1])]
@@ -1061,6 +1068,8 @@ def handle_delete_chat_confirm_click(state):
     history, unique_id = load_history_after_deletion(state, index)
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
+    convert_to_markdown.cache_clear()
     return [
         history,
         html,
@@ -1099,6 +1108,8 @@ def handle_upload_chat_history(load_chat_history, state):
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
+    convert_to_markdown.cache_clear()
     return [
         history,
         html,
@@ -1119,6 +1130,8 @@ def handle_character_menu_change(state):
     histories = find_all_histories_with_first_prompts(state)
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
+    convert_to_markdown.cache_clear()
     return [
         history,
         html,
@@ -1136,6 +1149,8 @@ def handle_mode_change(state):
     histories = find_all_histories_with_first_prompts(state)
     html = redraw_html(history, state['name1'], state['name2'], state['mode'], state['chat_style'], state['character_menu'])
+    convert_to_markdown.cache_clear()
     return [
         history,
         html,

modules/html_generator.py

@@ -69,7 +69,7 @@ def replace_blockquote(m):
     return m.group().replace('\n', '\n> ').replace('\\begin{blockquote}', '').replace('\\end{blockquote}', '')
-@functools.lru_cache(maxsize=4096)
+@functools.lru_cache(maxsize=None)
 def convert_to_markdown(string):
     # Quote to <q></q>
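
Since maxsize=None disables eviction entirely, the only thing keeping memory in check is the cache_clear() call added to each chat-switch handler above. functools exposes cache_info() for observing this; a small standalone illustration (not project code):

import functools

@functools.lru_cache(maxsize=None)
def square(x):
    return x * x

for i in range(1000):
    square(i)

print(square.cache_info())   # CacheInfo(hits=0, misses=1000, maxsize=None, currsize=1000)
square.cache_clear()
print(square.cache_info())   # currsize is back to 0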