Set use_cache=True by default for all models

This commit is contained in:
oobabooga 2023-08-30 13:26:27 -07:00
parent 5190e153ed
commit 47e490c7b4

View file

@@ -265,9 +265,7 @@ def generate_reply_HF(question, original_question, seed, state, stopping_strings
     if state['ban_eos_token']:
         generate_params['suppress_tokens'] = [shared.tokenizer.eos_token_id]

-    if shared.args.no_cache:
-        generate_params.update({'use_cache': False})
+    generate_params.update({'use_cache': not shared.args.no_cache})

     if shared.args.deepspeed:
         generate_params.update({'synced_gpus': True})