From f0d6ead877b01d12fcbdf7028954d17fd094771d Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 18 Dec 2023 01:51:58 -0300
Subject: [PATCH] llama.cpp: read instruction template from GGUF metadata
 (#4975)

---
 modules/metadata_gguf.py   |  5 +++--
 modules/models_settings.py | 11 ++++++++++-
 2 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/modules/metadata_gguf.py b/modules/metadata_gguf.py
index 0ea41a2a..70ad41dc 100644
--- a/modules/metadata_gguf.py
+++ b/modules/metadata_gguf.py
@@ -82,8 +82,9 @@ def load_metadata(fname):
         if value_type == GGUFValueType.ARRAY:
             ltype = GGUFValueType(struct.unpack("
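
For reference, the change wires the Jinja2 chat template that GGUF files can carry under the standard tokenizer.chat_template metadata key into the instruction-template handling in modules/models_settings.py. A minimal sketch of that idea follows, assuming load_metadata() from modules/metadata_gguf.py (shown in the hunk above) returns a plain dict of metadata keys to values; the function name read_gguf_instruction_template and the example path are illustrative and not part of the patch.

# Sketch only, not the patch itself: fetch the chat template embedded in a
# GGUF file's metadata, if the model carries one.
from pathlib import Path

from modules.metadata_gguf import load_metadata  # helper patched above


def read_gguf_instruction_template(model_path: str) -> str | None:
    """Return the Jinja2 chat template stored in a GGUF model, or None."""
    path = Path(model_path)
    if path.suffix.lower() != '.gguf':
        return None  # only GGUF files carry this metadata

    # Assumes load_metadata() returns a dict of metadata key -> value.
    metadata = load_metadata(str(path))

    # The key is only present when the model was converted from a tokenizer
    # that defines a chat template.
    return metadata.get('tokenizer.chat_template')


# Example usage (hypothetical path):
# template = read_gguf_instruction_template('models/mistral-7b-instruct.Q4_K_M.gguf')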