From 8667f5a0d9abbdaa88023d8baa26bf25a2d0e7b9 Mon Sep 17 00:00:00 2001
From: Jael Gu
Date: Mon, 31 Jul 2023 10:23:47 +0800
Subject: [PATCH] Update llama-7b-chat

Signed-off-by: Jael Gu
---
 README.md | 6 +++---
 llama2.py | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index 085a282..e93003d 100644
--- a/README.md
+++ b/README.md
@@ -25,7 +25,7 @@ Use the default model to continue the conversation from given messages.
 ```python
 from towhee import ops
 
-chat = ops.LLM.Llama_2('path/to/model_file.bin', max_tokens=2048)
+chat = ops.LLM.Llama_2('llama-2-13b-chat', max_tokens=2048)
 
 message = [{"question": "Building a website can be done in 10 simple steps:"}]
 answer = chat(message)
@@ -100,8 +100,8 @@ A dictionary of supported models with model name as key and huggingface hub id &
 
 {
     'llama-2-7b-chat': {
-        'hf_id': 'TheBloke/Llama-2-7B-GGML',
-        'filename': 'llama-2-7b.ggmlv3.q4_0.bin'
+        'hf_id': 'TheBloke/Llama-2-7B-Chat-GGML',
+        'filename': 'llama-2-7b-chat.ggmlv3.q4_0.bin'
     },
     'llama-2-13b-chat': {
         'hf_id': 'TheBloke/Llama-2-13B-chat-GGML',
diff --git a/llama2.py b/llama2.py
index 5a64e1e..f8b5fcc 100644
--- a/llama2.py
+++ b/llama2.py
@@ -77,8 +77,8 @@ class LlamaCpp(PyOperator):
     def supported_model_names():
         models = {
             'llama-2-7b-chat': {
-                'hf_id': 'TheBloke/Llama-2-7B-GGML',
-                'filename': 'llama-2-7b.ggmlv3.q4_0.bin'
+                'hf_id': 'TheBloke/Llama-2-7B-Chat-GGML',
+                'filename': 'llama-2-7b-chat.ggmlv3.q4_0.bin'
             },
             'llama-2-13b-chat': {
                 'hf_id': 'TheBloke/Llama-2-13B-chat-GGML',
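
For reference, a minimal sketch of what the corrected `hf_id`/`filename` pair resolves to at load time. This is an illustration under stated assumptions, not the operator's actual code path: it assumes `huggingface_hub` for the download and a GGML-compatible release of `llama-cpp-python` (e.g. <= 0.1.78; later releases read GGUF only) for loading; the glue code and the `entry` variable are hypothetical.

```python
# Illustrative only: how a supported_model_names() entry could be resolved
# into a loaded model. hf_hub_download and Llama are real APIs; the glue
# code here is an assumption, not the operator's implementation.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

entry = {
    'hf_id': 'TheBloke/Llama-2-7B-Chat-GGML',       # chat repo (the fix)
    'filename': 'llama-2-7b-chat.ggmlv3.q4_0.bin',  # chat weights (the fix)
}

# Fetch the quantized GGML file from the Hugging Face Hub.
model_path = hf_hub_download(repo_id=entry['hf_id'], filename=entry['filename'])

# n_ctx mirrors the README example's max_tokens=2048 setting.
llm = Llama(model_path=model_path, n_ctx=2048)
print(llm('Building a website can be done in 10 simple steps:', max_tokens=64))
```

Before this patch, 'llama-2-7b-chat' pointed at the base-model repo (`TheBloke/Llama-2-7B-GGML`) and its base weights, so requesting the chat model silently loaded a non-chat-tuned LLaMA 2; the fix points both the hub id and the filename at the chat-tuned artifacts.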