Llama-2

Update llama-7b-chat

Signed-off-by: Jael Gu <mengjia.gu@zilliz.com>
Branch: main
Author: Jael Gu, 1 year ago
Commit: 8667f5a0d9
Changed files:
  1. README.md (6 lines changed)
  2. llama2.py (4 lines changed)

README.md

````diff
@@ -25,7 +25,7 @@ Use the default model to continue the conversation from given messages.
 ```python
 from towhee import ops
-chat = ops.LLM.Llama_2('path/to/model_file.bin', max_tokens=2048)
+chat = ops.LLM.Llama_2('llama-2-13b-chat', max_tokens=2048)
 message = [{"question": "Building a website can be done in 10 simple steps:"}]
 answer = chat(message)
@@ -100,8 +100,8 @@ A dictionary of supported models with model name as key and huggingface hub id &
 {
     'llama-2-7b-chat': {
-        'hf_id': 'TheBloke/Llama-2-7B-GGML',
-        'filename': 'llama-2-7b.ggmlv3.q4_0.bin'
+        'hf_id': 'TheBloke/Llama-2-7B-Chat-GGML',
+        'filename': 'llama-2-7b-chat.ggmlv3.q4_0.bin'
     },
     'llama-2-13b-chat': {
         'hf_id': 'TheBloke/Llama-2-13B-chat-GGML',
````
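For reference, a minimal usage sketch of the operator against the model name this commit fixes, assuming the updated README example above and the corrected `llama-2-7b-chat` mapping:

```python
from towhee import ops

# Load the chat operator by supported model name. With this commit the name
# resolves to TheBloke/Llama-2-7B-Chat-GGML / llama-2-7b-chat.ggmlv3.q4_0.bin
# instead of the base (non-chat) 7B weights.
chat = ops.LLM.Llama_2('llama-2-7b-chat', max_tokens=2048)

message = [{"question": "Building a website can be done in 10 simple steps:"}]
answer = chat(message)
print(answer)
```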

llama2.py

````diff
@@ -77,8 +77,8 @@ class LlamaCpp(PyOperator):
     def supported_model_names():
         models = {
             'llama-2-7b-chat': {
-                'hf_id': 'TheBloke/Llama-2-7B-GGML',
-                'filename': 'llama-2-7b.ggmlv3.q4_0.bin'
+                'hf_id': 'TheBloke/Llama-2-7B-Chat-GGML',
+                'filename': 'llama-2-7b-chat.ggmlv3.q4_0.bin'
             },
             'llama-2-13b-chat': {
                 'hf_id': 'TheBloke/Llama-2-13B-chat-GGML',
````

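The README hunk trailer describes `supported_model_names()` as a dictionary keyed by model name, with the Hugging Face hub id and model filename as values. The download step itself is not part of this diff; the sketch below shows how the corrected entry could be resolved to a local file, assuming `huggingface_hub` is used for the fetch (the `resolve_model_file` helper is illustrative, not the operator's actual code):

```python
from huggingface_hub import hf_hub_download

# Corrected entry from this commit: model name -> Hugging Face hub id + GGML filename.
SUPPORTED_MODELS = {
    'llama-2-7b-chat': {
        'hf_id': 'TheBloke/Llama-2-7B-Chat-GGML',
        'filename': 'llama-2-7b-chat.ggmlv3.q4_0.bin',
    },
}

def resolve_model_file(name: str) -> str:
    """Illustrative helper: map a supported model name to a local model file path."""
    entry = SUPPORTED_MODELS[name]
    # hf_hub_download returns the local cache path of the requested file.
    return hf_hub_download(repo_id=entry['hf_id'], filename=entry['filename'])

model_path = resolve_model_file('llama-2-7b-chat')
```

Before this change the `llama-2-7b-chat` entry pointed at the base `TheBloke/Llama-2-7B-GGML` repo and the `llama-2-7b.ggmlv3.q4_0.bin` file, so the chat model name would have pulled the non-chat weights; the commit aligns both the README and `llama2.py` with the chat variant.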