diff --git a/README.md b/README.md index 52a7025..2896ef8 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,38 @@ question = 'Where was it played?' answer = p(question, [], history).get()[0] ``` +*Write a [retrieval-augmented generation pipeline](https://towhee.io/tasks/detail/pipeline/retrieval-augmented-generation) with explicit input/output name specifications:* + +```python +from towhee import pipe, ops + + +temp = '''{question} + +Input: +{context} +''' + + +docs = ['You can install Towhee via the command `pip install towhee`.'] +history = [ + ('What is Towhee?', 'Towhee is an open-source machine learning project that helps you encode your unstructured data into embeddings.') +] +question = 'How to install it?' + +p = ( + pipe.input('question', 'docs', 'history') + .map(('question', 'docs', 'history'), + 'prompt', + ops.prompt.template(temp, ['question', 'context'])) + .map('prompt', 'answer', + ops.LLM.Dolly()) + .output('answer') +) + +answer = p(question, docs, history).get()[0] +``` +
## Factory Constructor diff --git a/hf_dolly.py b/hf_dolly.py index 70b04a7..1351bd5 100644 --- a/hf_dolly.py +++ b/hf_dolly.py @@ -21,7 +21,7 @@ from towhee.operator.base import PyOperator, SharedType class HuggingfaceDolly(PyOperator): - '''Wrapper of OpenAI Chat API''' + '''Wrapper of Dolly inference''' def __init__(self, model_name: str = 'databricks/dolly-v2-12b', **kwargs diff --git a/requirements.txt b/requirements.txt index f3592a7..a2d545a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ transformers[torch]>=4.28.1,<5 torch>=1.13.1,<2 +accelerate