From b4a6d24315a85d988ea50006a28d043b11d565d7 Mon Sep 17 00:00:00 2001
From: Jael Gu
Date: Tue, 30 May 2023 17:00:39 +0800
Subject: [PATCH] Update README

Signed-off-by: Jael Gu
---
 README.md | 17 ++++++++---------
 1 file changed, 8 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index 434f9e4..fc9c035 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# OpenAI Chat Completion
+# Dolly Generation
 
 *author: Jael*
 
@@ -22,16 +22,15 @@ Use the default model to continue the conversation from given messages.
 from towhee import pipe, ops
 
 p = (
-    pipe.input('messages')
-        .map('messages', 'answer', ops.LLM.Dolly())
-        .output('messages', 'answer')
+    pipe.input('question', 'docs', 'history')
+        .map(('question', 'docs', 'history'), 'prompt', ops.prompt.question_answer(llm_name='dolly'))
+        .map('prompt', 'answer', ops.LLM.Dolly())
+        .output('answer')
 )
 
-messages=[
-    {'question': 'Who won the world series in 2020?', 'answer': 'The Los Angeles Dodgers won the World Series in 2020.'},
-    {'question': 'Where was it played?'}
-    ]
-answer = p(messages)
+history=[('Who won the world series in 2020?', 'The Los Angeles Dodgers won the World Series in 2020.')]
+question = 'Where was it played?'
+answer = p(question, [], history)
 ```
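
Below is a minimal end-to-end sketch of the example this patch introduces, assuming a Towhee installation where `ops.prompt.question_answer` and `ops.LLM.Dolly` resolve from the Towhee hub. The diff passes an empty `docs` list and does not show how the result is read; the retrieved-passage string and the `answer.get()` call here are illustrative assumptions, not part of the patch.

```python
# Sketch of the updated README example. The pipeline wiring is taken
# verbatim from the diff above; only the inputs and the final read-out
# below are illustrative.
from towhee import pipe, ops

p = (
    pipe.input('question', 'docs', 'history')
        .map(('question', 'docs', 'history'), 'prompt',
             ops.prompt.question_answer(llm_name='dolly'))
        .map('prompt', 'answer', ops.LLM.Dolly())
        .output('answer')
)

# 'history' carries prior (question, answer) turns so the model can
# resolve "it" in the follow-up question.
history = [('Who won the world series in 2020?',
            'The Los Angeles Dodgers won the World Series in 2020.')]
question = 'Where was it played?'

# 'docs' would normally hold retrieved passages; this one is an
# illustrative stand-in (the diff itself passes an empty list).
docs = ['The 2020 World Series was played at Globe Life Field in Arlington, Texas.']

# Calling the pipeline returns a result queue; .get() is assumed here as
# the usual way to read one row of output values.
answer = p(question, docs, history)
print(answer.get())
```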