|
|
|
from typing import List, Tuple, Dict, Optional
|
|
|
|
import logging
|
|
|
|
|
|
|
|
from towhee.operator import PyOperator
|
|
|
|
|
|
|
|
# Module-level logger. Use __name__ (best practice) so emitted records carry
# this module's name instead of attaching directly to the root logger;
# messages still propagate to root handlers, so existing config keeps working.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
gpt_prompt = """Use the following pieces of context to answer the question at the end.
|
|
|
|
If you don't know the answer, just say that you don't know, don't try to make up an answer.
|
|
|
|
|
|
|
|
{context}
|
|
|
|
|
|
|
|
Question: {question}
|
|
|
|
|
|
|
|
Helpful Answer:
|
|
|
|
"""
|
|
|
|
|
|
|
|
dolly_prompt = """{question}
|
|
|
|
|
|
|
|
Input:
|
|
|
|
{context}
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
class QAPrompt(PyOperator):
    """Build an LLM chat payload for retrieval-augmented question answering.

    Formats a question plus retrieved documents into a prompt template and,
    optionally, prepends prior conversation turns.

    Args:
        temp (Optional[str]): Custom prompt template containing ``{context}``
            and ``{question}`` placeholders. Takes precedence over
            ``llm_name`` when truthy.
        llm_name (Optional[str]): Target LLM name ('dolly' or 'openai',
            case-insensitive); selects a built-in template when ``temp`` is
            not given. Unknown names fall back to the GPT-style template
            after logging a warning.
    """

    def __init__(self, temp: Optional[str] = None, llm_name: Optional[str] = None):
        super().__init__()
        # Built-in templates keyed by lower-cased llm_name.
        templates = {'dolly': dolly_prompt, 'openai': gpt_prompt}
        if temp:
            self._template = temp
        elif not llm_name:
            self._template = gpt_prompt
        else:
            key = llm_name.lower()
            if key not in templates:
                # Fixed typo in the original message ('Unkown' -> 'Unknown').
                logger.warning('Unknown llm_name, use default prompt')
            self._template = templates.get(key, gpt_prompt)

    def __call__(self, question: str, docs: List[str],
                 history: Optional[List[Tuple]] = None) -> List[Dict[str, str]]:
        """Render the prompt and prepend any conversation history.

        Args:
            question (str): The user's question.
            docs (List[str]): Retrieved context documents; joined with
                newlines to form ``{context}``.
            history (Optional[List[Tuple]]): Prior turns as
                ``[(question1, answer1), (question2, answer2)]``. The
                original signature had a bug — ``history=Optional[List[Tuple]]``
                assigned the typing object as the *default value* instead of
                annotating the parameter; fixed to default to ``None``
                (behavior for omitted history is unchanged, since neither
                default is a list).

        Returns:
            List[Dict[str, str]]: History entries rendered as
            ``{'question': ..., 'answer': ...}`` dicts, followed by one
            ``{'question': <formatted prompt>}`` entry.
        """
        context = '\n'.join(docs)
        prompt_str = self._template.format(context=context, question=question)
        ret = [{'question': prompt_str}]
        if not isinstance(history, list):
            # Covers None (the new default) and any non-list value callers
            # may have passed under the old buggy default.
            return ret
        # Index items rather than unpack so tuples longer than 2 still work,
        # matching the original item[0]/item[1] access.
        history_data = [{'question': item[0], 'answer': item[1]} for item in history]
        return history_data + ret
|