# coding=utf-8
# Copyright 2023 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import List, Tuple, Dict, Optional
import logging

from towhee.operator import PyOperator

logger = logging.getLogger()

# Default prompt template, used for OpenAI models and as the fallback.
gpt_prompt = """Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say that you don't know, don't try to make up an answer.

{context}

Question: {question}
Helpful Answer:
"""

# Prompt template for Dolly-style instruction models.
dolly_prompt = """{question}

Input:
{context}
"""

# Prompt template for ERNIE, kept in Chinese since ERNIE is a Chinese LLM.
# It reads: "Answer the question at the very end based on the following material:
# {context}  Question: {question}"
ernie_prompt = """根据以下材料回答最末尾的问题:

{context}

问题:{question}
"""


class QAPrompt(PyOperator):
    """Build a question-answering prompt from a question, retrieved context, and optional chat history."""

    def __init__(self, temp: Optional[str] = None, llm_name: Optional[str] = None):
        super().__init__()
        # A custom template takes precedence; otherwise pick a built-in template by model name.
        if temp:
            self._template = temp
        elif not llm_name:
            self._template = gpt_prompt
        elif llm_name.lower() == 'dolly':
            self._template = dolly_prompt
        elif llm_name.lower() == 'openai':
            self._template = gpt_prompt
        elif llm_name.lower() == 'ernie':
            self._template = ernie_prompt
        else:
            logger.warning('Unknown llm_name, using the default prompt.')
            self._template = gpt_prompt

    def __call__(self, question: str, context: str,
                 history: Optional[List[Tuple]] = None) -> List[Dict[str, str]]:
        """
        history: List[Tuple]: [(question1, answer1), (question2, answer2)]
        """
        prompt_str = self._template.format(context=context, question=question)
        ret = [{'question': prompt_str}]
        if not isinstance(history, list):
            return ret
        # Prepend prior turns so the conversation history precedes the new prompt.
        history_data = []
        for item in history:
            history_data.append({'question': item[0], 'answer': item[1]})
        return history_data + ret
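

# A minimal usage sketch (illustrative only, not part of the operator): it calls
# the operator directly with a question, retrieved context, and optional chat
# history. The question/context strings below are made-up examples; in a real
# Towhee pipeline the operator is usually wired in through the pipeline API
# rather than instantiated by hand.
if __name__ == '__main__':
    prompt_op = QAPrompt(llm_name='openai')
    messages = prompt_op(
        question='What does the operator return?',
        context='QAPrompt formats retrieved context and a question into a prompt.',
        history=[('Hi there.', 'Hello! How can I help?')],
    )
    # Expected shape: prior history turns first, then the formatted prompt, e.g.
    # [{'question': 'Hi there.', 'answer': 'Hello! How can I help?'},
    #  {'question': 'Use the following pieces of context ...'}]
    print(messages)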