Rename model_name to deployment_name.
Signed-off-by: wxywb <xy.wang@zilliz.com>
main
wxywb
2 years ago
2 changed files with 7 additions and 24 deletions
- README.md
- azure_openai_chat.py

README.md

@@ -1,4 +1,4 @@
-# OpenAI Chat Completion
+# Azure OpenAI Chat Completion

*author: David Wang*
@@ -75,18 +75,13 @@ answer = p(question, docs, history).get()[0]

Create the operator via the following factory method:

-***LLM.OpenAI(model_name: str, api_key: str)***
+***LLM.OpenAI(deployment_name: str, api_key: str)***

**Parameters:**

-***model_name***: *str*
+***deployment_name***: *str*

-    The model name in string, defaults to 'gpt-3.5-turbo'. Supported model names:
-    - gpt-3.5-turbo
-    - gpt-3.5-turbo-16k
-    - gpt-3.5-turbo-instruct
-    - gpt-3.5-turbo-0613
-    - gpt-3.5-turbo-16k-0613
+    Deployments provide endpoints to the Azure OpenAI base models, or your fine-tuned models, configured with settings to meet your needs.

***api_type***: *str='azure'*
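
For context, here is a minimal sketch of the Azure OpenAI call that a deployment name feeds into, using the pre-1.0 `openai` Python SDK that the operator below relies on (`openai.ChatCompletion.create`). The endpoint, API key, and deployment name are placeholders, not values from this repository.

```python
import openai

# Azure OpenAI routes requests to a named deployment, not directly to a model name.
openai.api_type = 'azure'
openai.api_version = '2023-07-01-preview'                   # same version the operator defaults to
openai.api_base = 'https://my-resource.openai.azure.com/'   # placeholder Azure endpoint
openai.api_key = '<AZURE_OPENAI_API_KEY>'                   # placeholder key

response = openai.ChatCompletion.create(
    engine='my-gpt35-deployment',   # the deployment name configured in the Azure portal
    messages=[{'role': 'user', 'content': 'Hello!'}],
    n=1,
)
print(response['choices'][0]['message']['content'])
```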

azure_openai_chat.py

@@ -21,7 +21,7 @@ from towhee.operator.base import PyOperator

class AzureOpenaiChat(PyOperator):
    '''Wrapper of OpenAI Chat API'''
    def __init__(self,
-                 model_name: str = 'gpt-3.5-turbo',
+                 deployment_name: str = 'gpt-3.5-turbo',
                 api_type: str = 'azure',
                 api_version: str = '2023-07-01-preview',
                 api_key: str = None,
@@ -34,14 +34,14 @@ class AzureOpenaiChat(PyOperator):
        self._api_type = api_type
        self._api_version = api_version
-        self._model = model_name
+        self._deployment = deployment_name
        self.stream = kwargs.pop('stream') if 'stream' in kwargs else False
        self.kwargs = kwargs

    def __call__(self, messages: List[dict]):
        messages = self.parse_inputs(messages)
        response = openai.ChatCompletion.create(
-            engine=self._model,
+            engine=self._deployment,
            messages=messages,
            n=1,
            stream=self.stream,
@@ -81,15 +81,3 @@ class AzureOpenaiChat(PyOperator):
            for resp in response:
                yield resp['choices'][0]['delta']

-    @staticmethod
-    def supported_model_names():
-        model_list = [
-            'gpt-3.5-turbo',
-            'gpt-3.5-turbo-16k',
-            'gpt-3.5-turbo-instruct',
-            'gpt-3.5-turbo-0613',
-            'gpt-3.5-turbo-16k-0613'
-        ]
-        model_list.sort()
-        return model_list
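
Putting the renamed pieces together, the operator would be exercised roughly as follows. This is a minimal sketch based only on the signatures visible in the hunks above; the import path, the message format accepted by `parse_inputs()`, and any constructor arguments outside these hunks (such as the Azure endpoint) are assumptions.

```python
# Hypothetical import path; assumes azure_openai_chat.py is on the Python path.
from azure_openai_chat import AzureOpenaiChat

op = AzureOpenaiChat(
    deployment_name='my-gpt35-deployment',   # placeholder Azure deployment name
    api_type='azure',
    api_version='2023-07-01-preview',
    api_key='<AZURE_OPENAI_API_KEY>',        # placeholder key
)

# parse_inputs() is not shown in this diff, so the plain role/content message
# format below is an assumption about what the operator accepts.
answer = op([{'role': 'user', 'content': 'What does a deployment name refer to?'}])
print(answer)
```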