@@ -107,20 +107,21 @@ class AutoTransformers(NNOperator):
                               path,
                               input_names=["input_ids", "token_type_ids", "attention_mask"],  # list(inputs.keys())
                               output_names=["last_hidden_state"],
-                              opset_version=12,
+                              opset_version=13,
                               dynamic_axes={
                                   "input_ids": {0: "batch_size", 1: "input_length"},
                                   "token_type_ids": {0: "batch_size", 1: "input_length"},
                                   "attention_mask": {0: "batch_size", 1: "input_length"},
                                   "last_hidden_state": {0: "batch_size"},
                               })
-        except Exception:
+        except Exception as e:
+            print(e, '\nTrying with 2 outputs...')
             torch.onnx.export(self.model,
                               tuple(inputs.values()),
                               path,
                               input_names=["input_ids", "token_type_ids", "attention_mask"],  # list(inputs.keys())
                               output_names=["last_hidden_state", "pooler_output"],
-                              opset_version=12,
+                              opset_version=13,
                               dynamic_axes={
                                   "input_ids": {0: "batch_size", 1: "input_length"},
                                   "token_type_ids": {0: "batch_size", 1: "input_length"},
@@ -321,16 +322,11 @@ class AutoTransformers(NNOperator):
             model_list = list(set(full_list) - set(to_remove))
         elif format == 'onnx':
             to_remove = [
                 'albert-xlarge-v1',
                 'albert-xlarge-v2',
                 'albert-xxlarge-v1',
                 'albert-xxlarge-v2',
                 'allenai/led-base-16384',
                 'ctrl',
                 'distilgpt2',
                 'EleutherAI/gpt-j-6B',
                 'EleutherAI/gpt-neo-1.3B',
                 'funnel-transformer/intermediate',
                 'funnel-transformer/large',
                 'funnel-transformer/medium',
                 'funnel-transformer/small',
@@ -338,8 +334,6 @@ class AutoTransformers(NNOperator):
                 'google/bigbird-pegasus-large-arxiv',
                 'google/bigbird-pegasus-large-bigpatent',
                 'google/bigbird-pegasus-large-pubmed',
                 'google/canine-c',
                 'google/canine-s',
                 'google/fnet-base',
                 'google/fnet-large',
                 'google/reformer-crime-and-punishment',
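
For reference, below is a minimal standalone sketch of the export path this patch settles on (opset 13, dynamic batch/length axes, and the single-output then two-output fallback), followed by a quick onnxruntime shape check. It is not part of the patch: the 'bert-base-uncased' checkpoint, the 'model.onnx' output path, and the verification loop are illustrative assumptions, and the inputs are passed in the model's positional order rather than via tuple(inputs.values()) as in the operator.

# Illustrative sketch only; checkpoint name, output path, and the shape check are assumptions.
import torch
import onnxruntime
from transformers import AutoModel, AutoTokenizer

name = 'bert-base-uncased'  # any BERT-style checkpoint that uses token_type_ids
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModel.from_pretrained(name)
model.eval()

enc = tokenizer('hello world', return_tensors='pt')
# Pass tensors in the model's positional order so the ONNX input names line up with them.
args = (enc['input_ids'], enc['attention_mask'], enc['token_type_ids'])
path = 'model.onnx'
common = dict(
    input_names=['input_ids', 'attention_mask', 'token_type_ids'],
    opset_version=13,
    dynamic_axes={
        'input_ids': {0: 'batch_size', 1: 'input_length'},
        'attention_mask': {0: 'batch_size', 1: 'input_length'},
        'token_type_ids': {0: 'batch_size', 1: 'input_length'},
        'last_hidden_state': {0: 'batch_size'},
    },
)
try:
    # First try a single named output, as the patched operator does.
    torch.onnx.export(model, args, path, output_names=['last_hidden_state'], **common)
except Exception as e:
    print(e, '\nTrying with 2 outputs...')
    torch.onnx.export(model, args, path,
                      output_names=['last_hidden_state', 'pooler_output'], **common)

# The dynamic axes let batches of different sizes and sequence lengths run unchanged.
session = onnxruntime.InferenceSession(path, providers=['CPUExecutionProvider'])
for batch in (['one sentence'], ['two sentences', 'of a different length, padded together']):
    feed = {k: v.numpy() for k, v in tokenizer(batch, padding=True, return_tensors='pt').items()}
    print(session.run(None, feed)[0].shape)  # (batch_size, input_length, hidden_size)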