Browse Source
Disable use_cache
Signed-off-by: Jael Gu <mengjia.gu@zilliz.com>
main
2 changed files with
3 additions and
1 deletion
-
auto_transformers.py
-
test_onnx.py
|
|
@ -206,6 +206,8 @@ class AutoTransformers(NNOperator): |
|
|
|
dynamic_axes[k] = v |
|
|
|
for k, v in self.onnx_config['outputs'].items(): |
|
|
|
dynamic_axes[k] = v |
|
|
|
if hasattr(self._model.config, 'use_cache'): |
|
|
|
self._model.config.use_cache = False |
|
|
|
torch.onnx.export( |
|
|
|
self._model, |
|
|
|
tuple(inputs.values()), |
|
|
|
|
|
@ -21,7 +21,7 @@ t_logging.set_verbosity_error() |
|
|
|
# full_models = op.supported_model_names() |
|
|
|
# checked_models = AutoTransformers.supported_model_names(format='onnx') |
|
|
|
# models = [x for x in full_models if x not in checked_models] |
|
|
|
models = ['distilbert-base-cased', 'sentence-transformers/paraphrase-albert-small-v2'] |
|
|
|
models = ['distilbert-base-cased', 'paraphrase-albert-small-v2'] |
|
|
|
test_txt = 'hello, world.' |
|
|
|
atol = 1e-3 |
|
|
|
log_path = 'transformers_onnx.log' |
|
|
|