
Fix for model without pad

Signed-off-by: Jael Gu <mengjia.gu@zilliz.com>
Branch: main
Author: Jael Gu (2 years ago)
Commit: 0db0a18ad0
1 changed file with 8 changes:

auto_transformers.py

@@ -103,9 +103,8 @@ class AutoTransformers(NNOperator):
     def __call__(self, txt: str, return_sentence_emb: bool = False) -> numpy.ndarray:
         try:
             inputs = self.tokenizer(txt, padding=True, truncation=True, return_tensors="pt").to(self.device)
-        except Exception as e:
-            log.error(f'Invalid input for the tokenizer: {self.model_name}')
-            raise e
+        except Exception:
+            inputs = self.tokenizer(dummy_input, truncation=True, return_tensors='pt').to(self.device)
         try:
             outs = self.model(**inputs)
         except Exception as e:
@@ -144,7 +143,10 @@ class AutoTransformers(NNOperator):
             raise AttributeError('Unsupported model_type.')
         dummy_input = '[CLS]'
-        inputs = self.tokenizer(dummy_input, padding=True, truncation=True, return_tensors='pt') # a dictionary
+        try:
+            inputs = self.tokenizer(dummy_input, padding=True, truncation=True, return_tensors='pt') # a dictionary
+        except Exception:
+            inputs = self.tokenizer(dummy_input, truncation=True, return_tensors='pt')
         if model_type == 'pytorch':
             torch.save(self._model, output_file)
         elif model_type == 'torchscript':
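
For context, a minimal sketch (not part of this commit) of the failure the new fallback handles: tokenizers that ship without a pad token, such as gpt2's, raise an error when padding=True is requested, so the call is retried without padding. The model name and input text below are illustrative only.

from transformers import AutoTokenizer

# gpt2's tokenizer defines no pad token, so asking it to pad raises an error.
tokenizer = AutoTokenizer.from_pretrained('gpt2')

text = 'hello towhee'
try:
    inputs = tokenizer(text, padding=True, truncation=True, return_tensors='pt')
except Exception:
    # Same retry-without-padding pattern the commit adds: safe for a single
    # sequence, since nothing actually needs to be padded.
    inputs = tokenizer(text, truncation=True, return_tensors='pt')

print(inputs['input_ids'].shape)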
