fix for gpu inference
Signed-off-by: wxywb <xy.wang@zilliz.com>
v2
wxywb
3 years ago
2 changed files with 3 additions and 2 deletions
- blip.py
- requirements.txt
blip.py
@@ -39,6 +39,8 @@ class Blip(NNOperator):
         self._modality = modality
+        self.device = "cuda" if torch.cuda.is_available() else "cpu"
+        self.model.to(self.device)
         self.model.eval()

         self.tfms = transforms.Compose([
             transforms.Resize((image_size,image_size),interpolation=InterpolationMode.BICUBIC),
@@ -47,7 +49,6 @@ class Blip(NNOperator):
         ])

     def __call__(self, data):
-        print('call')
         if self._modality == 'image':
             vec = self._inference_from_image(data)
         elif self._modality == 'text':
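For context, the following is a minimal sketch of the device-handling pattern the blip.py change applies: select CUDA when it is available, move the model's weights there, and keep input tensors on the same device at inference time. The class name BlipSketch, its constructor arguments, and the body of _inference_from_image are illustrative assumptions rather than the operator's actual code; only the device selection and model placement mirror the diff above.

import torch
from torch import nn
from torchvision import transforms
from torchvision.transforms import InterpolationMode


class BlipSketch:
    """Illustrative stand-in for the operator; not the repository's Blip class."""

    def __init__(self, model: nn.Module, image_size: int = 224):
        # Prefer the GPU when one is visible, otherwise fall back to CPU,
        # then move the model weights there before switching to eval mode.
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.model = model.to(self.device)
        self.model.eval()
        self.tfms = transforms.Compose([
            transforms.Resize((image_size, image_size),
                              interpolation=InterpolationMode.BICUBIC),
            transforms.ToTensor(),
        ])

    def _inference_from_image(self, img):
        # The input batch must live on the same device as the model weights;
        # keeping both on self.device is what lets the selected device take
        # effect at call time.
        batch = self.tfms(img).unsqueeze(0).to(self.device)
        with torch.no_grad():
            return self.model(batch)

The diff above shows only the model-side half of this pattern; the input-side handling in _inference_from_image is sketched here as an assumption.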
requirements.txt
@@ -3,4 +3,4 @@ torchvision>=0.10.0
 Pillow
 towhee
 timm
-transformers
+transformers>=4.15.0
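On the dependency side, the tightened pin can be verified at runtime with a small check; the snippet below is a generic sanity check and is not part of the repository:

import transformers
from packaging import version

# Confirm the installed transformers release satisfies the new lower bound
# declared in requirements.txt (>= 4.15.0).
assert version.parse(transformers.__version__) >= version.parse("4.15.0"), (
    f"transformers {transformers.__version__} is older than the required 4.15.0"
)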