-
Notifications
You must be signed in to change notification settings - Fork 2
/
eval.py
31 lines (22 loc) · 950 Bytes
/
eval.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import torch
from transformers import CLIPProcessor
from vq_clip import VQCLIPModel
from vq_clip.eval import zero_shot_eval
def evaluate(
    imagenet_path: str = "",
    pretrained_clip_url: str = "openai/clip-vit-large-patch14",
    vq_vision_model_url: str = "adams-story/vq-ViT-L-14-k64-d32",
    batch_size: int = 1024,
):
    """Run zero-shot classification evaluation of a VQ-CLIP model.

    Loads a pretrained CLIP backbone, attaches the VQ vision adapter,
    and runs ``zero_shot_eval`` over the dataset at ``imagenet_path``,
    printing the resulting metrics.

    Args:
        imagenet_path: Path to the ImageNet validation data.
        pretrained_clip_url: Hub id of the base CLIP model.
        vq_vision_model_url: Hub id of the VQ vision adapter weights.
        batch_size: Evaluation batch size.
    """
    # Prefer GPU when one is present; fall back to CPU otherwise.
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")

    model = VQCLIPModel.from_pretrained_clip(
        pretrained_clip_url, vision_vq_adapter_path=vq_vision_model_url
    )
    print("loaded vqclip")

    processor = CLIPProcessor.from_pretrained(pretrained_clip_url)
    model = model.to(device)

    # Mixed precision + no gradient tracking for inference only.
    with torch.no_grad(), torch.autocast(str(device)):
        res = zero_shot_eval(model, processor, imagenet_path, batch_size=batch_size)

    print(res)
if __name__ == "__main__":
    # jsonargparse.CLI builds a command-line interface directly from
    # evaluate()'s signature: each keyword argument becomes a CLI flag.
    # Imported lazily so the module can be imported without jsonargparse.
    import jsonargparse
    jsonargparse.CLI(evaluate)