import torch
from PIL import Image
from transformers import CLIPProcessor, CLIPModel

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Load the CLIP ViT-L/14 checkpoint used to compute image-image (CLIP-I) similarity.
model = CLIPModel.from_pretrained("openai/clip-vit-large-patch14").to(device)
processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")


def calculate_clip_I(image1, image2):
    """Return the CLIP-I score: cosine similarity between the CLIP image embeddings of two PIL images."""
    inputs1 = processor(images=image1, return_tensors="pt").to(device)
    inputs2 = processor(images=image2, return_tensors="pt").to(device)

    with torch.no_grad():
        image_features1 = model.get_image_features(**inputs1)
        image_features2 = model.get_image_features(**inputs2)

    # L2-normalize the embeddings so the dot product below is a cosine similarity.
    image_features1 /= image_features1.norm(dim=-1, keepdim=True)
    image_features2 /= image_features2.norm(dim=-1, keepdim=True)

    similarity = torch.matmul(image_features1, image_features2.T).cpu().numpy()[0][0]

    return similarity
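

# Illustrative usage sketch (not part of the original script): the file names below
# are hypothetical placeholders. calculate_clip_I takes two PIL images and returns
# their CLIP-I cosine similarity as a float in roughly [-1, 1].
if __name__ == "__main__":
    # Hypothetical image paths for demonstration only.
    img_a = Image.open("generated.png").convert("RGB")
    img_b = Image.open("reference.png").convert("RGB")

    score = calculate_clip_I(img_a, img_b)
    print(f"CLIP-I similarity: {score:.4f}")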