import lpips
import os
from tqdm import tqdm
from PIL import Image
import numpy as np
import torch

loss_fn_alex = lpips.LPIPS(net='alex').cuda()

dataPath = r'D:\PycharmProjects\paper_instrument\ImageQualityAssessment\hdrnet'
groundTruthPath = r'D:\PycharmProjects\paper_instrument\ImageQualityAssessment\groundtruth'
assert len(os.listdir(dataPath)) == len(os.listdir(groundTruthPath))

LPIPS = 0.0
for x_name, y_name in tqdm(zip(os.listdir(dataPath), os.listdir(groundTruthPath))):
    x = (np.array(Image.open(os.path.join(dataPath, x_name))).transpose(2, 0, 1).astype(np.float32) / 255) * 2 - 1
    y = (np.array(Image.open(os.path.join(groundTruthPath, y_name))).transpose(2, 0, 1).astype(np.float32) / 255) * 2 - 1
    x = torch.from_numpy(x).unsqueeze(0).cuda()
    y = torch.from_numpy(y).unsqueeze(0).cuda()
    LPIPS += loss_fn_alex(x, y)

avgLPIPS = LPIPS / len(os.listdir(dataPath))
print(avgLPIPS)
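This doesn't address the run-to-run variation itself, but for reference here is a sketch of the same loop with a few defensive tweaks: sorted file listings so each output is paired with its ground truth regardless of filesystem order, torch.no_grad() so no autograd graphs are retained, and .item() so a plain Python float is accumulated instead of a CUDA tensor. The imports, paths, and loss_fn_alex module are the ones defined above.

with torch.no_grad():
    total = 0.0
    pairs = list(zip(sorted(os.listdir(dataPath)), sorted(os.listdir(groundTruthPath))))
    for x_name, y_name in tqdm(pairs):
        # same loading and [-1, 1] scaling as in the script above
        x = (np.array(Image.open(os.path.join(dataPath, x_name))).transpose(2, 0, 1).astype(np.float32) / 255) * 2 - 1
        y = (np.array(Image.open(os.path.join(groundTruthPath, y_name))).transpose(2, 0, 1).astype(np.float32) / 255) * 2 - 1
        x = torch.from_numpy(x).unsqueeze(0).cuda()
        y = torch.from_numpy(y).unsqueeze(0).cuda()
        # .item() collapses the 1x1x1x1 output to a float, so nothing from the graph is kept around
        total += loss_fn_alex(x, y).item()
print(total / len(pairs))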
Probably because dropout is enabled by default. Try loss_fn_alex = lpips.LPIPS(net='alex', use_dropout=False).cuda()
Double-check that the module is in eval mode by calling .eval() on it.
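Putting the two suggestions together, a minimal sketch of how the metric module would be set up (same AlexNet backend as in the original script):

import lpips
# build LPIPS without dropout and switch the whole module to eval mode before scoring
loss_fn_alex = lpips.LPIPS(net='alex', use_dropout=False).cuda()
loss_fn_alex.eval()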
I also got the same behaviour even after I created loss_fn_alex with use_dropout=False and called loss_fn_alex.eval().
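One quick way to narrow this down is to score the same pair twice in a row: with dropout disabled and the module in eval mode the two values should be identical, so if they still differ the variation is coming from somewhere other than dropout. A small check, assuming x, y, and loss_fn_alex are prepared exactly as in the script above:

with torch.no_grad():
    d1 = loss_fn_alex(x, y).item()
    d2 = loss_fn_alex(x, y).item()
print(d1, d2)  # should print the same value twice when the module is deterministic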