def f_saliency_whitebox_tcebp(wb, im):
    # Truncated contrastive EBP saliency (positive channel 0 vs. negative channel 1, truncated at the 20th percentile)
    img_saliency = wb.truncated_contrastive_ebp(wb.net.preprocess(im.pil()), k_poschannel=0, k_negchannel=1, percentile=20)
    if np.max(img_saliency) == 255:
        img_saliency = img_saliency.astype(np.float32) / 255.0  # normalize uint8 saliency to [0, 1]
    # Resize the probe to the saliency map resolution and alpha-blend the saliency map over it
    return np.array(_blend_saliency_map(np.array(im.pil().resize(img_saliency.shape)), img_saliency, gamma=0.5))
def f_saliency_whitebox_weighted_subtree_lightcnn(wb, im):
    # Weighted subtree EBP saliency for the LightCNN backbone
    img_probe = wb.net.preprocess(im.pil())
    (img_saliency, P_img, P_subtree, k_subtree) = wb.weighted_subtree_ebp(img_probe, k_poschannel=0, k_negchannel=1, topk=64, do_max_subtree=False, subtree_mode='affineonly_with_prior', do_mated_similarity_gating=True, verbose=False)
    img_saliency = np.float32(img_saliency) / 255.0  # normalize uint8 saliency to [0, 1]
    # Resize the probe to the saliency map resolution and alpha-blend the saliency map over it
    return np.array(_blend_saliency_map(np.array(im.pil().resize(img_saliency.shape)), img_saliency, gamma=0.5))
def f_saliency_whitebox_ebp(wb, im):
    # Standard excitation backprop (EBP) saliency with a one-hot prior on class 0
    P = torch.zeros((1, wb.net.num_classes()))
    P[0][0] = 1.0  # one-hot prior probability
    img_saliency = wb.ebp(wb.net.preprocess(im.pil()), P)
    if np.max(img_saliency) == 255:
        img_saliency = img_saliency.astype(np.float32) / 255.0  # normalize uint8 saliency to [0, 1]
    # Resize the probe to the saliency map resolution and alpha-blend the saliency map over it
    return np.array(_blend_saliency_map(np.array(im.pil().resize(img_saliency.shape)), img_saliency, gamma=0.5))
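# Usage sketch (not part of the original module): given a whitebox wrapper 'wb' and a probe image
# 'im' that satisfy the interface used above (wb.net.preprocess, wb.ebp, im.pil(), ...), the three
# saliency variants can be generated side by side for comparison. This helper is illustrative only;
# how 'wb' and 'im' are constructed is assumed to follow the surrounding codebase.
def f_saliency_overlay_comparison(wb, im):
    # Return blended saliency overlays keyed by method name, using only the functions defined above
    return {'ebp': f_saliency_whitebox_ebp(wb, im),
            'truncated_contrastive_ebp': f_saliency_whitebox_tcebp(wb, im),
            'weighted_subtree_ebp': f_saliency_whitebox_weighted_subtree_lightcnn(wb, im)}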