Example 1
import torch
import matplotlib.pyplot as plt
from torch_net_utils import visualize  # original Caffe-ported visualization routine

def test_consistency(G, UCG):
    # Check that the wrapped generator UCG reproduces the original generator G.
    code = torch.randn((1, UCG.codelen))
    # Raw network outputs agree up to a channel reordering (BGR vs RGB convention).
    assert torch.allclose(UCG(code), G(code)['deconv0'][:, [2, 1, 0], :, :])
    # The visualization function yields the same image.
    imgnew = UCG.visualize(code).permute([2, 3, 1, 0]).squeeze()
    imgorig = visualize(G, code.numpy(), mode="cpu")
    assert torch.allclose(imgnew, imgorig)
    plt.figure(figsize=[6, 3])
    plt.subplot(121)
    plt.imshow(imgnew.detach())
    plt.axis('off')
    plt.subplot(122)
    plt.imshow(imgorig.detach())
    plt.axis('off')
    plt.show()
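# --- Illustrative usage (not from the original file): the test expects the original
# Caffe-ported generator G (e.g. from torch_net_utils.load_generator) and a torch
# wrapper UCG exposing `.codelen`, `__call__`, and `.visualize`. The wrapper class
# name below is a placeholder assumption, not the repo's actual API.
#
# from torch_net_utils import load_generator
# G = load_generator()
# UCG = UpconvGeneratorWrapper()   # hypothetical wrapper class
# test_consistency(G, UCG)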
Example 2
import sys
import numpy as np
from hessian import hessian
#% Set up the PerceptualLoss similarity judge
sys.path.append(r"D:\Github\PerceptualSimilarity")
import models  # from the PerceptualSimilarity repo
model_squ = models.PerceptualLoss(model='net-lin', net='squeeze', use_gpu=1, gpu_ids=[0])
model_vgg = models.PerceptualLoss(model='net-lin', net='vgg', use_gpu=1, gpu_ids=[0])
model_alex = models.PerceptualLoss(model='net-lin', net='alex', use_gpu=1, gpu_ids=[0])
#%%
result_dir = r"C:\Users\ponce\OneDrive - Washington University in St. Louis\Artiphysiology\Sample_Diversity"
from torch_net_utils import load_generator, load_caffenet, visualize, preprocess
net_torch = load_caffenet()
G_torch = load_generator()
#%%
sigma = 3.0
codes = sigma * np.random.randn(40, 4096)
img_list = [visualize(G_torch, code, "cuda") for code in codes]
#%
dist_mat = np.zeros((len(codes), len(codes)))
for i in range(len(codes)):
    for j in range(len(codes)):
        dist = model_squ.forward(img_list[i].unsqueeze(0).permute(0,3,1,2), img_list[j].unsqueeze(0).permute(0,3,1,2), normalize=True)
        dist_mat[i, j] = dist.squeeze().detach().cpu().numpy()
dist_mat.mean()
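# Added sketch (not in the original): the diagonal of dist_mat is ~0 (each image vs
# itself), so the plain mean slightly understates pairwise dissimilarity. Averaging
# only the upper-triangle entries gives a cleaner diversity summary.
iu = np.triu_indices(len(codes), k=1)
print("mean pairwise LPIPS (off-diagonal):", dist_mat[iu].mean())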
#%%
basis = 5 * np.random.randn(1, 4096)
sigma = 3.0
codes = sigma * np.random.randn(40, 4096) + basis
img_list = [visualize(G_torch, code, "cuda") for code in codes]
dist_mat2 = np.zeros((len(codes), len(codes)))
for i in range(len(codes)):
    for j in range(len(codes)):
        # Same pairwise LPIPS computation as above, now for codes shifted by `basis`.
        dist = model_squ.forward(img_list[i].unsqueeze(0).permute(0, 3, 1, 2), img_list[j].unsqueeze(0).permute(0, 3, 1, 2), normalize=True)
        dist_mat2[i, j] = dist.squeeze().detach().cpu().numpy()
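# Added sketch: summarize the shifted-code distances the same way, to compare image
# diversity around the origin vs around the random basis vector.
iu2 = np.triu_indices(len(codes), k=1)
print("mean LPIPS around the origin:       ", dist_mat[iu2].mean())
print("mean LPIPS around the shifted basis:", dist_mat2[iu2].mean())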
Example 3
import csv
import h5py
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from os.path import join

EphsFN = "Beto64chan-30102019-001"  # "Beto64chan-11112019-006"

# rspPath (the folder holding the formatted response files) is defined elsewhere in the original file.
Rspfns = sorted(glob(join(rspPath, EphsFN + "*")))
rspData = h5py.File(Rspfns[1], 'r')  # the formatted .mat file is HDF5-based (MATLAB v7.3); open read-only
spikeID = rspData['meta']['spikeID']
rsp = rspData['rasters']
# Decode the image-name cell array: each entry is an HDF5 object reference to an
# array of character codes, so dereference it and join the codes into a string.
imgnms_refs = np.array(rspData['Trials']['imageName']).flatten()
imgnms = np.array(
    [''.join(chr(i) for i in rspData[ref]) for ref in imgnms_refs])
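# Added sketch: the same dereferencing pattern wrapped as a reusable helper for other
# MATLAB-v7.3 string cell arrays stored as HDF5 object references (the helper name is
# illustrative, not part of the original code).
def deref_strings(h5file, ref_dataset):
    """Resolve an array of HDF5 object references into Python strings."""
    refs = np.array(ref_dataset).flatten()
    return np.array([''.join(chr(c) for c in h5file[ref]) for ref in refs])
# e.g. imgnms = deref_strings(rspData, rspData['Trials']['imageName'])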
#%%
prefchan_idx = np.nonzero(spikeID[0, :] == 26)[0] - 1
prefrsp = rsp[:, :, prefchan_idx]  # Dataset reading takes time
# Response score: mean over time bins 50 onward minus mean over the first 40 bins (baseline).
scores = prefrsp[:, 50:, :].mean(axis=1) - prefrsp[:, :40, :].mean(axis=1)
#%%
vis_img = visualize(Generator, codes[3, :])
plt.imshow(vis_img)
plt.show()

#%% Fallback: if the names are not readable from the Python side, export them from MATLAB to CSV and read that instead.
imgnms = []  # image names as stored in the *.mat file, with the .jpg suffix stripped
with open(join(rspPath, EphsFN + "_imgName.csv"), newline='\n') as csvfile:
    reader = csv.reader(csvfile, delimiter=' ', quotechar='|')
    for row in reader:
        imgnms.append(row[0])
# imgnmref = rspData['Trials']['imageName']
# https://docs.python.org/3/library/csv.html
#%% Find the rows for generated images.
gen_rows = [
    'gen' in fn and 'block' in fn and not fn[:2].isnumeric() for fn in imgnms
]
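# Added sketch: one way the boolean mask might be used, assuming the rows of `scores`
# computed above are aligned with `imgnms` (i.e. trial order).
gen_msk = np.array(gen_rows)
gen_scores = scores[gen_msk, :]   # responses to generated (GAN) images
ref_scores = scores[~gen_msk, :]  # responses to the remaining reference images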
Example 4
img = imread(r"D:\Github\CMAES_optimizer_matlab\fc8_02.jpg")
net = CNNmodel("caffe-net")
net_trc = CNNmodel_Torch("caffe-net")
#%%
unit = ('caffe-net', 'fc8', 5)
net.select_unit(unit)
net_trc.select_unit(unit)
caffe_act = net.score(img[np.newaxis, ])
torch_act = net_trc.score(img[np.newaxis, ])
print("For image , unit %s " % (unit, ))
print("Caffenet run in Caffe scores %.3f " % (caffe_act, ))
print("Caffenet run in PyTorch scores %.3f " % (torch_act, ))
#%% Test that the Caffe GAN and its PyTorch port render the same code into the same image.
from utils import generator
from torch_net_utils import load_generator, visualize
G_torch = load_generator()  # Torch generative network
G_caffe = generator  # Caffe GAN
#%%
code = 5 * np.random.randn(1, 4096)
img_caffe = G_caffe.visualize(code)  # uint8 image, values in [0, 255]
img_torch = visualize(G_torch, code)  # float image, scaled to [0, 1] by default
#%
plt.figure()
plt.subplot(121)
plt.imshow(img_caffe)
plt.axis("off")
plt.subplot(122)
plt.imshow(img_torch)
plt.axis("off")
plt.show()
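# Added sketch of a numeric (rather than purely visual) comparison, assuming both
# renderings are H x W x 3 arrays of the same size, img_caffe in uint8 [0, 255] and
# img_torch in float [0, 1] as noted above.
img_caffe_f = np.asarray(img_caffe, dtype=np.float64) / 255.0
img_torch_f = np.asarray(img_torch.detach().cpu()) if hasattr(img_torch, "detach") else np.asarray(img_torch)
print("max abs pixel difference:", np.abs(img_caffe_f - img_torch_f).max())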
Example 5
def visualize(self, codes):
    """Render each row of `codes` (an N x 4096 array of latent codes) into an image
    with the wrapped generator self.G."""
    code_num = codes.shape[0]
    assert codes.shape[1] == 4096
    imgs = [visualize(self.G, codes[i, :]) for i in range(code_num)]
    return imgs
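# Illustrative usage (assumes `gan` is an instance of the wrapper class this method
# belongs to, with the torch generator stored in `gan.G`, and that numpy/pyplot are
# imported as np/plt):
# codes = 3 * np.random.randn(5, 4096)
# imgs = gan.visualize(codes)
# plt.imshow(imgs[0]); plt.axis('off'); plt.show()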