def main():
    """Visualize the weights and biases of a Caffe network into a VzLog report.

    Reads a serialized ``NetParameter`` protobuf (the ``caffemodel`` argument),
    then for every layer that carries parameter blobs logs summary statistics
    and renders an image grid of the weights plus a line plot of the biases.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('caffemodel', type=str)
    parser.add_argument('-o', '--output', default='log-network', type=str)
    args = parser.parse_args()

    vz = VzLog(args.output)

    mm = caffe.proto.caffe_pb2.NetParameter()
    with open(args.caffemodel, 'rb') as f:
        mm.ParseFromString(f.read())

    vz.title('Network')
    c = 0  # counts parameterized layers seen so far
    for layer in mm.layers:
        if not layer.blobs:
            continue

        blob = layer.blobs[0]
        shape = _blob_shape(blob)
        X = np.asarray(blob.data).reshape(shape)

        vz.section(layer.name)
        vz.log('min', X.min(), 'max', X.max(), 'mean', X.mean(), 'std', X.std())
        vz.log('shape', X.shape)
        if c == 0:
            # First parameterized layer: render filters as color images,
            # transposing (N, C, H, W) -> (N, H, W, C).
            # NOTE(review): assumes the first blob is 4-D — confirm for
            # networks whose first parameterized layer is not convolutional.
            grid = dd.plot.ColorImageGrid(X.transpose(0, 2, 3, 1),
                                          vmin=None, vmax=None)
        else:
            grid = dd.plot.ImageGrid(X, cmap=cm.rainbow,
                                     vmin=None, vmax=None, vsym=True)
        grid.save(vz.impath(), scale=4)

        # FIX: only plot biases if the layer actually has a second blob.
        # The original indexed layer.blobs[1] unconditionally and raised
        # IndexError for layers trained without a bias term.
        if len(layer.blobs) > 1:
            blob = layer.blobs[1]
            shape = _blob_shape(blob)
            X = np.asarray(blob.data).reshape(shape)

            # Plot biases
            vz.text('Bias')
            vz.log('shape', X.shape)
            plt.figure(figsize=(6, 1.5))
            plt.plot(X.ravel())
            plt.xlim((0, X.size - 1))
            plt.savefig(vz.impath('svg'))
            plt.close()

        c += 1
def main():
    """Render weight and bias summaries for each parameterized layer of a
    Caffe network into a VzLog report (double-quote variant of this script).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("caffemodel", type=str)
    parser.add_argument("-o", "--output", default="log-network", type=str)
    args = parser.parse_args()

    vz = VzLog(args.output)

    mm = caffe.proto.caffe_pb2.NetParameter()
    with open(args.caffemodel, "rb") as f:
        mm.ParseFromString(f.read())

    vz.title("Network")
    c = 0  # number of parameterized layers processed
    for layer in mm.layers:
        if not layer.blobs:
            continue

        blob = layer.blobs[0]
        X = np.asarray(blob.data).reshape(_blob_shape(blob))

        vz.section(layer.name)
        vz.log("min", X.min(), "max", X.max(), "mean", X.mean(), "std", X.std())
        vz.log("shape", X.shape)
        if c == 0:
            # First parameterized layer: treat filters as color images,
            # (N, C, H, W) -> (N, H, W, C).
            grid = dd.plot.ColorImageGrid(X.transpose(0, 2, 3, 1),
                                          vmin=None, vmax=None)
        else:
            grid = dd.plot.ImageGrid(X, cmap=cm.rainbow,
                                     vmin=None, vmax=None, vsym=True)
        grid.save(vz.impath(), scale=4)

        # FIX: guard against layers without a bias blob; the original
        # indexed blobs[1] unconditionally and raised IndexError.
        if len(layer.blobs) > 1:
            blob = layer.blobs[1]
            X = np.asarray(blob.data).reshape(_blob_shape(blob))

            # Plot biases
            vz.text("Bias")
            vz.log("shape", X.shape)
            plt.figure(figsize=(6, 1.5))
            plt.plot(X.ravel())
            plt.xlim((0, X.size - 1))
            plt.savefig(vz.impath("svg"))
            plt.close()

        c += 1
def main():
    """Plot per-layer response redundancy for one or more response files.

    For each layer, redundancy is the fraction of eigenvalues of the response
    correlation matrix needed to reach ``alpha`` of the total energy
    (trace(C) == C.shape[0] for a correlation matrix).
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('responses', nargs='+', type=str)
    parser.add_argument('-t', '--title', default='', type=str)
    parser.add_argument('-o', '--output', default='log-responses', type=str)
    parser.add_argument('-l', '--layers', nargs='?', type=str)
    parser.add_argument('-a', '--alpha', default=0.99, type=float)
    args = parser.parse_args()

    alpha = args.alpha
    vz = VzLog(args.output)
    vz.title(args.title)
    vz.log('alpha =', alpha)

    plt.figure()
    layers = args.layers
    for fn in args.responses:
        data = dd.io.load(fn)
        name = data['name']
        if layers is None:
            layers = data['layers']
        y = []
        ystd = []
        for l in layers:
            rs = []
            for X in data['responses'][l]:
                C = np.corrcoef(X.T)
                # FIX: constant response columns produce NaN correlations;
                # zero them out (matches the model-aware variant of this
                # script elsewhere in the file).
                C[np.isnan(C)] = 0.0
                spectrum = np.sort(np.linalg.eigvals(C))[::-1]
                frac = spectrum.cumsum() / C.shape[0]
                hits = np.where(frac > alpha)[0]
                # FIX: the original indexed [0][0] unconditionally and
                # crashed with IndexError when the alpha threshold was
                # never reached.
                if hits.size:
                    rs.append(hits[0] / C.shape[0])
            y.append(np.mean(rs))
            ystd.append(np.std(rs))

        plt.errorbar(np.arange(len(y)), y, yerr=ystd, label='{}'.format(name))
    plt.xticks(np.arange(len(y)), layers)
    plt.ylabel('<- Redundancy / Identity-like ->')
    plt.legend(loc=4)
    plt.ylim((0, 1))
    plt.savefig(vz.impath('svg'))
    plt.close()
def main():
    """Compare per-layer response statistics across response files and
    weight statistics / layer sizes across caffemodels.

    Produces, in order: a redundancy error-bar figure, mean / second-moment /
    standard-deviation figures for the responses, weight mean / s.d. figures
    for the models, and a layer-size figure.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('responses', nargs='+', type=str)
    parser.add_argument('-t', '--title', default='', type=str)
    parser.add_argument('-o', '--output', default='log-responses', type=str)
    parser.add_argument('-m', '--model', nargs='+', type=str)
    parser.add_argument('-l', '--layers', nargs='?', type=str)
    parser.add_argument('-a', '--alpha', default=0.99, type=float)
    args = parser.parse_args()

    alpha = args.alpha
    vz = VzLog(args.output)
    vz.title(args.title)
    vz.log('alpha =', alpha)

    # FIX: -m/--model is optional; the original iterated args.model even
    # when it was None and crashed.
    model_fns = args.model or []
    mms = []
    for fn in model_fns:
        mm = caffe.proto.caffe_pb2.NetParameter()
        with open(fn, 'rb') as f:
            mm.ParseFromString(f.read())
        mms.append(mm)

    # Resolve the layer list once up front instead of lazily per figure.
    # NOTE(review): -l is declared nargs='?', so a user-supplied value is a
    # single string and iterating it walks characters — confirm whether
    # nargs='+' was intended.
    layers = args.layers
    if layers is None:
        layers = dd.io.load(args.responses[0])['layers']

    def _redundancy(X):
        # Fraction of correlation-matrix eigenvalues needed to capture
        # `alpha` of the total energy (trace(C) == C.shape[0]).
        C = np.corrcoef(X.T)
        C[np.isnan(C)] = 0.0
        frac = np.sort(np.linalg.eigvals(C))[::-1].cumsum() / C.shape[0]
        return np.where(frac > alpha)[0][0] / C.shape[0]

    def _plot_response_stat(ylabel, stat, ylim=None):
        # One curve per response file; x-axis indexes `layers`.
        plt.figure()
        y = []
        for fn in args.responses:
            data = dd.io.load(fn)
            y = [stat(data['responses'][l]) for l in layers]
            plt.plot(np.arange(len(y)), y, label='{}'.format(data['name']))
        plt.xticks(np.arange(len(y)), layers)
        plt.ylabel(ylabel)
        if ylim is not None:
            plt.ylim(ylim)
        plt.legend(loc=4)
        plt.savefig(vz.impath('svg'))
        plt.close()

    def _plot_weight_stat(ylabel, stat):
        # One curve per model; x-axis indexes `layers`.
        plt.figure()
        y = []
        for fn, mm in zip(model_fns, mms):
            y = [stat(get_blobs(mm, l)[0]) for l in layers]
            # FIX: the original labelled every model curve with the stale
            # response `name` left over from a previous loop; use the model
            # filename instead.
            plt.plot(np.arange(len(y)), y, label='{}'.format(fn))
        plt.xticks(np.arange(len(y)), layers)
        plt.ylabel(ylabel)
        plt.legend(loc=1)
        plt.savefig(vz.impath('svg'))
        plt.close()

    # Figure 1: redundancy with error bars.
    plt.figure()
    y = []
    for fn in args.responses:
        data = dd.io.load(fn)
        name = data['name']
        y = []
        ystd = []
        for l in layers:
            X = data['responses'][l]
            print(l, X.shape)
            rs = []
            try:
                rs.append(_redundancy(X))
            # FIX: the original used a bare `except:` that swallowed every
            # error; only the expected failures are caught now.
            except (IndexError, np.linalg.LinAlgError):
                # Threshold never reached (or eigendecomposition failed)
                # for this layer; truncate the curve here.
                break
            y.append(np.mean(rs))
            ystd.append(np.std(rs))
        plt.errorbar(np.arange(len(y)), y, yerr=ystd, label='{}'.format(name))
    plt.xticks(np.arange(len(y)), layers)
    plt.ylabel('<- Redundancy / Identity-like ->')
    plt.legend(loc=4)
    plt.ylim((0, 1))
    plt.savefig(vz.impath('svg'))
    plt.close()

    # Figures 2-4: response moments.
    _plot_response_stat('Mean', lambda X: X.mean())
    _plot_response_stat('Second moment', lambda X: (X**2).mean(), ylim=(0, None))
    _plot_response_stat('Standard deviation', lambda X: X.std(), ylim=(0, None))

    # Figures 5-6: weight statistics per model.
    _plot_weight_stat('Weight mean', lambda b: b.mean())
    _plot_weight_stat('Weight s.d.', lambda b: b.std())

    # Figure 7: effective layer sizes (n = fan-out-ish, m = fan-in-ish).
    plt.figure()
    y = []
    for fn, mm in zip(model_fns, mms):
        y = []
        y2 = []
        for l in layers:
            b0 = get_blobs(mm, l)[0]
            print(l, b0.shape)
            if l.startswith('conv'):
                N = b0.shape[0] * b0.shape[2] * b0.shape[3]
                M = b0.shape[1] * b0.shape[2] * b0.shape[3]
            else:
                N = b0.shape[2]
                M = b0.shape[3]
            y.append(N)
            y2.append(M)
        # FIX: labels used the stale response `name`; use the model filename.
        plt.plot(np.arange(len(y)), y, label='n={}'.format(fn))
        plt.plot(np.arange(len(y2)), y2, label='m={}'.format(fn))
    plt.xticks(np.arange(len(y)), layers)
    plt.ylabel('Size')
    plt.legend(loc=1)
    plt.savefig(vz.impath('svg'))
    plt.close()
def main():
    """Evaluate ensembles of network scores.

    Plots (1) ensemble error rate vs. number of ensembled networks, with one
    curve per network, and (2) single-network error rate vs. the improvement
    gained by ensembling all networks.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('scores', type=str)
    parser.add_argument('-o', '--output', default='log-scores', type=str)
    args = parser.parse_args()

    rs = np.random.RandomState(0)

    data = dd.io.load(args.scores)
    scores = data['scores']
    y = data['labels']
    names = data['names']
    L = 50  # cap on sampled index combinations per ensemble size

    vz = VzLog(args.output)

    K = scores.shape[1]
    Ks = np.arange(K) + 1

    # FIX: removed dead accumulators (all_Hs, all_ambigs, all_corrs, Y, yy)
    # and the leftover `the_ambigs.append(vars)`, which appended the
    # *builtin* `vars` function rather than any computed value.
    all_rates = []
    all_rates_std = []
    for k in Ks:
        combs = list(itr.combinations(range(K), k))
        rs.shuffle(combs)
        the_rates = []
        print('k', k)
        for indices in combs[:L]:
            networks = scores[:, list(indices)]
            # TODO: Make ensemble method a choice
            # Geometric mean of member scores via log-average; the epsilon
            # guards against log(0).
            ensemble = np.exp(np.log(networks + 1e-10).mean(1))
            #ensemble = networks.mean(1)
            rates = (ensemble.argmax(-1) != y).mean(-1)
            the_rates.append(rates)
        all_rates.append(np.mean(the_rates, axis=0))
        all_rates_std.append(np.std(the_rates, axis=0))

    all_rates = np.asarray(all_rates)
    all_rates_std = np.asarray(all_rates_std)
    print('all_rates', all_rates.shape)

    plt.figure()
    diffs = all_rates[-1] - all_rates[0]
    for i in range(all_rates.shape[1]):
        caption = '{} {:.2f}% ({:.2f})'.format(
            names[i], 100 * all_rates[-1, i], 100 * diffs[i])
        print('caption', caption)
        plt.errorbar(Ks, all_rates[:, i], yerr=all_rates_std[:, i],
                     label=caption)
    plt.xlim((0, len(Ks) + 1))
    plt.legend()
    plt.xlabel('Networks')
    plt.ylabel('Error rate')
    plt.savefig(vz.impath('svg'))
    plt.close()

    plt.figure()
    plt.scatter(all_rates[0], diffs)
    for ll in plt.xlim, plt.ylim:
        lims = ll()
        ll([lims[0], lims[1]])
    # Reference line: improvement = -(single-network rate) + 0.14.
    # NOTE(review): 0.14 looks like an empirically chosen offset — confirm.
    x = np.asarray([0, 1])
    y = -x + 0.14
    plt.plot(x, y)
    plt.xlabel('single-network error rate')
    plt.ylabel('10-network improvement')
    plt.savefig(vz.impath('svg'))
    plt.close()
def main():
    """Evaluate score ensembles (reformatted duplicate of the script above).

    Figure 1: ensemble error rate as a function of ensemble size, one curve
    per network. Figure 2: single-network error rate against the improvement
    from ensembling everything.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('scores', type=str)
    parser.add_argument('-o', '--output', default='log-scores', type=str)
    args = parser.parse_args()

    rng = np.random.RandomState(0)

    data = dd.io.load(args.scores)
    scores = data['scores']
    y = data['labels']
    names = data['names']
    max_combos = 50  # cap on sampled index combinations per ensemble size

    vz = VzLog(args.output)

    K = scores.shape[1]
    Ks = np.arange(K) + 1

    # FIX: dropped the unused accumulators (all_Hs, all_ambigs, all_corrs,
    # Y, yy) and the bug `the_ambigs.append(vars)` that stored the builtin
    # `vars` function instead of a value.
    mean_rates = []
    std_rates = []
    for k in Ks:
        combos = list(itr.combinations(range(K), k))
        rng.shuffle(combos)
        print('k', k)
        sampled_rates = []
        for indices in combos[:max_combos]:
            networks = scores[:, list(indices)]
            # TODO: Make ensemble method a choice
            # Log-average == geometric mean of member scores; epsilon
            # prevents log(0).
            ensemble = np.exp(np.log(networks + 1e-10).mean(1))
            #ensemble = networks.mean(1)
            sampled_rates.append((ensemble.argmax(-1) != y).mean(-1))
        mean_rates.append(np.mean(sampled_rates, axis=0))
        std_rates.append(np.std(sampled_rates, axis=0))

    mean_rates = np.asarray(mean_rates)
    std_rates = np.asarray(std_rates)
    print('all_rates', mean_rates.shape)

    plt.figure()
    diffs = mean_rates[-1] - mean_rates[0]
    for i in range(mean_rates.shape[1]):
        caption = '{} {:.2f}% ({:.2f})'.format(
            names[i], 100 * mean_rates[-1, i], 100 * diffs[i])
        print('caption', caption)
        plt.errorbar(Ks, mean_rates[:, i], yerr=std_rates[:, i],
                     label=caption)
    plt.xlim((0, len(Ks) + 1))
    plt.legend()
    plt.xlabel('Networks')
    plt.ylabel('Error rate')
    plt.savefig(vz.impath('svg'))
    plt.close()

    plt.figure()
    plt.scatter(mean_rates[0], diffs)
    for axis_fn in plt.xlim, plt.ylim:
        lims = axis_fn()
        axis_fn([lims[0], lims[1]])
    # Reference line: improvement = -(single-network rate) + 0.14.
    # NOTE(review): 0.14 appears to be an empirical offset — confirm.
    x = np.asarray([0, 1])
    y = -x + 0.14
    plt.plot(x, y)
    plt.xlabel('single-network error rate')
    plt.ylabel('10-network improvement')
    plt.savefig(vz.impath('svg'))
    plt.close()