# Validate CLI inputs up front. Use explicit exceptions rather than assert:
# asserts are stripped when Python runs with -O, silently skipping the checks.
if not os.path.isfile(args.data):
    raise FileNotFoundError('Data file not found: "{}"'.format(args.data))
if args.save_results and os.path.exists(args.save_results):
    raise FileExistsError('Refusing to overwrite existing results file "{}"'.format(args.save_results))

# load data
print('Loading "{}" ...'.format(args.netinfo))
with open(args.netinfo, 'r') as f:
    netinfo = json.load(f)
print('Loading training settings "{}" ...'.format(netinfo['train_args']['fpath']))
with open(netinfo['train_args']['fpath'], 'r') as f:
    traininfo = json.load(f)
X, y = utils.load_h5_db(args.data, True)
X = X.astype(np.float32)

# Re-apply the preprocessing recorded at training time (stored in netinfo)
# so test data is normalized exactly like the training data was.
if netinfo['preprocess']['demean']:
    # per-channel mean subtraction; assumes X is laid out (n, c, h, w) -- TODO confirm
    for i, m in enumerate(netinfo['preprocess']['channel_means']):
        X[:, i, :, :] -= m
if netinfo['preprocess']['divide'] != 1:
    X /= netinfo['preprocess']['divide']

# load and compile net
model = utils.load_compile_model(netinfo, args.netinfo, True)

# test
import argparse

# parse args
parser = argparse.ArgumentParser(description='Average predictions saved using `test.py`.')
parser.add_argument('gt', type=str, help='Path to a HDF5 database that contains the original data, `X` ((n*c*h*w) numpy array of image data) and `y` ((n,) numpy array of labels)')
parser.add_argument('predictions', type=str, nargs='+', help='Paths to prediction files')
args = parser.parse_args()

# Validate inputs with explicit exceptions (assert is stripped under `python -O`).
if not os.path.isfile(args.gt):
    raise FileNotFoundError('Ground-truth file not found: "{}"'.format(args.gt))
missing = [p for p in args.predictions if not os.path.isfile(p)]
if missing:
    raise FileNotFoundError('Prediction file(s) not found: {}'.format(missing))

# load data
_, y = utils.load_h5_db(args.gt, True)
y = np.ravel(y)
probas = [np.load(p) for p in args.predictions]

# Average the per-run class probabilities, then take the most likely class.
probas = np.average(np.array(probas), axis=0)
cls = np.argmax(probas, axis=1)
if y.size != cls.size:
    raise ValueError('Label/prediction count mismatch: {} labels vs {} predictions'.format(y.size, cls.size))
# NOTE(review): `args` and `props` are defined earlier in the file (not visible here).
if args.log:
    print('Saving training progress log to "{}"'.format(args.log))

# Refuse to clobber existing outputs. Raise instead of assert so the checks
# still run when Python is invoked with -O (asserts are stripped then).
if os.path.exists(args.save):
    raise FileExistsError('Output file already exists: "{}"'.format(args.save))
if args.log and os.path.exists(args.log):
    raise FileExistsError('Log file already exists: "{}"'.format(args.log))

# analyze data
print('Analyzing training data ...')
fpmeans = []   # per-file, per-channel pixel means
fpnum = []     # per-file sample counts
classes = set()
for fp in props['data']['train']:
    X_train, y_train = utils.load_h5_db(fp, True)
    # mean over samples/height/width -> one mean per channel (assumes n*c*h*w layout)
    fpmeans.append(np.mean(X_train, axis=(0, 2, 3)))
    fpnum.append(y_train.size)
    classes.update(np.unique(y_train))
fpmeans = np.array(fpmeans)
fpnum = np.array(fpnum)
# Weight each file's channel means by its share of the total sample count so
# the combined mean equals the mean over all samples.
fpweights = fpnum.astype(np.float64) / np.sum(fpnum)
cnmeans = np.average(fpmeans, axis=0, weights=fpweights) if fpnum.size > 1 else fpmeans
cnmeans = cnmeans.ravel()
# Announce the optional progress-log destination before doing any work.
if args.log:
    print('Saving training progress log to "{}"'.format(args.log))
assert(not os.path.exists(args.save))
assert(not args.log or not os.path.exists(args.log))

# analyze data
print('Analyzing training data ...')
per_file_means = []
per_file_counts = []
classes = set()
for train_path in props['data']['train']:
    X_train, y_train = utils.load_h5_db(train_path, True)
    # one mean per channel for this file (averaged over the remaining axes)
    per_file_means.append(np.mean(X_train, axis=(0, 2, 3)))
    per_file_counts.append(y_train.size)
    classes.update(np.unique(y_train))
fpmeans = np.array(per_file_means)
fpnum = np.array(per_file_counts)
# sample-count-weighted combination of the per-file channel means
fpweights = fpnum.astype(np.float64) / np.sum(fpnum)
if fpnum.size > 1:
    cnmeans = np.average(fpmeans, axis=0, weights=fpweights)
else:
    cnmeans = fpmeans
cnmeans = cnmeans.ravel()
print('{} training samples'.format(fpnum.sum()))