def train_one_model(task, mdl_dir, archi_fun, x_arr, y_arr, label_arr):
    """Train the regression models indexed by the half-open range [task[0], task[1]).

    One such call is intended to be run by a single worker process.
    """
    x, y, label = pp.getNumpy(x_arr, y_arr, label_arr)
    n_data = x.shape[0]
    for i in range(task[0], task[1]):
        maski = i == label
        datai = {'x': x[maski], 'y': y[maski]}
        # the architecture is chosen from the fraction of data falling in this cluster
        net = archi_fun(datai['x'].shape[0] / float(n_data))
        config = genTrainConfig(network=net, batch_size=64, test_batch_size=1024,
                                outdir=mdl_dir,
                                outname='model_%d_of_%s.pt' % (i, joinNumber(net)))
        trainOne(config, datai)
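
# A minimal sketch of how the half-open (lo, hi) task ranges consumed by
# train_one_model could be produced before dispatching worker processes.
# This helper is illustrative only and not part of the original module; the
# shared arrays (x_arr, y_arr, label_arr) would still come from the pp module.
def split_tasks(n_model, n_proc):
    """Split indices 0..n_model-1 into n_proc contiguous half-open (lo, hi) ranges."""
    base, rem = divmod(n_model, n_proc)
    tasks, lo = [], 0
    for i in range(n_proc):
        hi = lo + base + (1 if i < rem else 0)
        tasks.append((lo, hi))
        lo = hi
    return tasks
# e.g. split_tasks(10, 4) -> [(0, 3), (3, 6), (6, 8), (8, 10)]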
def main():
    net = GaoNetBN([2, 10, 2])
    net.eval()
    y = net.eval(np.random.random(2))
    print(y)
    # train a mapping with those variables
    net.train()
    N = 50
    xin = np.random.random((N, 2))
    y = np.c_[0.5 * xin[:, 0], 0.6 * xin[:, 1] + 0.4]
    data_dict = {'x': xin, 'y': y}
    config = genTrainConfig(network=net)
    trainOne(config, data_dict, net=net, scalex=False, scaley=False)
    net.printWeights()
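
# A small sanity check one could append to main(), assuming net.eval accepts a
# 1-D numpy input and returns a length-2 prediction as suggested by the call
# above. It compares predictions on fresh samples against the analytic target
# y = [0.5*x0, 0.6*x1 + 0.4] used for training. Illustrative only.
def check_fit(net, n_test=5, tol=5e-2):
    xs = np.random.random((n_test, 2))
    target = np.c_[0.5 * xs[:, 0], 0.6 * xs[:, 1] + 0.4]
    pred = np.array([net.eval(x) for x in xs])
    return np.max(np.abs(pred - target)) < tol
# usage: after trainOne(...) one might call net.eval() and then check_fit(net)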
def run_the_training(args):
    """Run the MoE training without any clustering information, letting the model discover clusters on its own."""
    cfg, lbl = util.get_label_cfg_by_args(args)
    uid = cfg['uniqueid']
    print('We are playing with %s' % uid)
    data = npload(cfg['file_path'], uid)
    data_feed = {'x': data[cfg['x_name']], 'y': data[cfg['y_name']]}
    dimx = data_feed['x'].shape[1]
    dimy = data_feed['y'].shape[1]
    n_model = args.k
    # create the network: a gating net plus n_model experts sharing a ~300-unit hidden budget
    net = MoMNet([dimx, 100, n_model],
                 [[dimx, int(np.ceil(300.0 / n_model)), dimy] for _ in range(n_model)])
    net.argmax = False
    config = genTrainConfig(outdir='models/%s/mom' % uid, outname='mom_model.pt', overwrite=False)
    if args.eval:
        mdl_path = os.path.join(config['outdir'], config['outname'])
        eval_fun = momLoader(mdl_path, withclus=args.prob, argmax=False)
        predy = eval_fun(data_feed['x'])
        return {'x': data_feed['x'], 'y': data_feed['y'], 'predy': predy}
    trainOne(config, data_feed, net=net)
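
# A small illustration of the expert sizing used in run_the_training: a fixed
# hidden-unit budget of roughly 300 is split evenly across the k experts, so a
# larger k yields smaller individual experts. The helper name and the
# hidden_budget parameter are illustrative, not part of the original module.
def expert_layers(dimx, dimy, n_model, hidden_budget=300.0):
    """Layer sizes [in, hidden, out] for each of the n_model expert networks."""
    hidden = int(np.ceil(hidden_budget / n_model))
    return [[dimx, hidden, dimy] for _ in range(n_model)]
# e.g. expert_layers(4, 2, 5) -> five copies of [4, 60, 2]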
def train_one_classifier(task, keys, uid, lbl_name, clas, label_data, x_arr):
    """A single process that trains one classifier per assigned key.

    Parameters
    ----------
    task: list of two integers, specifying which keys this process handles
    keys: list of keys, works with task to locate the cluster strategy
    uid: str, unique id of the problem, used in the output path
    lbl_name: str, name of the labeling scheme, used in the output path
    clas: list of int, neural network architecture of the classifier
    label_data: dict, mapping from key to label array
    x_arr: shared numpy array, the feature vectors
    """
    x = pp.getNumpy(x_arr)
    for i in range(task[0], task[1]):
        key = keys[i]
        n_cluster = int(key)
        clas[-1] = n_cluster  # output layer size equals the number of clusters
        model_directory = 'models/%s/%s/%s' % (uid, lbl_name, key)
        if not os.path.exists(model_directory):
            os.makedirs(model_directory)
        label = label_data[key]
        datai = {'x': x, 'label': label, 'n_label': n_cluster}
        config = genTrainConfig(network=clas, batch_size=256, test_batch_size=2048,
                                outdir=model_directory,
                                outname='classifier_%d_of_%s.pt' % (n_cluster, joinNumber(clas)))
        trainOne(config, datai, is_reg_task=False)  # switch to a classification task
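
# A minimal sketch of how the (keys, task) pair consumed by train_one_classifier
# might be prepared from label_data, reusing the illustrative split_tasks helper
# above. The output directories then follow the 'models/<uid>/<lbl_name>/<key>'
# layout used inside the function. This helper is hypothetical, not part of the
# original module.
def make_classifier_tasks(label_data, n_proc):
    """Return the sorted keys and one (lo, hi) key range per worker process."""
    keys = sorted(label_data.keys(), key=int)  # keys are cluster counts stored as strings
    return keys, split_tasks(len(keys), n_proc)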