activations='relu', constrain_norm=constrain_norm) return out n = 5000 dropout_rate = min(1000. / (1000. + n), 0.5) embedding_dropout = 0.1 embedding_l2 = 0.1 epochs = int(1500000. / float(n)) batch_size = 100 x, z, t, y, g_true = data_generator.demand(n=n, seed=1, ypcor=0.5, use_images=True, test=False) print("Data shapes:\n\ Features:{x},\n\ Instruments:{z},\n\ Treament:{t},\n\ Response:{y}".format(**{ 'x': x.shape, 'z': z.shape, 't': t.shape, 'y': y.shape })) # Build and fit treatment model
def datafunction(n, s, images=images, test=False):
    """Draw a demand-simulation dataset of size ``n`` with seed ``s``.

    Thin wrapper around ``data_generator.demand`` that pins the
    endogeneity level (``ypcor=0.5``) used throughout this experiment.
    """
    return data_generator.demand(n=n, seed=s, ypcor=0.5,
                                 use_images=images, test=test)
''' npr = importr('np') y_R = robjects.FloatVector(list(y.flatten())) (x_eval, t_eval), y_true = test_points(df, 10000) mod = npr.npregiv(y_R, t, z, x=x, zeval=t_eval, xeval=x_eval, method="Tikhonov", p=0, optim_method="BFGS") return ((y_true - to_array(mod.rx2('phi.eval')))**2).mean() def prepare_file(filename): if not os.path.exists(filename): with open(filename, 'w') as f: f.write('n,seed,endo,mse\n') df = lambda n, s, test: data_generator.demand(n, s, ypcor=args.endo, test=test) x, z, t, y, g = df(args.n_samples, args.seed, False) mse = fit_and_evaluate(x, z, t, y, df) DONE = True # turn off the heartbeat prepare_file(args.results) with open(args.results, 'a') as f: f.write('%d,%d,%f,%f\n' % (args.n_samples, args.seed, args.endo, mse))