# Instantiate the Binary Sparse Coding model (Expectation Truncation variant)
model = BSC_ET(D, H, Hprime, gamma, to_learn=['W', 'sigma', 'pi', 'mu'])
data = {'y': patches}
out_fname = output_path + "/data.h5"

# Set up logging/output: print scalars to the terminal, store arrays to HDF5
print_list = ('T', 'Q', 'pi', 'sigma', 'N', 'MAE', 'L')
dlog.set_handler(print_list, TextPrinter)
h5store_list = ('W', 'pi', 'sigma', 'y', 'MAE', 'N', 'L', 'Q', 'mu')
dlog.set_handler(h5store_list, StoreToH5, output_path + '/result.h5')

###### Initialize model #######
# Initialize (random) model parameters
model_params = model.standard_init(data)
#model_params['mu'] = np.mean(data['y'], axis=0)

# Create and start EM annealing
em = EM(model=model, anneal=anneal)
em.data = data
em.lparams = model_params
em.run()

dlog.close(True)
pprint("Done")
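# --- Annealing schedule (hedged sketch) --------------------------------------
# The scripts in this section assume an `anneal` object defined earlier. The
# sketch below shows one plausible setup using prosper's LinearAnnealing; the
# module path and the schedule values are assumptions for illustration, not
# taken from the original script.
from prosper.em.annealing import LinearAnnealing

anneal = LinearAnnealing(100)                       # run 100 EM iterations
anneal['T'] = [(0.0, 2.0), (0.7, 1.0)]             # cool temperature from 2.0 to 1.0
anneal['Ncut_factor'] = [(0.0, 0.0), (0.66, 1.0)]  # gradually enable data selection
anneal['anneal_prior'] = False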
W_gt = W_gt.reshape((H, D))

# Prepare model...
model = MCA_ET(D, H, Hprime, gamma)
gt_params = {"W": W_gt, "pi": 2.0 / H, "sigma": 1.00}

# Generate training data (split evenly across MPI ranks)
my_N = N // comm.size
my_data = model.generate_data(gt_params, my_N)
dlog.append("y", my_data["y"][0:25, :])

# Initialize model parameters (to be learned)
params = {
    # 'W' : W_gt,
    "W": np.abs(5 + np.random.normal(size=W_gt.shape)),
    "pi": 2.0 / H,
    "sigma": 5.00,
}
# params = model.noisify_params(params, anneal)
params = comm.bcast(params)  # all ranks start from rank 0's initialization

# Create and start EM annealing
em = EM(model=model, anneal=anneal)
em.data = my_data
em.lparams = params
em.run()

dlog.close()
# print(em.lparams['W'])
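# --- Ground-truth bars dictionary (illustrative sketch) -----------------------
# The W_gt reshaped above typically comes from a standard "bars" test: H
# generative fields, each a single horizontal or vertical bar on a
# sqrt(D) x sqrt(D) grid. This generator is written for illustration here and
# is not the original script's code.
import numpy as np

def bars_dict(H, bar_value=10.0):
    """Return an (H, D) array of horizontal and vertical bars, D = (H//2)**2."""
    size = H // 2
    W = np.zeros((H, size, size))
    for i in range(size):
        W[i, i, :] = bar_value          # horizontal bars
        W[size + i, :, i] = bar_value   # vertical bars
    return W.reshape((H, size * size))

# Hypothetical usage: W_gt = bars_dict(H)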
# Initialize each generative field with the mean of a disjoint slice of the data
mean_W = np.zeros((H, D))
pics_per_H = my_N // H
for indH in range(H):
    mean_W_tmp = np.sum(
        comm.allreduce(my_data["y"][indH * pics_per_H : (indH + 1) * pics_per_H, :]),
        axis=0,
    ) / (my_N // H * comm.size)
    mean_W[indH, :] = mean_W_tmp
pies = 1.0 / H

# Initialize model parameters (to be learned)
if W_learn == "mean":
    lparams = {"W": mean_W, "pi": pi_learn, "sigma": sigma_learn, "mu": mu_learn}
elif W_learn == "ground_truth":
    lparams = {"W": W_gt, "pi": pi_gt, "sigma": sigma_gt, "mu": mu_gt}
else:
    lparams = {"W": W_learn, "pi": pi_learn, "sigma": sigma_learn, "mu": mu_learn}
lparams = model.noisify_params(lparams, anneal)

# Create and start EM annealing
em = EM(model=model, anneal=anneal)
em.data = my_data
em.lparams = lparams
em.run()

pprint("Done with ET, calculating MAE...")
MAE = check_basis(em.lparams["W"], W_gt)
if MAE is None:
    dlog.append("MAE", np.inf)  # matching failed; log infinite error
else:
    dlog.append("MAE", MAE[0])
dlog.close(True)
pprint("Done")
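# --- Basis comparison (hypothetical sketch) -----------------------------------
# check_basis is assumed to match each learned field to its closest ground-truth
# field and report the mean absolute error. This greedy version is a
# hypothetical stand-in written for illustration, not the library's
# implementation.
import numpy as np

def check_basis_sketch(W_learned, W_gt):
    """Greedily pair rows of W_learned with rows of W_gt; return (MAE, matching)."""
    H = W_gt.shape[0]
    dist = np.abs(W_learned[:, None, :] - W_gt[None, :, :]).mean(axis=2)  # (H, H)
    matching = np.full(H, -1)
    errors = []
    for _ in range(H):
        l, g = np.unravel_index(np.argmin(dist), dist.shape)
        matching[g] = l
        errors.append(dist[l, g])
        dist[l, :] = np.inf   # each learned field is used at most once
        dist[:, g] = np.inf   # each ground-truth field is matched at most once
    return np.mean(errors), matching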
# As a control, we also append the initial values
dlog.append_all({
    'W': lparams['W'],
    'pi': lparams['pi'],
    'sigma': lparams['sigma'],
})
comm.Barrier()

#==================== Create and run EM =====================================
dlog.progress("Starting EM")
em = EM(model=model, anneal=anneal)
em.data = {'y': my_y, 'y_rc': my_y_rc}
em.lparams = lparams
em.run(verbose=True)
lparams = em.lparams
#dlog.close()
comm.Barrier()

#============================================================================
# Extract final generative fields (W)
if "rev_corr" in lparams and lparams['rev_corr'].max() > 1e-7:
    dlog.progress("Using actual reverse correlated fields")
    RF = lparams["rev_corr"]
    H, D = RF.shape
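# --- Reverse correlation (illustrative sketch) --------------------------------
# The rev_corr fields used above are, conceptually, each hidden unit's
# posterior-activity-weighted average of the input frames. This numpy sketch
# shows that computation in isolation; the function and variable names are
# assumptions, not the library's API.
import numpy as np

def reverse_correlate(posteriors, stimuli, eps=1e-9):
    """posteriors: (N, H) posterior activations; stimuli: (N, D) input frames.

    Returns an (H, D) array of reverse-correlated fields.
    """
    weights = posteriors.sum(axis=0)[:, None] + eps   # (H, 1) normalizers
    return posteriors.T @ stimuli / weights           # (H, D)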
W_gt += np.random.normal(size=(H, D), scale=0.5)

# Prepare model...
model = MMCA_ET(D, H, Hprime, gamma)
gt_params = {'W': W_gt, 'pi': 2. / H, 'sigma': 0.10}

# Generate training data (split evenly across MPI ranks)
my_N = N // comm.size
my_data = model.generate_data(gt_params, my_N)
dlog.append('y', my_data['y'][0:25, :])

# Initialize model parameters (to be learned)
params = {
    # 'W' : W_gt,
    'W': np.random.normal(size=W_gt.shape),
    'pi': 1. / H,
    'sigma': 5.00
}
#params = model.noisify_params(params, anneal)
params = comm.bcast(params)  # all ranks start from rank 0's initialization

# Create and start EM annealing
em = EM(model=model, anneal=anneal)
em.data = my_data
em.lparams = params
em.run()

dlog.close(True)
pprint("Done")
#print(em.lparams['W'])
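# --- Inspecting the result (optional sketch) ----------------------------------
# After em.run(), the learned fields live in em.lparams['W'] with shape (H, D),
# matching the (H, D) convention used throughout these scripts. A quick way to
# eyeball them on a bars test; the matplotlib plotting is an addition for
# illustration, not part of the original script.
import numpy as np
import matplotlib.pyplot as plt

W = em.lparams['W']
side = int(np.sqrt(W.shape[1]))                  # assumes square patches
fig, axes = plt.subplots(1, W.shape[0], figsize=(2 * W.shape[0], 2))
for h, ax in enumerate(np.ravel(axes)):
    ax.imshow(W[h].reshape(side, side), cmap='gray')
    ax.set_axis_off()
plt.show()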