def run_synth_test():
    """Run a test with synthetic data and MCMC inference.

    Builds the parallel test harness, optionally warm-starts the sampler
    from a pickled state (``--x0``), then estimates the marginal log
    likelihood of the data via parallel annealed importance sampling and
    pickles the result into the results directory.

    NOTE(review): an identical ``run_synth_test`` is defined again later in
    this file; this first definition is shadowed at import time (dead code).
    """
    options, popn, data, client, popn_true, x_true = initialize_parallel_test_harness()

    # If x0 specified, load x0 from file
    x0 = None
    if options.x0_file is not None:
        with open(options.x0_file, 'r') as f:
            print "Initializing with state from: %s" % options.x0_file
            prev_x0 = cPickle.load(f)
            # A list means a chain of samples was pickled; take the last state.
            if isinstance(prev_x0, list):
                x0 = prev_x0[-1]
            else:
                mle_x0 = prev_x0
                # HACK: We're assuming x0 came from a standard GLM
                mle_model = make_model('standard_glm', N=data['N'])
                mle_popn = Population(mle_model)
                mle_popn.set_data(data)

                # Sample a state of the target model, then overwrite it with
                # the converted MLE parameters so shapes/keys line up.
                x0 = popn.sample(None)
                x0 = convert_model(mle_popn, mle_model, mle_x0, popn, popn.model, x0)

    # Hard-coded switch: set True to reuse a previously pickled estimate.
    use_existing = False

    fname = os.path.join(options.resultsDir, '%s_marginal_lkhd.pkl' % options.model)
    if use_existing and \
       os.path.exists(fname):
        print "Found existing results"
        with open(fname) as f:
            marg_lkhd = cPickle.load(f)
    else:
        # Number of AIS runs; 50 annealing steps per temperature schedule.
        N_samples = 10
        popn_true.set_data(data)

        # Estimate the marginal log likelihood
        print "Performing parallel inference"
        marg_lkhd, log_weights = parallel_ais(client, data,
                                              x0=x0,
                                              N_samples=N_samples,
                                              steps_per_B=50,
                                              resdir=options.resultsDir)

        # Save results (protocol=-1: highest available pickle protocol)
        print "Saving results to %s" % fname
        with open(fname, 'w') as f:
            cPickle.dump((marg_lkhd, log_weights), f, protocol=-1)
def run_synth_test():
    """Run a test with synthetic data and MCMC inference.

    Builds the parallel test harness, optionally warm-starts the sampler
    from a pickled state (``--x0``), then estimates the marginal log
    likelihood of the data via parallel annealed importance sampling and
    pickles the result into the results directory.

    NOTE(review): this is a redefinition of ``run_synth_test`` from earlier
    in the file (identical logic, reformatted); at import time this second
    definition wins. The duplicate should probably be removed.
    """
    options, popn, data, client, popn_true, x_true = initialize_parallel_test_harness()

    # If x0 specified, load x0 from file
    x0 = None
    if options.x0_file is not None:
        with open(options.x0_file, "r") as f:
            print "Initializing with state from: %s" % options.x0_file
            prev_x0 = cPickle.load(f)
            # A list means a chain of samples was pickled; take the last state.
            if isinstance(prev_x0, list):
                x0 = prev_x0[-1]
            else:
                mle_x0 = prev_x0
                # HACK: We're assuming x0 came from a standard GLM
                mle_model = make_model("standard_glm", N=data["N"])
                mle_popn = Population(mle_model)
                mle_popn.set_data(data)

                # Sample a state of the target model, then overwrite it with
                # the converted MLE parameters so shapes/keys line up.
                x0 = popn.sample(None)
                x0 = convert_model(mle_popn, mle_model, mle_x0, popn, popn.model, x0)

    # Hard-coded switch: set True to reuse a previously pickled estimate.
    use_existing = False

    fname = os.path.join(options.resultsDir, "%s_marginal_lkhd.pkl" % options.model)
    if use_existing and os.path.exists(fname):
        print "Found existing results"
        with open(fname) as f:
            marg_lkhd = cPickle.load(f)
    else:
        # Number of AIS runs; 50 annealing steps per temperature schedule.
        N_samples = 10
        popn_true.set_data(data)

        # Estimate the marginal log likelihood
        print "Performing parallel inference"
        marg_lkhd, log_weights = parallel_ais(
            client, data, x0=x0, N_samples=N_samples, steps_per_B=50, resdir=options.resultsDir
        )

        # Save results (protocol=-1: highest available pickle protocol)
        print "Saving results to %s" % fname
        with open(fname, "w") as f:
            cPickle.dump((marg_lkhd, log_weights), f, protocol=-1)
def fit_latent_network_to_mle(): """ Run a test with synthetic data and MCMC inference """ options, popn, data, popn_true, x_true = initialize_test_harness() import pdb; pdb.set_trace() # Load MLE parameters from command line mle_x = None if options.x0_file is not None: with open(options.x0_file, 'r') as f: print "Initializing with state from: %s" % options.x0_file mle_x = cPickle.load(f) mle_model = make_model('standard_glm', N=data['N']) mle_popn = Population(mle_model) mle_popn.set_data(data) # Create a location sampler print "Initializing latent location sampler" loc_sampler = LatentLocationUpdate() loc_sampler.preprocess(popn) # Convert the mle results into a weighted adjacency matrix x_aw = popn.sample(None) x_aw = convert_model(mle_popn, mle_model, mle_x, popn, popn.model, x_aw) # Get rid of unnecessary keys del x_aw['glms'] # Fit the latent distance network to a thresholded adjacency matrix ws = np.sort(np.abs(x_aw['net']['weights']['W'])) wperm = np.argsort(np.abs(x_aw['net']['weights']['W'])) nthrsh = 20 threshs = np.arange(ws.size, step=ws.size/nthrsh) res = [] N = popn.N for th in threshs: print "Fitting network for threshold: %.3f" % th A = np.zeros_like(ws, dtype=np.int8) A[wperm[th:]] = 1 A = A.reshape((N,N)) # A = (np.abs(x_aw['net']['weights']['W']) >= th).astype(np.int8).reshape((N,N)) # Make sure the diag is still all 1s A[np.diag_indices(N)] = 1 x = copy.deepcopy(x_aw) x['net']['graph']['A'] = A smpls = fit_latent_network_given_A(x, loc_sampler) # Index the results by the overall sparsity of A key = (np.sum(A)-N) / (np.float(np.size(A))-N) res.append((key, smpls)) # Save results results_file = os.path.join(options.resultsDir, 'fit_latent_network_results.pkl') print "Saving results to %s" % results_file with open(results_file, 'w') as f: cPickle.dump(res, f)