def execute_toy(self,mode="discrete",dt_max=3,N_samples=1000,network_priors={"p": 1.0, "allow_self_connections": False}): #np.random.seed(0) if mode == 'discrete': test_model1 = DiscreteTimeNetworkHawkesModelSpikeAndSlab(K=self.K, dt_max=dt_max, network_hypers=network_priors) test_model1.add_data(self.data) test_model1.initialize_with_standard_model(None) elif mode == 'continuous': test_model = ContinuousTimeNetworkHawkesModel(self.K, dt_max=dt_max, network_hypers=network_hypers) test_model.add_data(self.data,self.labels) ########################################################### # Fit the test model with Gibbs sampling ########################################################### samples = [] lps = [] #for itr in xrange(N_samples): # test_model1.resample_model() # lps.append(test_model1.log_probability()) # samples.append(test_model1.copy_sample()) test_model = DiscreteTimeStandardHawkesModel(K=self.K, dt_max=dt_max, allow_self_connections= False) #test_model.initialize_with_gibbs_model(test_model1) test_model.add_data(self.data) test_model.fit_with_bfgs() impulse = test_model1.impulse_model.impulses responses = {} #for i in range(3): # responses[str(i)] = [] # for j in range(3): # responses[str(i)].append({"key":"response: process "+str(i)+" to "+str(j),"values":[{"x":idx,"y":k} for idx,k in enumerate(impulse[:,i,j])]}) # with open('/Users/PauKung/hawkes_demo/webapp/static/data/response'+str(i)+'.json','w') as outfile: # json.dump({"out":responses[str(i)]},outfile) # calculate convolved basis rr = test_model.basis.convolve_with_basis(np.ones((dt_max*2,self.K))) impulse = np.sum(rr, axis=2) impulse[dt_max:,:] = 0 for i in range(3): responses[str(i)] = {"key":"response: process "+str(i),"values":[{"x":idx,"y":k} for idx,k in enumerate(impulse[:,i])]} with open('/Users/PauKung/hawkes_demo/webapp/static/data/response'+str(i)+'.json','w') as outfile: json.dump({"out":responses[str(i)]},outfile) rates = test_model.compute_rate()#self.compute_rate(test_model,mode,dt_max) 
inferred_rate = {} S,F = test_model.data_list[0] print F for i in range(3): inferred_rate[str(i)] = [] inferred_rate[str(i)].append({"key":"background", "values":[[j,test_model.bias[i]] for j in range(self.T)]}) #"values":[[j,test_model1.bias_model.lambda0[i]] for j in range(self.T)]}) for i in range(3): inferred_rate[str(i)].append({"key":"influence: process"+str(i), "values":[[idx,j-test_model.bias[i]] for idx,j in enumerate(rates[:,i])]}) with open('/Users/PauKung/hawkes_demo/webapp/static/data/infer'+str(i)+'.json','w') as outfile: json.dump({"out":inferred_rate[str(i)]},outfile) # output response function diagram (K x K timeseries) #plt.subplot(3,3,1) #for i in range(3): # for j in range(3): # plt.subplot(3,3,3*i+(j+1)) # plt.plot(np.arange(4),impulse[:,i,j],color="#377eb8", lw=2) #plt.savefig(fpath+"response_fun.png",transparent=True) # output background bias diagram (K x 1 timeseries) #plt.subplot(3,1,1) #for i in range(3): # plt.subplot(3,1,i+1) # plt.plot(np.arange(4),[test_model.bias_model.lambda0[i] for j in range(4)],color="#333333",lw=2) #plt.savefig(fpath+"bias.png",transparent=True) # output inferred rate diagram (K x 1 timeseries) #test_figure, test_handles = test_model.plot(color="#e41a1c", T_slice=(0,self.T)) #plt.savefig(fpath+"inferred_rate.png",transparent=True) print test_model.W return test_model.W, inferred_rate, responses
def fit_ct_network_hawkes_gibbs(S, S_test, dt, dt_max, output_path,
                                model_args=None, standard_model=None,
                                N_samples=100, time_limit=8 * 60 * 60):
    """Fit a continuous-time network Hawkes model to S via Gibbs sampling.

    Results are cached: if ``output_path`` already exists it is loaded
    from a gzipped pickle and returned without refitting.

    Parameters
    ----------
    S, S_test : array-like
        Discrete-time train/test spike arrays (T x K; K taken from
        ``S.shape[1]``); converted with ``convert_discrete_to_continuous``.
    dt : float
        Discretization bin width.
    dt_max : float
        Maximum impulse-response duration.
    output_path : str
        Path of the gzipped-pickle results cache.
    model_args : dict or None
        Extra keyword arguments for ``ContinuousTimeNetworkHawkesModel``
        (``None`` means no extras).
    standard_model : object or None
        Optional standard Hawkes model used to initialize the sampler.
    N_samples : int
        Maximum number of Gibbs iterations.
    time_limit : float
        Wall-clock budget in seconds; sampling stops early once exceeded.

    Returns
    -------
    Results
        ``Results(samples, timestamps, lps, hlls)``.
    """
    # FIX: the default was a shared mutable dict; use a None sentinel.
    if model_args is None:
        model_args = {}

    K = S.shape[1]
    S_ct, C_ct, T = convert_discrete_to_continuous(S, dt)
    S_test_ct, C_test_ct, T_test = convert_discrete_to_continuous(S_test, dt)

    # Check for existing Gibbs results
    if os.path.exists(output_path):
        with gzip.open(output_path, 'r') as f:
            print("Loading Gibbs results from ", output_path)
            results = pickle.load(f)
    else:
        print(
            "Fitting the data with a continuous time network Hawkes model using Gibbs sampling"
        )

        test_model = \
            ContinuousTimeNetworkHawkesModel(K, dt_max=dt_max, **model_args)
        test_model.add_data(S_ct, C_ct, T)

        # Initialize with the standard model parameters
        if standard_model is not None:
            test_model.initialize_with_standard_model(standard_model)

        # Gibbs sample
        samples = []
        lps = [test_model.log_probability()]
        hlls = [
            test_model.heldout_log_likelihood(S_test_ct, C_test_ct, T_test)
        ]
        times = [0]
        for _ in progprint_xrange(N_samples, perline=25):
            # Update the model, timing only the resample step
            tic = time.time()
            test_model.resample_model()
            times.append(time.time() - tic)

            samples.append(copy.deepcopy(test_model.get_parameters()))

            # Heldout log likelihood per iteration; per-iteration train
            # log probability is deliberately disabled, so lps keeps
            # only its initial value.
            # lps.append(test_model.log_probability())
            hlls.append(
                test_model.heldout_log_likelihood(S_test_ct, C_test_ct, T_test))

            # # Save this sample
            # with open(output_path + ".gibbs.itr%04d.pkl" % itr, 'w') as f:
            #     cPickle.dump(samples[-1], f, protocol=-1)

            # Check if time limit has been exceeded
            if np.sum(times) > time_limit:
                break

        # Get cumulative timestamps
        timestamps = np.cumsum(times)
        lps = np.array(lps)
        hlls = np.array(hlls)

        # Make results object
        results = Results(samples, timestamps, lps, hlls)

        # Save the Gibbs samples
        with gzip.open(output_path, 'w') as f:
            print("Saving Gibbs samples to ", output_path)
            pickle.dump(results, f, protocol=-1)

    return results
def fit_ct_network_hawkes_gibbs(S, K, C, dt, dt_max, output_path, standard_model=None): # Check for existing Gibbs results if os.path.exists(output_path + ".gibbs.pkl"): with open(output_path + ".gibbs.pkl", 'r') as f: print "Loading Gibbs results from ", (output_path + ".gibbs.pkl") (samples, timestamps) = cPickle.load(f) else: print "Fitting the data with a network Hawkes model using Gibbs sampling" S_ct, C_ct, T = convert_discrete_to_continuous(S, dt) # Set the network prior such that E[W] ~= 0.01 # W ~ Gamma(kappa, v) for kappa = 1.25 => v ~ 125 # v ~ Gamma(alpha, beta) for alpha = 10, beta = 10 / 125 E_W = 0.2 kappa = 10. E_v = kappa / E_W alpha = 5. beta = alpha / E_v network_hypers = { 'C': 1, "c": np.zeros(K).astype(np.int), "p": 0.25, "v": E_v, # 'kappa': kappa, # 'alpha': alpha, 'beta': beta, # 'p': 0.1, 'allow_self_connections': False } test_model = \ ContinuousTimeNetworkHawkesModel(K, dt_max=dt_max, network_hypers=network_hypers) test_model.add_data(S_ct, C_ct, T) # Initialize with the standard model parameters if standard_model is not None: test_model.initialize_with_standard_model(standard_model) plt.ion() im = plot_network(test_model.weight_model.A, test_model.weight_model.W, vmax=0.025) plt.pause(0.001) # Gibbs sample N_samples = 100 samples = [] lps = [test_model.log_probability()] timestamps = [] for itr in xrange(N_samples): if itr % 1 == 0: print "Iteration ", itr, "\tLL: ", lps[-1] im.set_data(test_model.weight_model.W_effective) plt.pause(0.001) # lps.append(test_model.log_probability()) lps.append(test_model.log_probability()) samples.append(test_model.resample_and_copy()) timestamps.append(time.clock()) print test_model.network.p # Save this sample with open(output_path + ".gibbs.itr%04d.pkl" % itr, 'w') as f: cPickle.dump(samples[-1], f, protocol=-1) # Save the Gibbs samples with open(output_path + ".gibbs.pkl", 'w') as f: print "Saving Gibbs samples to ", (output_path + ".gibbs.pkl") cPickle.dump((samples, timestamps), f, protocol=-1) 
return samples, timestamps
def fit_ct_network_hawkes_gibbs(S, K, C, dt, dt_max, output_path, standard_model=None): # Check for existing Gibbs results if os.path.exists(output_path + ".gibbs.pkl"): with open(output_path + ".gibbs.pkl", "r") as f: print "Loading Gibbs results from ", (output_path + ".gibbs.pkl") (samples, timestamps) = cPickle.load(f) else: print "Fitting the data with a network Hawkes model using Gibbs sampling" S_ct, C_ct, T = convert_discrete_to_continuous(S, dt) # Set the network prior such that E[W] ~= 0.01 # W ~ Gamma(kappa, v) for kappa = 1.25 => v ~ 125 # v ~ Gamma(alpha, beta) for alpha = 10, beta = 10 / 125 E_W = 0.2 kappa = 10.0 E_v = kappa / E_W alpha = 5.0 beta = alpha / E_v network_hypers = { "C": 1, "c": np.zeros(K).astype(np.int), "p": 0.25, "v": E_v, # 'kappa': kappa, # 'alpha': alpha, 'beta': beta, # 'p': 0.1, "allow_self_connections": False, } test_model = ContinuousTimeNetworkHawkesModel(K, dt_max=dt_max, network_hypers=network_hypers) test_model.add_data(S_ct, C_ct, T) # Initialize with the standard model parameters if standard_model is not None: test_model.initialize_with_standard_model(standard_model) plt.ion() im = plot_network(test_model.weight_model.A, test_model.weight_model.W, vmax=0.025) plt.pause(0.001) # Gibbs sample N_samples = 100 samples = [] lps = [test_model.log_probability()] timestamps = [] for itr in xrange(N_samples): if itr % 1 == 0: print "Iteration ", itr, "\tLL: ", lps[-1] im.set_data(test_model.weight_model.W_effective) plt.pause(0.001) # lps.append(test_model.log_probability()) lps.append(test_model.log_probability()) samples.append(test_model.resample_and_copy()) timestamps.append(time.clock()) print test_model.network.p # Save this sample with open(output_path + ".gibbs.itr%04d.pkl" % itr, "w") as f: cPickle.dump(samples[-1], f, protocol=-1) # Save the Gibbs samples with open(output_path + ".gibbs.pkl", "w") as f: print "Saving Gibbs samples to ", (output_path + ".gibbs.pkl") cPickle.dump((samples, timestamps), f, protocol=-1) 
return samples, timestamps
def fit_ct_network_hawkes_gibbs(S, S_test, dt, dt_max, output_path, model_args={}, standard_model=None, N_samples=100, time_limit=8*60*60): K = S.shape[1] S_ct, C_ct, T = convert_discrete_to_continuous(S, dt) S_test_ct, C_test_ct, T_test = convert_discrete_to_continuous(S_test, dt) # Check for existing Gibbs results if os.path.exists(output_path): with gzip.open(output_path, 'r') as f: print "Loading Gibbs results from ", output_path results = cPickle.load(f) else: print "Fitting the data with a continuous time network Hawkes model using Gibbs sampling" test_model = \ ContinuousTimeNetworkHawkesModel(K, dt_max=dt_max, **model_args) test_model.add_data(S_ct, C_ct, T) # Initialize with the standard model parameters if standard_model is not None: test_model.initialize_with_standard_model(standard_model) # Gibbs sample samples = [] lps = [test_model.log_probability()] hlls = [test_model.heldout_log_likelihood(S_test_ct, C_test_ct, T_test)] times = [0] for _ in progprint_xrange(N_samples, perline=25): # Update the model tic = time.time() test_model.resample_model() times.append(time.time() - tic) samples.append(copy.deepcopy(test_model.get_parameters())) # Compute log probability and heldout log likelihood # lps.append(test_model.log_probability()) hlls.append(test_model.heldout_log_likelihood(S_test_ct, C_test_ct, T_test)) # # Save this sample # with open(output_path + ".gibbs.itr%04d.pkl" % itr, 'w') as f: # cPickle.dump(samples[-1], f, protocol=-1) # Check if time limit has been exceeded if np.sum(times) > time_limit: break # Get cumulative timestamps timestamps = np.cumsum(times) lps = np.array(lps) hlls = np.array(hlls) # Make results object results = Results(samples, timestamps, lps, hlls) # Save the Gibbs samples with gzip.open(output_path, 'w') as f: print "Saving Gibbs samples to ", output_path cPickle.dump(results, f, protocol=-1) return results
results = [] results_dir = os.path.join("results", "hippocampus", "run002") for network, name in zip(networks, names): results_file = os.path.join(results_dir, "%s.pkl" % name) if os.path.exists(results_file): with open(results_file, "r") as f: result = pickle.load(f) results.append(result) continue print("Fitting model with ", name, " network.") model = ContinuousTimeNetworkHawkesModel( K, dt_max=1., network=network) model.add_data(S_train, C_train, T_train) model.resample_model() # Add the test data and then remove it. That way we can # efficiently compute its predictive log likelihood model.add_data(S_test, C_test, T - T_train) test_data = model.data_list.pop() ### Fit the model lls = [model.log_likelihood()] plls = [model.log_likelihood(test_data)] Weffs = [] Ps = [] Ls = [] for iter in progprint_xrange(N_samples, perline=25):
results = [] results_dir = os.path.join("results", "hippocampus", "run002") for network, name in zip(networks, names): results_file = os.path.join(results_dir, "%s.pkl" % name) if os.path.exists(results_file): with open(results_file, "r") as f: result = cPickle.load(f) results.append(result) continue print "Fitting model with ", name, " network." model = ContinuousTimeNetworkHawkesModel( K, dt_max=1., network=network) model.add_data(S_train, C_train, T_train) model.resample_model() # Add the test data and then remove it. That way we can # efficiently compute its predictive log likelihood model.add_data(S_test, C_test, T - T_train) test_data = model.data_list.pop() ### Fit the model lls = [model.log_likelihood()] plls = [model.log_likelihood(test_data)] Weffs = [] Ps = [] Ls = [] for iter in progprint_xrange(N_samples, perline=25):