def simulate_and_compute_cumul(mu, kernels, Alpha, T, hM=20):
    """Simulate a Hawkes process and compute its empirical cumulants.

    Simulates a multivariate Hawkes process with baseline intensities `mu`
    and excitation `kernels` over horizon `T`, then builds a `Cumulants`
    object from the realized point process, sets the theoretical values
    derived from the true branching matrix `Alpha`, and prints the relative
    errors between theoretical and estimated second/third cumulants.

    Parameters
    ----------
    mu : array-like, shape (d,)
        Baseline intensities of the d-dimensional Hawkes process.
    kernels : nested sequence of kernel objects
        Excitation kernels accepted by ``mlpp.pp.hawkes.Hawkes``.
    Alpha : ndarray, shape (d, d)
        True branching matrix; ``R_true = (I - Alpha)^{-1}``.
    T : float
        Simulation horizon.
    hM : float, optional
        Truncation parameter ``hMax`` passed to ``Cumulants`` (default 20).

    Returns
    -------
    Cumulants
        The cumulants object with both empirical and theoretical values set.
    """
    import mlpp.pp.hawkes as hk
    # numpy was previously assumed to be in the enclosing module's scope;
    # import it locally like every other dependency of this function.
    import numpy as np

    h = hk.Hawkes(kernels=kernels, mus=list(mu))
    h.simulate(T)

    # Build the empirical cumulants from the simulated full process.
    from nphc.utils.cumulants import Cumulants
    N = h.get_full_process()
    cumul = Cumulants(N, hMax=hM)

    # Theoretical values follow from R = (I - Alpha)^{-1}.
    from scipy.linalg import inv
    d = Alpha.shape[0]
    R_true = inv(np.eye(d) - Alpha)
    cumul.set_R_true(R_true)
    cumul.set_mu_true(mu)
    cumul.set_all()

    from nphc.utils.metrics import rel_err
    print("rel_err on C = ", rel_err(cumul.C_th, cumul.C))
    print("rel_err on K_c = ", rel_err(cumul.K_c_th, cumul.K_c))
    return cumul
for dir_name in list_dir_name:
    print("Starting for dataset in ", dir_name)
    # Collect the per-copy process files, excluding any already-augmented
    # '*with_cumul*' outputs. NOTE: the original removed items from L while
    # iterating over it, which silently skips entries that follow a removed
    # one; filtering into a new list is the correct form.
    L = glob.glob(dir_name + '/copy_*')
    L = [x for x in L if 'with_cumul' not in x]
    L.sort()
    print(len(L))

    import gzip, pickle
    N = []
    for x in L:
        # Files were written by Python 2; latin1 decoding keeps raw bytes.
        with gzip.open(x, 'r') as f:
            process = pickle.load(f, encoding='latin1')
        N.append(process)

    from nphc.utils.cumulants import Cumulants
    cumul = Cumulants(N)
    # we set H = 1 hour (in seconds)
    H = 3600
    cumul.hMax = H
    cumul.set_all(H)

    # protocol=2 keeps the pickle readable from Python 2.
    with gzip.open(dir_name + '/process_with_cumul.pkl.gz', 'wb') as ff:
        pickle.dump(cumul, ff, protocol=2)
for dir_name in list_dir_name:
    print("Starting for dataset in ", dir_name)
    # Gather the raw per-copy process dumps; skip already-processed
    # '*with_cumul*' files. The original mutated L inside `for x in L`,
    # which skips list elements after each removal — build a filtered
    # list instead.
    L = glob.glob(dir_name + '/copy_*')
    L = [x for x in L if 'with_cumul' not in x]
    L.sort()
    print(len(L))

    import gzip, pickle
    N = []
    for x in L:
        # encoding='latin1' lets Python 3 load Python 2 pickles.
        with gzip.open(x, 'r') as f:
            process = pickle.load(f, encoding='latin1')
        N.append(process)

    from nphc.utils.cumulants import Cumulants
    cumul = Cumulants(N)
    # we set H = 1 hour (in seconds)
    H = 3600
    cumul.hMax = H
    cumul.set_all(H)

    # protocol=2 so the output stays loadable from Python 2.
    with gzip.open(dir_name + '/process_with_cumul.pkl.gz', 'wb') as ff:
        pickle.dump(cumul, ff, protocol=2)