Ejemplo n.º 1
0
def exp_real_with_1_comp():
    """Visualize a fitted single-component Gaussian-mixture diffusion kernel.

    Loads the SCEDC 1999-2019 24-hour catalog and the saved model
    parameters, rebuilds the kernel and Hawkes intensity, then renders
    the learned spatial kernel (PDF) and an intensity map (HTML).
    """
    # Real earthquake sequences: drop the first event of each sequence
    # and keep only the first three columns (presumably t, x, y — confirm).
    seqs = np.load('data/real/SCEDC-1999-2019-24hrs.npy')
    seqs = seqs[:, 1:, :3]
    learned = np.load('results/real-24hrs-1-gcomp.npz')
    adapter = utils.DataAdapter(init_data=seqs)

    mu = learned['mu']
    beta = learned['beta']
    # Rebuild the kernel with the hyper-parameters used at fit time,
    # plugging in the saved network weights.
    kernel = GaussianMixtureDiffusionKernel(
        n_comp=1, layers=[10], C=1., beta=beta,
        SIGMA_SHIFT=.1, SIGMA_SCALE=.5, MU_SCALE=.01,
        Wss=learned['Wss'], bss=learned['bss'], Wphis=learned['Wphis'])
    lam = HawkesLam(mu, kernel, maximum=1e+3)
    print("mu", mu)
    print("beta", beta)

    # Plot the first (only) Gaussian diffusion component of the kernel.
    utils.plot_spatial_kernel(
        "results/learned-kernel-SCEDC-1999-2019-24hrs.pdf",
        kernel.gdks[0],
        S=[[-1., 1.], [-1., 1.]],
        grid_size=50)
    # Render the conditional intensity of one sequence on a map,
    # over the adapter's own spatial extent.
    utils.spatial_intensity_on_map(
        "results/map-SCEDC-1999-2019-24hrs.html",
        adapter, lam, seqs,
        seq_ind=3000, t=8.0,
        xlim=adapter.xlim,
        ylim=adapter.ylim,
        ngrid=200)
Ejemplo n.º 2
0
def exp_real_with_2_comp():
    """Simulate point-process sequences from a fitted diffusion kernel.

    Loads the rescaled ambulance data and the fitted Gaussian-mixture
    parameters, rebuilds the Hawkes intensity, draws 100 simulated
    sequences, restores them to the original scale, and saves the result.

    NOTE(review): despite the function name, the kernel is built with
    n_comp=1, and mu is hard-coded to .1 instead of read from the
    params file — confirm both are intentional.
    """
    # REAL
    seqs = np.load(
        '../Spatio-Temporal-Point-Process-Simulator/data/rescale.ambulance.perday.npy'
    )
    # Drop the first event of each sequence; keep the first three columns.
    seqs = seqs[:, 1:, :3]
    fitted = np.load(
        '../Spatio-Temporal-Point-Process-Simulator/data/rescale_ambulance_mle_gaussian_mixture_params.npz'
    )
    adapter = utils.DataAdapter(init_data=seqs)

    mu = .1  # params['mu']
    beta = fitted['beta']
    kernel = GaussianMixtureDiffusionKernel(
        n_comp=1, layers=[5], C=1., beta=beta,
        SIGMA_SHIFT=.1, SIGMA_SCALE=.5, MU_SCALE=.01,
        Wss=fitted['Wss'], bss=fitted['bss'], Wphis=fitted['Wphis'])
    lam = HawkesLam(mu, kernel, maximum=1e+3)
    print("mu", mu)
    print("beta", beta)
    print(fitted['Wphis'].shape)

    # Draw simulated sequences on the normalized time/space window.
    pp = SpatialTemporalPointProcess(lam)
    points, sizes = pp.generate(
        T=[0., 10.],
        S=[[-1., 1.], [-1., 1.]],
        batch_size=100,
        verbose=True)
    restored = adapter.restore(points)  # map back to original coordinates
    print(restored)
    print(sizes)
    np.save('results/ambulance-simulation.npy', restored)
            print('[%s] Train cost:\t%f' % (arrow.now(), avg_train_cost),
                  file=sys.stderr)
            print('[%s] Test cost:\t%f' % (arrow.now(), avg_test_cost),
                  file=sys.stderr)


if __name__ == "__main__":
    np.set_printoptions(suppress=True)
    # np.random.seed(1)
    # tf.set_random_seed(1)

    with tf.Session() as sess:
        # data preparation
        data = np.load("data/northcal.earthquake.perseason.npy")
        da = utils.DataAdapter(init_data=data,
                               S=[[-1., 1.], [-1., 1.]],
                               T=[0., 1.])
        # Normalize into S x T, drop the first event of each sequence,
        # and cap sequences at 50 events.
        data = da.normalize(data)[:, 1:51, :]
        # Shift every exactly-zero entry to 1.0.
        # NOTE(review): presumably zeros are padding and this avoids
        # zero values downstream — confirm against the model code.
        mask = data == 0.
        mask = mask.astype(float)
        data = data + mask
        print(data)
        # print(data.shape)

        # model configurations
        lstm_hidden_size = 10
        # training configurations
        step_size = np.shape(data)[1]  # events per sequence after trimming
        batch_size = 5
        test_ratio = 0.3
        epoches = 30
        # NOTE(review): snippet is truncated here — model construction and
        # the training loop are not visible in this chunk.
Ejemplo n.º 4
0
        # save all training cost into numpy file.
        np.savetxt("results/robbery_mle_train_cost.txt",
                   all_train_cost,
                   delimiter=",")


if __name__ == "__main__":
    # Unittest example
    # Normalized spatial window and time horizon used throughout.
    S = [[-1., 1.], [-1., 1.]]
    T = [0., 10.]
    data = np.load(
        '../Spatio-Temporal-Point-Process-Simulator/data/rescale.ambulance.perday.npy'
    )
    # Keep the first 320 sequences, capped at 50 events each.
    data = data[:320,
                1:51, :]  # remove the first element in each seqs, since t = 0
    da = utils.DataAdapter(init_data=data, S=S, T=T)
    # data = np.load('../Spatio-Temporal-Point-Process-Simulator/data/northcal.earthquake.perseason.npy')
    # da   = utils.DataAdapter(init_data=data)
    seqs = da.normalize(data)
    print(da)
    print(seqs.shape)

    # training model
    with tf.Session() as sess:
        batch_size = 32
        epoches = 10
        layers = [5]   # hidden-layer sizes for the kernel network
        n_comp = 5     # number of Gaussian mixture components

        # NOTE(review): snippet is truncated mid-call — the remaining
        # MLE_Hawkes_Generator arguments and the training call are not
        # visible in this chunk.
        ppg = MLE_Hawkes_Generator(T=T,
                                   S=S,
Ejemplo n.º 5
0
        np.savetxt("results/robbery_rl_train_cost.txt",
                   all_train_cost,
                   delimiter=",")


if __name__ == "__main__":
    # Unittest example

    # np.random.seed(0)
    # tf.set_random_seed(1)

    data = np.load(
        '../Spatio-Temporal-Point-Process-Simulator/data/apd.robbery.permonth.npy'
    )
    # data = np.load('../Spatio-Temporal-Point-Process-Simulator/data/northcal.earthquake.perseason.npy')
    da = utils.DataAdapter(init_data=data)
    seqs = da.normalize(data)
    seqs = seqs[:, 1:, :]  # remove the first element in each seqs, since t = 0
    print(da)
    print(seqs.shape)

    # training model
    with tf.Session() as sess:
        # model configuration
        batch_size = 10
        epoches = 30
        lr = 1e-3      # learning rate
        T = [0., 10.]  # normalized time horizon
        S = [[-1., 1.], [-1., 1.]]  # normalized spatial window
        layers = [5]   # hidden-layer sizes for the kernel network
        n_comp = 5     # number of Gaussian mixture components
        # NOTE(review): snippet is truncated here — model construction and
        # training are not visible in this chunk.