def gen_event_times(self, run_time):
        decays = self.parameters['decay']
        adjacency = self.parameters['adjacency']
        baseline = self.parameters['baseline']

        hawkes_sim = SimuHawkesExpKernels(adjacency=adjacency,
                                          decays=decays,
                                          baseline=baseline,
                                          verbose=False)
        hawkes_sim.end_time = run_time
        hawkes_sim.simulate()

        # Generate Event Arrival Times
        limit_order_arrivals = hawkes_sim.timestamps[1]
        limit_order_arrivals = [('loa', time) for time in limit_order_arrivals]

        limit_order_cancellations = hawkes_sim.timestamps[2]
        limit_order_cancellations = [('loc', time)
                                     for time in limit_order_cancellations]

        market_order_arrivals = hawkes_sim.timestamps[0]
        market_order_arrivals = [('moa', time)
                                 for time in market_order_arrivals]

        # Merge into single array of event times
        event_times = limit_order_arrivals.copy()
        event_times.extend(limit_order_cancellations)
        event_times.extend(market_order_arrivals)
        event_times.sort(key=lambda x: x[1])

        return event_times
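# Consumption sketch (illustrative, not part of the original method): the returned
# list holds time-sorted (event_type, time) tuples, dispatched much like the fuller
# order-book simulate() example later in this collection.
event_times = [('moa', 0.7), ('loa', 1.2), ('loc', 3.4)]  # hypothetical output
for event_type, t in event_times:
    if event_type == 'loa':
        pass  # handle a limit order arrival at time t
    elif event_type == 'loc':
        pass  # handle a limit order cancellation at time t
    elif event_type == 'moa':
        pass  # handle a market order arrival at time t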
Example #2
def simulate_univariate_hawkes(mu, alpha, beta, run_time, seed=None):
    """
    Simulates a univariate Hawkes based on the parameters.

    :param mu, alpha, beta: parameters of the Hawkes process
    :param run_time: End time of the simulation
    :param seed: (optional) Seed for the random process.
    :return: Hawkes event times
    """
    # tick parameterizes exponential kernels by their integral (the branching
    # ratio), so the adjacency entry must be alpha / beta rather than alpha
    alpha = alpha / beta

    # Hawkes simulation
    n_nodes = 1  # dimension of the Hawkes process
    adjacency = alpha * np.ones((n_nodes, n_nodes))
    decays = beta * np.ones((n_nodes, n_nodes))
    baseline = mu * np.ones(n_nodes)
    hawkes_sim = SimuHawkesExpKernels(adjacency=adjacency,
                                      decays=decays,
                                      baseline=baseline,
                                      verbose=False,
                                      seed=seed)

    hawkes_sim.end_time = run_time
    hawkes_sim.simulate()
    event_times = hawkes_sim.timestamps[0]

    return event_times
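# Usage sketch (illustrative parameter values, assuming the function above and its
# imports are in scope): a univariate Hawkes process with baseline 0.5, excitation
# amplitude 0.4 and decay 1.0.
events = simulate_univariate_hawkes(mu=0.5, alpha=0.4, beta=1.0,
                                    run_time=100, seed=42)
print(f"simulated {len(events)} events; first few: {events[:5]}")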
Example #3
    def test_hawkes_set_timestamps(self):
        """...Test simulation after some timestamps have been set manually
        """
        decay = 0.5
        baseline = [0.2, 0.4]
        adjacency = [[0.2, 0.1], [0, 0.3]]

        hawkes = SimuHawkesExpKernels(
            baseline=baseline, decays=decay, adjacency=adjacency,
            verbose=False, seed=1393)

        # timestamps generated by a hawkes process with the same
        # characteristics simulated up to time 10
        original_timestamps = [
            np.array([7.096244, 9.389927]),
            np.array([0.436199, 0.659153, 2.622352, 3.095093,
                      7.189881, 8.068153, 9.240032]),
        ]

        hawkes.track_intensity(1)
        hawkes.set_timestamps(original_timestamps, 10)
        hawkes.end_time = 100
        hawkes.simulate()

        # Intensity up to time 10 is known
        first_intensities = hawkes.tracked_intensity[0][:10]
        np.testing.assert_array_almost_equal(
            first_intensities,
            [0.2, 0.2447256, 0.27988282, 0.24845138, 0.23549475,
             0.27078386, 0.26749709, 0.27473586, 0.24532959, 0.22749379]
        )

        # Ensure other jumps have occurred afterwards
        self.assertGreater(hawkes.n_total_jumps,
                           sum(map(len, original_timestamps)))
Example #4
    def setUp(self):
        np.random.seed(23982)
        self.n_nodes = 3
        self.baseline = np.random.rand(self.n_nodes)
        self.adjacency = np.random.rand(self.n_nodes, self.n_nodes) / 2
        self.decays = np.random.rand(self.n_nodes, self.n_nodes)

        self.adjacency[0, 0] = 0
        self.adjacency[-1, -1] = 0

        self.hawkes = SimuHawkesExpKernels(self.adjacency, self.decays,
                                           baseline=self.baseline, seed=203,
                                           verbose=False)
Example #5
def fit_exp_hawkes_and_simulate(train_times, decay, end_time):
    learner = HawkesExpKern(decay, verbose=True, max_iter=100000, tol=1e-10)
    learner.fit(train_times)
    score = learner.score()
    print(f'obtained score {score} with decay {decay}\n')

    decay_matrix = np.full((1, 1), decay)

    simulation = SimuHawkesExpKernels(learner.adjacency,
                                      decay_matrix,
                                      baseline=learner.baseline,
                                      end_time=end_time)
    simulation.simulate()
    return learner, simulation
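# Usage sketch (hypothetical data and decay value, assuming the function above and
# `import numpy as np` are in scope): fit on a single one-node realization, then
# simulate over the same horizon.
train_times = [np.array([0.5, 1.3, 2.1, 4.4, 4.6, 7.8])]  # one array per node
learner, simulation = fit_exp_hawkes_and_simulate(train_times, decay=2.0,
                                                  end_time=10.0)
print(simulation.n_total_jumps)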
Example #6
    def _sim_single_exposures(self):
        if not self.sparse:
            raise ValueError(
                "'single_exposure' exposures can only be simulated"
                " as sparse feature matrices")

        if self.hawkes_exp_kernels is None:
            np.random.seed(self.seed)
            decays = .002 * np.ones((self.n_features, self.n_features))
            baseline = 4 * np.random.random(self.n_features) / self.n_intervals
            mult = np.random.random(self.n_features)
            adjacency = mult * np.eye(self.n_features)

            if self.n_correlations:
                comb = list(permutations(range(self.n_features), 2))
                if len(comb) > 1:
                    idx = itemgetter(
                        *np.random.choice(range(len(comb)),
                                          size=self.n_correlations,
                                          replace=False))
                    comb = idx(comb)

                for i, j in comb:
                    adjacency[i, j] = np.random.random(1)

            self._set(
                'hawkes_exp_kernels',
                SimuHawkesExpKernels(adjacency=adjacency,
                                     decays=decays,
                                     baseline=baseline,
                                     verbose=False,
                                     seed=self.seed))

        self.hawkes_exp_kernels.adjust_spectral_radius(
            0.1)  # TODO: make this parameter configurable
        hawkes = SimuHawkesMulti(self.hawkes_exp_kernels,
                                 n_simulations=self.n_cases)

        run_time = self.n_intervals
        hawkes.end_time = [1 * run_time for _ in range(self.n_cases)]
        dt = 1
        self.hawkes_exp_kernels.track_intensity(dt)
        hawkes.simulate()

        self.hawkes_obj = hawkes
        features = [[
            np.min(np.floor(f)) if len(f) > 0 else -1 for f in patient_events
        ] for patient_events in hawkes.timestamps]

        features = [
            self.to_coo(feat, (run_time, self.n_features)) for feat in features
        ]

        # Keep only patients that have at least one exposure
        exposures_filter = itemgetter(
            *[i for i, f in enumerate(features) if f.sum() > 0])
        features = exposures_filter(features)
        n_samples = len(features)

        return features, n_samples
Example #7
def SimExp(baseline, adjacency, decays, num_clusters, data):
    hawkes = SimuHawkesExpKernels(adjacency=adjacency,
                                  decays=decays,
                                  baseline=baseline,
                                  verbose=False)

    #dt = 0.001 #millisecond granularity
    #hawkes.track_intensity(dt) # turning this on will eat up memory

    # Need to compute and draw from the cluster-length distribution of the original data
    cluster_lengths = ComputeClusterLengths(data)

    multi = SimuHawkesMulti(hawkes, n_simulations=num_clusters)

    multi.end_time = np.random.choice(cluster_lengths,
                                      size=num_clusters,
                                      replace=True)
    multi.simulate()
    sim_inner_timestamps = multi.timestamps

    n_points = 0
    for realisation in sim_inner_timestamps:
        for series in realisation:
            n_points += len(series)
    print(f"Simulated {n_points} points")
    return sim_inner_timestamps
Example #8
def SimulateExp(baseline, adjacency, decays, time):

    hawkes = SimuHawkesExpKernels(adjacency=adjacency,
                                  decays=decays,
                                  baseline=baseline,
                                  verbose=False)

    hawkes.end_time = time
    dt = 0.001  #millisecond granularity
    #hawkes.track_intensity(dt)
    print(f"Starting sim")
    hawkes.simulate()
    timestamps = hawkes.timestamps
    n_points = 0
    for series in timestamps:
        n_points += len(series)
    print(f"Simulated {n_points} points")
    return hawkes.timestamps
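# Usage sketch (illustrative 2-node parameters, assuming the function above and
# `import numpy as np` are in scope).
baseline = np.array([0.2, 0.4])
adjacency = np.array([[0.2, 0.1], [0.0, 0.3]])
decays = np.full((2, 2), 0.5)
timestamps = SimulateExp(baseline, adjacency, decays, time=1000)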
Example #9
    def simulate_sparse_realization(self):
        """Simulate realization in which some nodes are sometimes empty
        """
        baseline = np.array([0.3, 0.001])
        adjacency = np.array([[0.5, 0.8], [0., 1.3]])

        sim = SimuHawkesExpKernels(adjacency=adjacency,
                                   decays=self.decay,
                                   baseline=baseline,
                                   verbose=False,
                                   seed=13487,
                                   end_time=500)
        sim.adjust_spectral_radius(0.8)
        multi = SimuHawkesMulti(sim, n_simulations=100)

        adjacency = sim.adjacency
        multi.simulate()

        # Check that some but not all realizations are empty
        self.assertGreater(max(map(lambda r: len(r[1]), multi.timestamps)), 1)
        self.assertEqual(min(map(lambda r: len(r[1]), multi.timestamps)), 0)

        return baseline, adjacency, multi.timestamps
    def get_train_data(n_nodes=3, betas=1.):
        np.random.seed(130947)
        baseline = np.random.rand(n_nodes)
        adjacency = np.random.rand(n_nodes, n_nodes)
        if isinstance(betas, (int, float)):
            betas = np.ones((n_nodes, n_nodes)) * betas

        sim = SimuHawkesExpKernels(adjacency=adjacency, decays=betas,
                                   baseline=baseline, verbose=False,
                                   seed=13487, end_time=3000)
        sim.adjust_spectral_radius(0.8)
        adjacency = sim.adjacency
        sim.simulate()

        return sim.timestamps, baseline, adjacency
    def simulate(self):
        n_nodes = 1
        baseline = [0.1]
        adjacency = [[0.1]]

        end_time = 10000
        # max_jumps=1000;
        # NOTE: `decays` was not defined in this snippet; 1.0 is an assumed
        # placeholder value
        decays = [[1.0]]
        a_sim = SimuHawkesExpKernels(adjacency,
                                     decays,
                                     baseline=baseline,
                                     end_time=end_time,
                                     verbose=True)
        a_sim.track_intensity(0.01)

        a_sim.simulate()
        # print(a_sim.timestamps)

        # print('Tracked intensity: ', a_sim.tracked_intensity)

        with open('sample_timestamps.txt', 'w') as f:
            f.write(str(list(a_sim.timestamps[0])))
Example #12
    def test_solver_scpg(self):
        """...Check Self-concordant proximal gradient solver for a Hawkes
        model with ridge penalization
        """
        beta = 3
        betas = beta * np.ones((2, 2))

        alphas = np.zeros((2, 2))

        alphas[0, 0] = 1
        alphas[0, 1] = 2
        alphas[1, 1] = 3

        mus = np.arange(1, 3) / 3

        hawkes = SimuHawkesExpKernels(adjacency=alphas,
                                      decays=betas,
                                      baseline=mus,
                                      seed=1231,
                                      end_time=20000,
                                      verbose=False)
        hawkes.adjust_spectral_radius(0.8)
        alphas = hawkes.adjacency

        hawkes.simulate()
        timestamps = hawkes.timestamps

        model = ModelHawkesExpKernLogLik(beta).fit(timestamps)
        prox = ProxL2Sq(1e-7, positive=True)
        pg = SCPG(max_iter=2000, tol=1e-10, verbose=False,
                  step=1e-5).set_model(model).set_prox(prox)

        pg.solve(np.ones(model.n_coeffs))

        original_coeffs = np.hstack((mus, alphas.reshape(4)))
        np.testing.assert_array_almost_equal(pg.solution,
                                             original_coeffs,
                                             decimal=2)
class Test(unittest.TestCase):
    def setUp(self):
        np.random.seed(23982)
        self.n_nodes = 3
        self.baseline = np.random.rand(self.n_nodes)
        self.adjacency = np.random.rand(self.n_nodes, self.n_nodes) / 2
        self.decays = np.random.rand(self.n_nodes, self.n_nodes)

        self.adjacency[0, 0] = 0
        self.adjacency[-1, -1] = 0

        self.hawkes = SimuHawkesExpKernels(self.adjacency,
                                           self.decays,
                                           baseline=self.baseline,
                                           seed=203,
                                           verbose=False)

    def test_hawkes_exponential_kernels(self):
        """...Test creation of a Hawkes Process with exponential kernels
        """

        kernel_0 = None
        for i, j in product(range(self.n_nodes), range(self.n_nodes)):
            kernel_ij = self.hawkes.kernels[i, j]

            if self.adjacency[i, j] == 0:
                self.assertEqual(kernel_ij.__class__, HawkesKernel0)

                # Check that all zero entries of the adjacency share the same
                # HawkesKernel0 instance; this can save a lot of memory with
                # very large, very sparse adjacency matrices
                if kernel_0 is None:
                    kernel_0 = kernel_ij
                else:
                    self.assertEqual(kernel_0, kernel_ij)

            else:
                self.assertEqual(kernel_ij.__class__, HawkesKernelExp)
                self.assertEqual(kernel_ij.decay, self.decays[i, j])
                self.assertEqual(kernel_ij.intensity, self.adjacency[i, j])

        np.testing.assert_array_equal(self.baseline, self.hawkes.baseline)

    def test_hawkes_spectral_radius_exp_kernel(self):
        """...Hawkes Process spectral radius and adjust spectral radius
        methods
        """
        self.assertAlmostEqual(self.hawkes.spectral_radius(),
                               0.6645446549735008)

        self.hawkes.adjust_spectral_radius(0.6)
        self.assertAlmostEqual(self.hawkes.spectral_radius(), 0.6)

    def test_hawkes_mean_intensity(self):
        """...Test that Hawkes obtained mean intensity is consistent
        """

        self.assertLess(self.hawkes.spectral_radius(), 1)

        self.hawkes.end_time = 1000
        self.hawkes.track_intensity(0.01)
        self.hawkes.simulate()

        mean_intensity = self.hawkes.mean_intensity()
        for i in range(self.hawkes.n_nodes):
            self.assertAlmostEqual(np.mean(self.hawkes.tracked_intensity[i]),
                                   mean_intensity[i],
                                   delta=0.1)
Example #14
    def _corresponding_simu(self):
        return SimuHawkesExpKernels(adjacency=self.adjacency,
                                    decays=self.decays,
                                    baseline=self.baseline)
                           size=args.n_correlations,
                           replace=False)
    comb = [comb[i] for i in idx]
    for i, j in comb:
        adjacency[i, j] = np.random.random()

if args.constant_decay:
    decays = np.full((args.n_types, args.n_types), args.exp_decay)
else:
    decays = np.random.exponential(args.exp_decay,
                                   (args.n_types, args.n_types))

simu_hawkes = SimuHawkesExpKernels(
    baseline=baseline,
    adjacency=adjacency,
    decays=decays,
    verbose=False,
    seed=args.rand_seed,
)
simu_hawkes.adjust_spectral_radius(args.adj_spectral_radius)
simu_hawkes.max_jumps = args.max_jumps

print(simu_hawkes.baseline)
print(simu_hawkes.adjacency)
print(simu_hawkes.decays)

with Timer("Simulating events"), Pool(cpu_count() // 2) as p:
    timestamps = list(
        starmap(
            simulate_helper,
            zip(
Example #16
                threshold = alpha / rho
                ZS = soft_thres_S(Aest+US, threshold)
                US = US + Aest - ZS


if __name__ == '__main__':
    end_time = 50000
    n_realizations = 1
    decay = 3
    baseline = np.ones(6) * 0.03
    adjacency = np.zeros((6, 6))
    adjacency[2:, 2:] = np.ones((4, 4)) * 0.1
    adjacency[:3, :3] = np.ones((3, 3)) * 0.15

    hawkes_exp_kernels = SimuHawkesExpKernels(adjacency=adjacency, decays=decay,
                                              baseline=baseline, end_time=end_time,
                                              verbose=False, seed=1039)

    multi = SimuHawkesMulti(hawkes_exp_kernels, n_simulations=n_realizations)
    multi.end_time = [(i + 1) / n_realizations * end_time for i in range(n_realizations)]
    multi.simulate()

    type_seq = []
    time_seq = []
    for i, d in enumerate(multi.timestamps[0]):
        for j in d:
            type_seq.append(i)
            time_seq.append(j)
    type_seq = np.array(type_seq)
    time_seq = np.array(time_seq)
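# Follow-up sketch (assumption: a chronologically ordered event stream is wanted):
# the flattening above iterates node by node, so sort the merged sequences by time.
order = np.argsort(time_seq)
type_seq = type_seq[order]
time_seq = time_seq[order]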
Example #17
from tick.plot import plot_point_process

period_length = 100
t_values = np.linspace(0, period_length)
y_values = 0.2 * np.maximum(np.sin(t_values *
                                   (2 * np.pi) / period_length), 0.2)
baselines = np.array(
    [TimeFunction((t_values, y_values), border_type=TimeFunction.Cyclic)])

decay = 0.1
adjacency = np.array([[0.5]])

hawkes = SimuHawkesExpKernels(adjacency,
                              decay,
                              baseline=baselines,
                              seed=2093,
                              verbose=False)
hawkes.track_intensity(0.1)
hawkes.end_time = 6 * period_length
hawkes.simulate()

fig, ax = plt.subplots(1, 1, figsize=(10, 4))

plot_point_process(hawkes, ax=ax)

t_values = np.linspace(0, hawkes.end_time, 1000)
ax.plot(t_values,
        hawkes.get_baseline_values(0, t_values),
        label='baseline',
        ls='--',
def simulate(parameters, T, adjacency, decay):
    '''
    parameters = (pi, beta, rho, u, delta00, delta01, delta10, delta11, mu1, mu2, mu3)
    '''

    pi, beta, rho, u, delta00, delta01, delta10, delta11, mu1, mu2, mu3 = parameters
    baseline = np.array([mu1, mu2, mu3])

    # Simulate Times
    hawkes_sim = SimuHawkesExpKernels(adjacency=adjacency,
                                      decays=decay,
                                      baseline=baseline,
                                      verbose=False)
    hawkes_sim.end_time = T
    hawkes_sim.simulate()

    # Generate Event Arrival Times
    limit_order_arrivals = hawkes_sim.timestamps[1]
    limit_order_arrivals = [('loa', time) for time in limit_order_arrivals]

    limit_order_cancellations = hawkes_sim.timestamps[2]
    limit_order_cancellations = [('loc', time)
                                 for time in limit_order_cancellations]

    market_order_arrivals = hawkes_sim.timestamps[0]
    market_order_arrivals = [('moa', time) for time in market_order_arrivals]

    # Merge into single array of event times
    event_times = limit_order_arrivals.copy()
    event_times.extend(limit_order_cancellations)
    event_times.extend(market_order_arrivals)
    event_times.sort(key=lambda x: x[1])

    #%% initialize order book
    lob = OrderBook('XYZ', tick_size=1)

    AAPL_LOB = pd.read_csv(  # Orderbook
        '../../Data/LOBSTER_SampleFile_AAPL_2012-06-21_50/AAPL_2012-06-21_34200000_37800000_orderbook_50.csv',
        header=None)

    init = AAPL_LOB.iloc[0]
    t = 10**(-10)
    dt = 10**(-10)
    #Initial Orders
    for i in range(0, AAPL_LOB.shape[1], 4):
        if init[i + 1] > 0:
            lob.submit_limitorder(-1, init[i] / 100, init[i + 1], t)
            t += dt
    for i in range(2, AAPL_LOB.shape[1], 4):
        if init[i + 1] > 0:
            lob.submit_limitorder(1, init[i] / 100, init[i + 1], t)
            t += dt

    midprice = [lob.mid_price()]
    spread = [lob.spread()]
    time = [lob.time]

    for event in event_times:
        # simulation
        if event[0] == 'loa':  # limit order arrival
            order = gen_limitorder(lob, delta00, delta01, delta10, delta11,
                                   rho, u)
            ID = lob.submit_limitorder(*order, event[1])
        elif event[0] == 'loc':  # limit order cancellation
            # get number of sell and buy orders
            num_sells = sum(
                [len(value) for key, value in lob.sell_side.items()])
            num_buys = sum([len(value) for key, value in lob.buy_side.items()])
            # cancel random order if condition met
            if (num_sells > 5 and num_buys > 5):
                active_orders = get_active_orders(lob)
                lob.cancel_limitorder(np.random.choice(active_orders),
                                      event[1])
            else:
                continue  # if nothing happened, don't collect data

        elif event[0] == 'moa':  # market order case
            order = gen_market_order(lob, beta, pi)
            lob.submit_marketorder(*order, event[1])
        else:
            raise ValueError('Invalid event type')

        midprice.append(lob.mid_price())
        spread.append(lob.spread())
        time.append(lob.time)

    Output = pd.DataFrame(index=pd.to_timedelta(time, unit='s'),
                          data={
                              'mid_price': midprice,
                              'spread': spread
                          })

    sigma5 = np.std(
        np.log(Output.resample('5s').first().mid_price).diff().dropna())
    sigma30 = np.std(
        np.log(Output.resample('30s').first().mid_price).diff().dropna())
    sigma60 = np.std(
        np.log(Output.resample('60s').first().mid_price).diff().dropna())
    meanSpread = np.mean(Output.spread)

    return (sigma5, sigma30, sigma60, meanSpread, *parameters)
np.random.seed(7168)

n_nodes = 3
baselines = 0.3 * np.ones(n_nodes)
decays = 0.5 + np.random.rand(n_nodes, n_nodes)
adjacency = np.array([
    [1, 1, -0.5],
    [0, 1, 0],
    [0, 0, 2],
], dtype=float)

adjacency /= 4

end_time = 1e5
integration_support = 5
n_realizations = 5

simu_hawkes = SimuHawkesExpKernels(
    baseline=baselines, adjacency=adjacency, decays=decays,
    end_time=end_time, verbose=False, seed=7168)
simu_hawkes.threshold_negative_intensity(True)

multi = SimuHawkesMulti(simu_hawkes, n_simulations=n_realizations, n_threads=-1)
multi.simulate()

nphc = HawkesCumulantMatching(integration_support, cs_ratio=.15, tol=1e-10,
                              step=0.3)

nphc.fit(multi.timestamps)
plot_hawkes_kernel_norms(nphc)
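# Follow-up sketch (assumption: inspecting the fitted learner directly; nphc.adjacency
# and nphc.baseline are the estimates exposed by HawkesCumulantMatching after fit):
print(adjacency)       # adjacency used for the simulation
print(nphc.adjacency)  # NPHC estimate of the integrated kernels
print(nphc.baseline)   # NPHC estimate of the baselines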
Example #20
    def _corresponding_simu(self):
        """Create a simulation object corresponding to the obtained coefficients
        """
        return SimuHawkesExpKernels(adjacency=self.adjacency,
                                    decays=self.decay,
                                    baseline=self.baseline)