# Shared imports for the examples below. The module paths are assumptions:
# DiscreteTimeStandardHawkesModel, IdentityBasis, and plot_network are taken
# to come from the pyhawkes package.
import os
import time
import pickle

import numpy as np
import matplotlib.pyplot as plt

from pyhawkes.models import DiscreteTimeStandardHawkesModel
from pyhawkes.utils.basis import IdentityBasis
from pyhawkes.plotting.plotting import plot_network


def fit_standard_hawkes_model_sgd(S, K, B, dt, dt_max, init_model=None):
    """
    Fit
    :param S:
    :return:
    """
    print("Fitting the data with a standard Hawkes model using SGD")

    # Make a new model for inference
    test_model = DiscreteTimeStandardHawkesModel(K=K,
                                                 dt=dt,
                                                 dt_max=dt_max,
                                                 B=B)
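    # Stream the data in minibatches of 256 time bins for the stochastic gradients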
    test_model.add_data(S, minibatchsize=256)

    # Initialize the test model with the init model weights
    if init_model is not None:
        test_model.weights = init_model.weights

    plt.ion()
    im = plot_network(np.ones((K, K)), test_model.W, vmax=0.5)
    plt.pause(0.001)

    # Gradient descent
    N_steps = 1000
    samples = []
    lls = []
    timestamps = []

    # Constant learning-rate and momentum schedules, one entry per step
    learning_rate = 0.01 * np.ones(N_steps)
    momentum = 0.8 * np.ones(N_steps)
    prev_velocity = None
    for itr in range(N_steps):
        # W,ll,grad = test_model.gradient_descent_step(stepsz=0.001)
        W, ll, prev_velocity = test_model.sgd_step(prev_velocity,
                                                   learning_rate[itr],
                                                   momentum[itr])
        samples.append(test_model.copy_sample())
        lls.append(ll)
        timestamps.append(time.time())

        # Report progress every iteration (raise the modulus to report less often)
        if itr % 1 == 0:
            print("Iteration ", itr, "\t LL: ", ll)
            im.set_data(np.ones((K, K)) * test_model.W)
            plt.pause(0.001)

    plt.ioff()
    plt.figure()
    plt.plot(np.arange(N_steps), lls)
    plt.xlabel("Iteration")
    plt.ylabel("Log likelihood")

    plot_network(np.ones((K, K)), test_model.W)
    plt.show()

    return samples, timestamps
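
# --- Usage sketch for the SGD fitter. This is an illustration, not part of the
# original source: the synthetic spike array and all constants below are
# assumptions chosen only to exercise the function.
def _demo_sgd():
    T, K, B, dt, dt_max = 10000, 4, 3, 1.0, 10.0
    S = np.random.poisson(0.05, size=(T, K))  # stand-in (T, K) spike counts
    samples, timestamps = fit_standard_hawkes_model_sgd(S, K, B, dt, dt_max)
    print("Ran %d SGD steps in %.1f seconds" %
          (len(samples), timestamps[-1] - timestamps[0]))
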
def fit_standard_hawkes_model_bfgs(S,
                                   K,
                                   B,
                                   dt,
                                   dt_max,
                                   output_path,
                                   init_len=10000,
                                   xv_len=1000):
    """
    Fit
    :param S:
    :return:
    """
    # Check for existing results
    if os.path.exists(output_path + ".bfgs.pkl"):
        print("Existing BFGS results found. Loading from file.")
        with open(output_path + ".bfgs.pkl", 'rb') as f:
            init_model, init_time = pickle.load(f)

    else:
        print("Fitting the data with a standard Hawkes model")
        # betas = np.logspace(-3,-0.8,num=10)
        betas = np.array([0.01, 0.1, 1.0, 10.0, 20.0])
        # betas = np.concatenate(([0], betas))

        # Split off the first init_len bins for fitting and the next xv_len
        # bins for held-out cross-validation of beta
        init_models = []
        S_init = S[:init_len, :]
        xv_ll = np.zeros(len(betas))
        S_xv = S[init_len:init_len + xv_len, :]

        # Make a model to initialize the parameters
        init_model = DiscreteTimeStandardHawkesModel(K=K,
                                                     dt=dt,
                                                     B=B,
                                                     dt_max=dt_max,
                                                     beta=0.0)
        init_model.add_data(S_init)
        # Initialize the background rates to their mean
        init_model.initialize_to_background_rate()

        start = time.time()
        for i, beta in enumerate(betas):
            print("Fitting with BFGS on first ", init_len,
                  " time bins, beta = ", beta)
            init_model.beta = beta
            init_model.fit_with_bfgs()
            init_models.append(init_model.copy_sample())

            # Compute the heldout likelihood on the xv data
            xv_ll[i] = init_model.heldout_log_likelihood(S_xv)
            if not np.isfinite(xv_ll[i]):
                xv_ll[i] = -np.inf

        init_time = time.time() - start

        # Take the best model
        print("XV predictive log likelihoods: ")
        for beta, ll in zip(betas, xv_ll):
            print("Beta: %.2f\tLL: %.2f" % (beta, ll))
        best_ind = np.argmax(xv_ll)
        print("Best beta: ", betas[best_ind])
        init_model = init_models[best_ind]

        if best_ind == 0 or best_ind == len(betas) - 1:
            print("WARNING: Best BFGS model was for extreme value of beta. " \
                  "Consider expanding the beta range.")

        # Save the model (sans data)
        with open(output_path + ".bfgs.pkl", 'wb') as f:
            print("Saving BFGS results to ", (output_path + ".bfgs.pkl"))
            pickle.dump((init_model, init_time), f, protocol=-1)

    return init_model, init_time
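
# --- Usage sketch for the BFGS fitter. Illustrative only: the synthetic data
# and the output path are assumptions. Because results are cached, a second
# call with the same output_path loads the pickle instead of refitting.
def _demo_bfgs():
    T, K, B, dt, dt_max = 20000, 4, 3, 1.0, 10.0
    S = np.random.poisson(0.05, size=(T, K))  # stand-in (T, K) spike counts
    init_model, init_time = fit_standard_hawkes_model_bfgs(
        S, K, B, dt, dt_max, output_path="synth_demo")
    print("BFGS initialization took %.1f seconds" % init_time)
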
def fit_standard_hawkes_model_bfgs(S, K, dt, dt_max, output_path, W_max=None):
    """
    Fit
    :param S:
    :return:
    """
    # Check for existing results
    if os.path.exists(output_path + ".bfgs.pkl"):
        print("Existing BFGS results found. Loading from file.")
        with open(output_path + ".bfgs.pkl", 'rb') as f:
            init_model, init_time = pickle.load(f)

    else:
        print "Fitting the data with a standard Hawkes model"
        # betas = np.logspace(-1,1.3,num=1)
        # betas = [ 0.0 ]

        # We want the max W ~ 0.025 and the mean to be around 0.01.
        # W ~ Gamma(alpha, beta) => E[W] = alpha/beta, so beta ~ 100 * alpha
        alpha = 1.1
        betas = [alpha * 1.0 / 0.01]
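        # Sanity check on the arithmetic above (an illustrative addition, not in
        # the original): E[W] = alpha / beta = 1.1 / 110.0 = 0.01 under the rate
        # parameterization.
        assert np.isclose(alpha / betas[0], 0.01)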

        init_models = []
        xv_len = 10000
        init_len = S.shape[0] - xv_len
        S_init = S[:init_len, :]

        xv_ll = np.zeros(len(betas))
        S_xv = S[init_len:init_len + xv_len, :]

        # Make a model to initialize the parameters
        test_basis = IdentityBasis(dt, dt_max, allow_instantaneous=True)
        init_model = DiscreteTimeStandardHawkesModel(
            K=K,
            dt=dt,
            dt_max=dt_max,
            alpha=alpha,
            beta=0.0,
            basis=test_basis,
            allow_self_connections=False,
            W_max=W_max)
        init_model.add_data(S_init)
        # Initialize the background rates to their mean
        init_model.initialize_to_background_rate()

        start = time.time()
        for i, beta in enumerate(betas):
            print "Fitting with BFGS on first ", init_len, " time bins, ", \
                "beta = ", beta, "W_max = ", W_max
            init_model.beta = beta
            init_model.fit_with_bfgs()
            init_models.append(init_model.copy_sample())

            # Compute the heldout likelihood on the xv data
            xv_ll[i] = init_model.heldout_log_likelihood(S_xv)
            if not np.isfinite(xv_ll[i]):
                xv_ll[i] = -np.inf

        init_time = time.time() - start

        # Take the best model
        print "XV predictive log likelihoods: "
        for beta, ll in zip(betas, xv_ll):
            print "Beta: %.2f\tLL: %.2f" % (beta, ll)
        best_ind = np.argmax(xv_ll)
        print "Best beta: ", betas[best_ind]
        init_model = init_models[best_ind]

        if best_ind == 0 or best_ind == len(betas) - 1:
            print "WARNING: Best BFGS model was for extreme value of beta. " \
                  "Consider expanding the beta range."

        # Save the model (sans data)
        with open(output_path + ".bfgs.pkl", 'wb') as f:
            print("Saving BFGS results to ", (output_path + ".bfgs.pkl"))
            pickle.dump((init_model, init_time), f, protocol=-1)

    return init_model, init_time
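
# --- Loading sketch (illustrative; the path is an assumption). The fitters
# above cache (init_model, init_time) tuples with pickle; reloading reverses
# that, using binary mode to match pickle's binary protocol.
def _load_bfgs_results(output_path):
    with open(output_path + ".bfgs.pkl", 'rb') as f:
        init_model, init_time = pickle.load(f)
    return init_model, init_time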