Example #1
import os

import tensorflow as tf
import zhusuan as zs

# conf and dataset are assumed to come from the ZhuSuan examples package;
# the snippet does not show its own imports.
from examples import conf
from examples.utils import dataset

# Assumed placeholder: the original value of log_delta is not shown in the
# snippet.
log_delta = 1.


def lntm(observed, D, K, V, n_chains, eta_mean, eta_logstd):
    # NOTE: the snippet starts mid-function. The signature and the `eta`
    # prior below are a plausible reconstruction of the truncated lines:
    # per-document topic weights tiled over D documents and n_chains chains.
    with zs.BayesianNet(observed=observed) as model:
        D_multiple = [D, 1]
        n_chains_multiple = [n_chains, 1, 1]
        eta = zs.Normal('eta',
                        tf.tile(tf.expand_dims(tf.tile(tf.expand_dims(
                            eta_mean, 0), D_multiple), 0), n_chains_multiple),
                        logstd=tf.tile(tf.expand_dims(tf.tile(tf.expand_dims(
                            eta_logstd, 0), D_multiple), 0), n_chains_multiple),
                        group_event_ndims=1)
        beta = zs.Normal('beta',
                         tf.zeros([K, V]),
                         logstd=tf.ones([K, V]) * log_delta,
                         group_event_ndims=1)
    return model


if __name__ == "__main__":
    tf.set_random_seed(1237)

    # Load the NIPS dataset
    data_name = 'nips'
    data_path = os.path.join(conf.data_dir, data_name + '.pkl.gz')
    X, vocab = dataset.load_uci_bow(data_name, data_path)
    X_train = X[:1200, :]
    X_test = X[1200:, :]

    # Define model training/evaluation parameters
    D = 100
    K = 100
    V = X_train.shape[1]
    n_chains = 1

    num_e_steps = 5
    hmc = zs.HMC(step_size=1e-3,
                 n_leapfrogs=20,
                 adapt_step_size=True,
                 target_acceptance_rate=0.6)
    epochs = 100
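
The snippet ends before the Monte Carlo EM loop itself. Below is a minimal, self-contained sketch of how a zs.HMC object like the one configured above is typically wired to a log-joint function and run; the toy model, toy_log_joint, and z_var are illustrative assumptions, not code from the original example.

# Toy illustration of the zs.HMC sampling API (assumed usage), reusing
# n_chains, num_e_steps, and hmc from the snippet above.
def toy_log_joint(observed):
    with zs.BayesianNet(observed=observed) as toy:
        zs.Normal('z', tf.zeros([n_chains, 1]),
                  logstd=tf.zeros([n_chains, 1]),
                  group_event_ndims=1)
    return toy.local_log_prob('z')

# HMC writes each new sample into this variable when sample_op runs.
z_var = tf.Variable(tf.zeros([n_chains, 1]), trainable=False)
sample_op, hmc_info = hmc.sample(toy_log_joint,
                                 observed={}, latent={'z': z_var})

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(num_e_steps):
        _, acc = sess.run([sample_op, hmc_info.acceptance_rate])
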
Example #2
import os

import numpy as np
import tensorflow as tf
import zhusuan as zs

# As in Example #1, conf and dataset are assumed to come from the ZhuSuan
# examples package; the snippet does not show its own imports.
from examples import conf
from examples.utils import dataset


def get_indices_and_values(X):
    # Convert a scipy sparse matrix into the (indices, values) pair expected
    # by tf.SparseTensor, sorted in row-major order.
    X = X.tocoo()
    indices = np.transpose(np.array([X.row, X.col])).astype(np.int64)
    values = X.data
    # np.lexsort treats its last key as primary: sort by row, then by column
    p = np.lexsort((indices[:, 1], indices[:, 0]))
    return indices[p, :], values[p]


if __name__ == "__main__":
    tf.set_random_seed(1237)

    # Load the Enron dataset
    data_name = 'enron'
    data_path = os.path.join(conf.data_dir, data_name + '.pkl.gz')
    # The reader returns the bag-of-words matrix as a scipy sparse matrix
    X, vocab = dataset.load_uci_bow(data_name, data_path, is_sparse=True)
    X_train = X[:30000, :]
    X_test = X[30000:, :]
    X_test_indices, X_test_values = get_indices_and_values(X_test)

    # Define model training/evaluation parameters
    D = 2500
    K = 100
    V = X_train.shape[1]
    n_chains = 1

    num_e_steps = 5
    hmc = zs.HMC(step_size=1e-3, n_leapfrogs=20, adapt_step_size=True,
                 target_acceptance_rate=0.6)
    epochs = 100
    learning_rate_0 = 1.0
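
The evaluation code that consumes X_test_indices and X_test_values is cut off here. One common pattern is to feed them through tf.sparse_placeholder as a tf.SparseTensorValue; this is a sketch under that assumption (x_sp and x_dense are illustrative names, not from the original file).

# Assumed usage: feed the sorted sparse COO data into the TF graph.
x_sp = tf.sparse_placeholder(tf.float32, shape=[None, V], name='x_test')
x_dense = tf.sparse_tensor_to_dense(x_sp)  # densify if the model needs it

with tf.Session() as sess:
    feed = {x_sp: tf.SparseTensorValue(
        indices=X_test_indices,
        values=X_test_values.astype(np.float32),
        dense_shape=X_test.shape)}
    dense_batch = sess.run(x_dense, feed_dict=feed)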