Example #1
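This snippet assumes the library imports and a `datapipeline`/`model_params` setup already exist in the surrounding scope. A minimal sketch of that context, mirroring Example #4 below; the project-specific names (`DataPipeline`, `model_params`, `data_dir`, `dataset_name`) are assumed to be provided by the rest of the project and are not defined here:

# Assumed context, not part of the original snippet.
import tensorflow as tf          # TensorFlow 1.x graph-mode API
from keras import backend as K   # Keras backend, bound to the TF session below
from time import time

# Data pipeline construction as in Example #4; model_params, data_dir and
# dataset_name are assumed to be defined elsewhere in the project.
datapipeline = DataPipeline(model_params=model_params,
                            data_dir=data_dir,
                            dataset_name=dataset_name)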
# Placeholders and per-split feed dicts provided by the data pipeline.
placeholder_dict = datapipeline.get_placeholder_dict()

feed_dict_train = datapipeline.get_feed_dict(mode=TRAIN)

feed_dict_val = datapipeline.get_feed_dict(mode=VALIDATION)

feed_dict_test = datapipeline.get_feed_dict(mode=TEST)

# Shape information for the sparse inputs.
sparse_model_params = SparseModelParams(
    num_elements=datapipeline.num_elements,
    feature_size=datapipeline.feature_size
)

model = select_model(model_name=model_params.model_name)(
    model_params=model_params,
    sparse_model_params=sparse_model_params,
    placeholder_dict=placeholder_dict
)

# Bind the Keras backend to this TF session and initialize the graph variables.
sess = tf.Session()
K.set_session(sess)
sess.run(tf.global_variables_initializer())

for epoch in range(model_params.epochs):
    start_time = time()
    # One optimization step over the full training feed, then evaluation on the test split.
    loss, accuracy, opt = sess.run(
        [model.loss, model.accuracy, model.optimizer_op],
        feed_dict=feed_dict_train)
    loss_test, accuracy_test = sess.run(
        [model.loss, model.accuracy], feed_dict=feed_dict_test)
    print(accuracy_test)
    # print(accuracy)
Example #2
def run(model_params, data_dir, dataset_name, experiment=None):
    datapipeline = DataPipelineAE(model_params=model_params,
                                  data_dir=data_dir,
                                  dataset_name=dataset_name)

    feed_dict_train = datapipeline.get_feed_dict(mode=TRAIN)

    feed_dict_val = datapipeline.get_feed_dict(mode=VALIDATION)

    feed_dict_test = datapipeline.get_feed_dict(mode=TEST)

    sparse_model_params = datapipeline.get_sparse_model_params()
    autoencoder_model_params = datapipeline.get_autoencoder_model_params()
    placeholder_dict = datapipeline.get_placeholder_dict()

    if experiment:
        experiment.add_config(sparse_model_params.get_variables())
        experiment.add_config(autoencoder_model_params.get_variables())

    sess = tf.Session()
    K.set_session(sess)

    train_loss_runs = []
    validation_loss_runs = []
    test_aucscore_runs = []
    test_apr_runs = []

    # Repeat the experiment num_exp times and collect per-run loss/metric curves.
    for num_exp in range(model_params.num_exp):

        model = select_model(model_name=model_params.model_name)(
            model_params=model_params,
            sparse_model_params=sparse_model_params,
            placeholder_dict=placeholder_dict,
            autoencoder_model_params=autoencoder_model_params)

        if model_params.tensorboard_logs_dir:
            train_writer = tf.summary.FileWriter(
                model_params.tensorboard_logs_dir + model_params.model_name +
                "/" + TRAIN, sess.graph)
            val_writer = tf.summary.FileWriter(
                model_params.tensorboard_logs_dir + model_params.model_name +
                "/" + VALIDATION, sess.graph)

        sess.run([
            tf.global_variables_initializer(),
            tf.local_variables_initializer()
        ])

        train_loss_list = []
        validation_loss_list = []
        test_aucscore_list = []
        test_apr_list = []
        for epoch in range(model_params.epochs):
            loss, accuracy, opt, summary = sess.run(
                [model.loss, model.accuracy, model.optimizer_op, model.summary_op],
                feed_dict=feed_dict_train)

            loss_val, accuracy_val, summary_val = sess.run(
                [model.loss, model.accuracy, model.summary_op],
                feed_dict=feed_dict_val)

            if model_params.tensorboard_logs_dir:
                train_writer.add_summary(summary, epoch)
                val_writer.add_summary(summary_val, epoch)

            embedding, predictions_test, labels_test, mask_test, loss_test, accuracy_test, summary_test = sess.run(
                [
                    model.embeddings, model.logits, model.labels, model.mask,
                    model.loss, model.accuracy, model.summary_op
                ],
                feed_dict=feed_dict_test)

            auc_score = compute_auc_score(labels=labels_test,
                                          predictions=predictions_test,
                                          mask=mask_test)
            test_aucscore_list.append(auc_score)

            apr = compute_average_precision_recall(
                labels=labels_test,
                predictions=predictions_test,
                mask=mask_test)
            test_apr_list.append(apr)

            train_loss_list.append(loss)
            validation_loss_list.append(loss_val)

            print(
                "For epoch:run {}:{}, training_loss = {}, validation_loss = {}, test_auc = {}, test_apr = {}"
                .format(epoch, num_exp, loss, loss_val, auc_score, apr))

        train_loss_runs.append(train_loss_list)
        validation_loss_runs.append(validation_loss_list)
        test_aucscore_runs.append(test_aucscore_list)
        test_apr_runs.append(test_apr_list)

    plot_loss_curves(train_loss_runs,
                     validation_loss_runs,
                     dataset_name=dataset_name,
                     model_params=model_params)
    print_stats(train_loss_runs,
                validation_loss_runs,
                test_metrics=[test_aucscore_runs, test_apr_runs],
                test_metrics_labels=[AUCSCORE, AVERAGE_PRECISION_RECALL_SCORE])
Example #3
# Dataset location and name come from command-line flags.
data_dir = FLAGS.data_dir
dataset_name = FLAGS.dataset_name

datapipeline = DataPipelineAE(model_params=model_params,
                              data_dir=data_dir,
                              dataset_name=dataset_name)

feed_dict_train = datapipeline.get_feed_dict(mode=TRAIN)

feed_dict_val = datapipeline.get_feed_dict(mode=VALIDATION)

feed_dict_test = datapipeline.get_feed_dict(mode=TEST)

model = select_model(model_name=model_params.model_name)(
    model_params=model_params,
    sparse_model_params=datapipeline.get_sparse_model_params(),
    placeholder_dict=datapipeline.get_placeholder_dict(),
    autoencoder_model_params=datapipeline.get_autoencoder_model_params())

sess = tf.Session()
K.set_session(sess)
sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])

for epoch in range(model_params.epochs):
    start_time = time()
    loss, accuracy, opt = sess.run(
        [model.loss, model.accuracy, model.optimizer_op],
        feed_dict=feed_dict_train)
    predictions_val, labels_val, mask_val, loss_val, accuracy_val = sess.run(
        [model.logits, model.labels, model.mask, model.loss, model.accuracy],
        feed_dict=feed_dict_val)
    # The source snippet is truncated at this print call; a plausible per-epoch
    # log line, assumed by analogy with Example #2:
    print("epoch {}: training_loss = {}, validation_loss = {}, time = {:.2f}s".format(
        epoch, loss, loss_val, time() - start_time))
Example #4
def run(model_params, data_dir, dataset_name, experiment=None):
    datapipeline = DataPipeline(model_params=model_params,
                                data_dir=data_dir,
                                dataset_name=dataset_name)

    placeholder_dict = datapipeline.get_placeholder_dict()

    feed_dict_train = datapipeline.get_feed_dict(mode=TRAIN)

    feed_dict_val = datapipeline.get_feed_dict(mode=VALIDATION)

    feed_dict_test = datapipeline.get_feed_dict(mode=TEST)

    sparse_model_params = SparseModelParams(
        num_elements=datapipeline.num_elements,
        feature_size=datapipeline.feature_size)

    if experiment:
        experiment.add_config(sparse_model_params.get_variables())

    sess = tf.Session()
    K.set_session(sess)

    train_loss_runs = []
    validation_loss_runs = []
    test_accuracy_runs = []

    model = None

    for _ in range(model_params.num_exp):

        model = select_model(model_name=model_params.model_name)(
            model_params=model_params,
            sparse_model_params=sparse_model_params,
            placeholder_dict=placeholder_dict)

        if model_params.tensorboard_logs_dir:
            train_writer = tf.summary.FileWriter(
                model_params.tensorboard_logs_dir + model_params.model_name +
                "/" + TRAIN, sess.graph)
            val_writer = tf.summary.FileWriter(
                model_params.tensorboard_logs_dir + model_params.model_name +
                "/" + VALIDATION, sess.graph)
        sess.run(tf.global_variables_initializer())

        train_loss_list = []
        validation_loss_list = []
        test_accuracy_list = []

        for epoch in range(model_params.epochs):
            loss, accuracy, opt, summary = sess.run(
                [model.loss, model.accuracy, model.optimizer_op, model.summary_op],
                feed_dict=feed_dict_train)

            loss_val, accuracy_val, summary_val = sess.run(
                [model.loss, model.accuracy, model.summary_op],
                feed_dict=feed_dict_val)

            if model_params.tensorboard_logs_dir:
                train_writer.add_summary(summary, epoch)
                val_writer.add_summary(summary_val, epoch)

            train_loss_list.append(loss)
            validation_loss_list.append(loss_val)

            # Evaluate accuracy on the test split at every epoch.
            accuracy_test = sess.run(model.accuracy,
                                     feed_dict=feed_dict_test)
            test_accuracy_list.append(accuracy_test)

        train_loss_runs.append(train_loss_list)
        validation_loss_runs.append(validation_loss_list)
        test_accuracy_runs.append(test_accuracy_list)

    plot_loss_curves(train_loss_runs,
                     validation_loss_runs,
                     dataset_name=dataset_name,
                     model_params=model_params)
    print_stats(train_loss_runs,
                validation_loss_runs,
                test_metrics=[test_accuracy_runs],
                test_metrics_labels=[ACCURACY])

    # Embed and plot the activations of the penultimate layer, together with
    # the k-hot labels and the mask.
    activations, khot_labels, mask = sess.run(
        [model.activations, model.labels, model.mask],
        feed_dict=feed_dict_train)
    embedd_and_plot(node_representation=activations[-2],
                    labels=khot_labels,
                    mask=mask)
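
For context, a hypothetical entry point that wires these pieces together the way Example #3 does with command-line flags and then calls the `run` function from Example #4. The flag defaults and the `get_model_params` helper are assumptions for illustration, not part of the original code:

# Hypothetical driver (assumed; not in the original examples).
import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_string("data_dir", "data/", "Directory containing the dataset files.")
flags.DEFINE_string("dataset_name", "", "Name of the dataset to load.")
FLAGS = flags.FLAGS


def main(_):
    # get_model_params() is an assumed helper standing in for however the
    # project actually builds its ModelParams object.
    model_params = get_model_params()
    run(model_params=model_params,
        data_dir=FLAGS.data_dir,
        dataset_name=FLAGS.dataset_name)


if __name__ == "__main__":
    tf.app.run()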