Example #1
# Module-level imports assumed in the original test file: os, time, plus the
# project helpers used below (transformations_tf, create_simple_network,
# print_scores_times_to_file, N_RUNS, PROJECT_PATH).
def test_dh_transtransformer_1c():
    transformer = transformations_tf.TransTransformer()
    scores_list = []
    delta_times_list = []
    for _ in range(N_RUNS):
        # Fresh classifier for single-channel 21x21 crops; one output unit
        # per transformation defined by the TransTransformer.
        mdl = create_simple_network(input_shape=(21, 21, 1),
                                    num_classes=transformer.n_transforms,
                                    dropout_rate=0.0)
        mdl.compile('adam', 'categorical_crossentropy', ['acc'])
        # Time one training/evaluation run on TF1-transformed HiTS data.
        start_time = time.time()
        scores = test_tf1_transformed_data_on_tf2_keras_model_diri(
            mdl,
            transformer,
            dataset_name='hits-1-c',
            tf_version='tf1',
            transformer_name='transtransformed',
            model_name='dh',
            epochs=2)
        end_time = time.time()
        delta_times_list.append(end_time - start_time)
        scores_list.append(scores)
        del mdl
    # Append per-run scores and wall-clock times to the shared results file.
    file_path = os.path.join(PROJECT_PATH, 'tests', 'aux_results',
                             'test_models_tf1-tf2.txt')
    print_scores_times_to_file(
        file_path,
        'Data_transformer_tf1_models_diri_tf2_dh_transtransformer_functionModel\n NRUNS: %i'
        % N_RUNS, scores_list, delta_times_list)
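The helper print_scores_times_to_file is not shown on this page. A minimal sketch of what such a helper could look like, assuming it simply appends a header followed by the raw scores and per-run wall-clock times to the text file (an assumption, not the project's actual implementation):

import os


def print_scores_times_to_file(file_path, header, scores_list, delta_times_list):
    # Hypothetical reimplementation for illustration only: append a header,
    # then one line per run with its scores and duration in seconds.
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, 'a') as results_file:
        results_file.write(header + '\n')
        runs = zip(scores_list, delta_times_list)
        for run_idx, (scores, delta_time) in enumerate(runs):
            results_file.write(
                'run %i | scores: %s | time: %.2f s\n' % (run_idx, scores, delta_time))
        results_file.write('\n')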
Example #2
def test_tf2_resnet_transtransformer_unchanged_1c():
    transformer = transformations_tf.TransTransformer()
    scores_list = []
    delta_times_list = []
    for _ in range(N_RUNS):
        # Unchanged ResNet-based TransformODModel for 1-channel 21x21 crops.
        mdl = TransformODModel(data_loader=None,
                               transformer=transformer,
                               input_shape=(21, 21, 1))
        start_time = time.time()
        scores = test_tf1_normal_data_on_tf2_transformer_model_original(
            mdl,
            transformer,
            dataset_name='hits-1-c',
            tf_version='tf1',
            epochs=2)
        end_time = time.time()
        delta_times_list.append(end_time - start_time)
        scores_list.append(scores)
        del mdl
    file_path = os.path.join(PROJECT_PATH, 'tests', 'aux_results',
                             'test_models_tf1-tf2.txt')
    print_scores_times_to_file(
        file_path,
        'Data_normal_tf1_models_and_transforms_tf2_unchanged_resnet_transtransformer_1c\n NRUNS: %i'
        % N_RUNS, scores_list, delta_times_list)
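The scores and times collected over N_RUNS are written out raw. If a quick summary is wanted, a small aggregation step could be added before writing; the sketch below is illustrative and not part of the original tests, and it assumes each scores entry is a numeric value or an array of metrics:

import numpy as np


def summarize_runs(scores_list, delta_times_list):
    # Mean and standard deviation across the N_RUNS repetitions.
    scores = np.asarray(scores_list, dtype=float)
    times = np.asarray(delta_times_list, dtype=float)
    return {
        'score_mean': scores.mean(axis=0),
        'score_std': scores.std(axis=0),
        'time_mean_s': float(times.mean()),
        'time_std_s': float(times.std()),
    }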
Example #3
def test_resnet_transtransformer():
    transformer = transformations_tf.TransTransformer()
    scores_list = []
    delta_times_list = []
    for _ in range(N_RUNS):
        # Model variant that consumes already-transformed 4-channel data.
        mdl = AlreadyTransformODModel(transformer=transformer,
                                      input_shape=(21, 21, 4))
        start_time = time.time()
        scores = test_tf1_transformed_data_on_tf2_model_original_diri(
            mdl,
            transformer,
            dataset_name='hits-4-c',
            tf_version='tf1',
            transformer_name='transtransformed',
            model_name='resnet',
            epochs=2)
        end_time = time.time()
        delta_times_list.append(end_time - start_time)
        scores_list.append(scores)
        del mdl
    file_path = os.path.join(PROJECT_PATH, 'tests', 'aux_results',
                             'test_models_tf1-tf2.txt')
    print_scores_times_to_file(
        file_path,
        'Data_transformer_tf1_models_diri_tf2_resnet_transtransformer\n NRUNS: %i'
        % N_RUNS, scores_list, delta_times_list)
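Examples #1 to #3 repeat the same build-time-score loop. A hypothetical refactor that the tests could share (run_timed and build_model are illustrative names, not part of the original code):

import time


def run_timed(n_runs, build_model, run_fn):
    # Build a fresh model each run, time run_fn(model), and collect results.
    scores_list, delta_times_list = [], []
    for _ in range(n_runs):
        mdl = build_model()
        start_time = time.time()
        scores_list.append(run_fn(mdl))
        delta_times_list.append(time.time() - start_time)
        del mdl
    return scores_list, delta_times_list

A test would then pass, for instance, lambda: AlreadyTransformODModel(transformer=transformer, input_shape=(21, 21, 4)) as build_model and a closure over the scoring call as run_fn.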
Example #4
def test_all_tf2_dh_transtransformer_1c():
    transformer = transformations_tf.TransTransformer()
    hits_params = {
        loader_keys.DATA_PATH: os.path.join(
            PROJECT_PATH, '../datasets/HiTS2013_300k_samples.pkl'),
        loader_keys.N_SAMPLES_BY_CLASS: 10000,
        loader_keys.TEST_PERCENTAGE: 0.2,
        loader_keys.VAL_SET_INLIER_PERCENTAGE: 0.1,
        loader_keys.USED_CHANNELS: [2],
        loader_keys.CROP_SIZE: 21,
        general_keys.RANDOM_SEED: 42,
        loader_keys.TRANSFORMATION_INLIER_CLASS_VALUE: 1,
    }
    hits_outlier_dataset = HiTSOutlierLoader(hits_params)
    scores_list = []
    delta_times_list = []
    for _ in range(N_RUNS):
        # Simple transformation-classifier OD model with dropout disabled.
        mdl = TransformODSimpleModel(data_loader=None,
                                     transformer=transformer,
                                     input_shape=(21, 21, 1),
                                     drop_rate=0.0)
        start_time = time.time()
        scores = test_all_tf2(mdl,
                              transformer,
                              hits_outlier_dataset,
                              dataset_name='hits-1-c',
                              epochs=2)
        end_time = time.time()
        delta_times_list.append(end_time - start_time)
        scores_list.append(scores)
        del mdl
    file_path = os.path.join(PROJECT_PATH, 'tests', 'aux_results',
                             'test_models_tf1-tf2.txt')
    print_scores_times_to_file(
        file_path,
        'all_tf2_unchanged_dh_transtransformer_1c_DP0.0_fast_no_prints\n NRUNS: %i'
        % N_RUNS, scores_list, delta_times_list)
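Examples #4 and #5 build nearly identical hits_params dictionaries that differ only in the channels used. A small hypothetical factory (make_hits_params is not part of the original file) could remove that duplication:

def make_hits_params(used_channels, n_samples_by_class=10000):
    # Shared HiTS loader configuration; only the channel selection varies
    # between the single-channel and four-channel test variants.
    return {
        loader_keys.DATA_PATH: os.path.join(
            PROJECT_PATH, '../datasets/HiTS2013_300k_samples.pkl'),
        loader_keys.N_SAMPLES_BY_CLASS: n_samples_by_class,
        loader_keys.TEST_PERCENTAGE: 0.2,
        loader_keys.VAL_SET_INLIER_PERCENTAGE: 0.1,
        loader_keys.USED_CHANNELS: used_channels,
        loader_keys.CROP_SIZE: 21,
        general_keys.RANDOM_SEED: 42,
        loader_keys.TRANSFORMATION_INLIER_CLASS_VALUE: 1,
    }

Example #4 would then reduce to hits_outlier_dataset = HiTSOutlierLoader(make_hits_params([2])).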
Example #5
def best_score_evaluation(result_folder_name, epochs, patience=0):
    """Train every (trainer, model, transformer) combination and tabulate results."""
    trainer_params = {
        param_keys.RESULTS_FOLDER_NAME: result_folder_name,
        'epochs': epochs,
        'patience': patience,
    }
    # data loaders
    hits_params = {
        loader_keys.DATA_PATH: os.path.join(
            PROJECT_PATH, '../datasets/HiTS2013_300k_samples.pkl'),
        loader_keys.N_SAMPLES_BY_CLASS: 10000,
        loader_keys.TEST_PERCENTAGE: 0.2,
        loader_keys.VAL_SET_INLIER_PERCENTAGE: 0.1,
        loader_keys.USED_CHANNELS: [0, 1, 2, 3],  # use [2] for a single channel
        loader_keys.CROP_SIZE: 21,
        general_keys.RANDOM_SEED: 42,
        loader_keys.TRANSFORMATION_INLIER_CLASS_VALUE: 1,
    }
    hits_loader = HiTSOutlierLoader(hits_params)
    ztf_params = {
        loader_keys.DATA_PATH: os.path.join(
            PROJECT_PATH, '../datasets/ztf_v1_bogus_added.pkl'),
        loader_keys.VAL_SET_INLIER_PERCENTAGE: 0.1,
        loader_keys.USED_CHANNELS: [0, 1, 2],
        loader_keys.CROP_SIZE: 21,
        general_keys.RANDOM_SEED: 42,
        loader_keys.TRANSFORMATION_INLIER_CLASS_VALUE: 1,
    }
    ztf_loader = ZTFOutlierLoader(ztf_params)
    # transformers
    transformer_72 = transformations_tf.Transformer()
    trans_transformer = transformations_tf.TransTransformer()
    kernel_transformer = transformations_tf.KernelTransformer()
    plus_kernel_transformer = transformations_tf.PlusKernelTransformer()
    # trainers
    hits_trainer = Trainer(hits_loader, trainer_params)
    ztf_trainer = Trainer(ztf_loader, trainer_params)

    model_constructors_list = (TransformODModel, )
    transformers_list = (
        plus_kernel_transformer,
        kernel_transformer,
        transformer_72,
        trans_transformer,
    )
    trainers_list = (hits_trainer, )  # add ztf_trainer to also sweep the ZTF loader
    trainer_model_transformer_tuples = list(
        itertools.product(trainers_list, model_constructors_list,
                          transformers_list))

    for trainer, model_constructor, transformer in trainer_model_transformer_tuples:
        trainer.train_model_n_times(model_constructor,
                                    transformer,
                                    trainer_params,
                                    train_times=TRAIN_TIME)

    hits_trainer.create_tables_of_results_folders()
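Example #5 relies on itertools.product to enumerate every (trainer, model constructor, transformer) combination before training. A minimal standalone illustration of that expansion, using placeholder strings instead of the real objects:

import itertools

trainers = ('hits_trainer',)
models = ('TransformODModel',)
transformers = ('plus_kernel', 'kernel', 'transformer_72', 'trans')

# product() yields the full Cartesian grid: 1 x 1 x 4 = 4 training jobs here.
for trainer, model, transformer in itertools.product(trainers, models, transformers):
    print(trainer, model, transformer)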