def run_experiments(data_loader, transformer, dataset_name, class_name, n_runs):
  """Repeat the transformations experiment `n_runs` times for one dataset.

  Results are written under RESULTS_DIR/<dataset_name>; the directory is
  created if it does not already exist.
  """
  output_dir = os.path.join(RESULTS_DIR, dataset_name)
  utils.check_paths(output_dir)

  run_count = 0
  while run_count < n_runs:
    _transformations_experiment(data_loader, transformer, dataset_name,
                                class_name, output_dir)
    run_count += 1
 def __init__(self,
              data_loader: ZTFOutlierLoader,
              transformer: AbstractTransformer,
              input_shape,
              results_folder_name='',
              name='Ensemble_OVA_Transformer_OD_Simple_Model',
              **kwargs):
     """Build the ensemble one-vs-all transform outlier-detection model.

     Args:
       data_loader: loader that provides the outlier-detection datasets.
       transformer: transformer whose transformations drive the ensemble.
       input_shape: input shape forwarded to `_get_model_list`.
       results_folder_name: subfolder for this model's outputs; '' keeps
         them at the results root.
       name: model name; also used when building the results path.
       **kwargs: forwarded unchanged to `_get_model_list`.
     """
     super(TransformODModel, self).__init__(name=name)
     # Run timestamp, e.g. '20240131-120000', to disambiguate outputs.
     self.date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
     self.main_model_path = self.create_main_model_paths(
         results_folder_name, self.name)
     # NOTE(review): presumably creates any missing directories — confirm
     # against utils.check_paths.
     utils.check_paths(self.main_model_path)
     self.data_loader = data_loader
     self.transformer = transformer
     # List of sub-models — one per transformation, presumably; exact
     # contents depend on `_get_model_list`.
     self.models_list = self._get_model_list(input_shape, **kwargs)
 def __init__(self,
              data_loader: ZTFOutlierLoader,
              transformer: AbstractTransformer,
              input_shape,
              results_folder_name='',
              name='Transformer_OD_Simple_Model',
              **kwargs):
     """Build the single-network transform outlier-detection model.

     Args:
       data_loader: loader that provides the outlier-detection datasets.
       transformer: transformer; its `n_transforms` sets the number of
         output classes of the classification network.
       input_shape: input shape forwarded to `get_network`.
       results_folder_name: subfolder for this model's outputs; '' keeps
         them at the results root.
       name: model name; also used when building the results path.
       **kwargs: forwarded unchanged to `get_network`.
     """
     super(TransformODModel, self).__init__(name=name)
     # Run timestamp, e.g. '20240131-120000', to disambiguate outputs.
     self.date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
     self.main_model_path = self.create_main_model_paths(
         results_folder_name, self.name)
     # NOTE(review): presumably creates any missing directories — confirm
     # against utils.check_paths.
     utils.check_paths(self.main_model_path)
     self.data_loader = data_loader
     self.transformer = transformer
     # One classifier head per transformation: the network predicts which
     # transformation was applied.
     self.network = self.get_network(
         input_shape=input_shape,
         n_classes=self.transformer.n_transforms,
         **kwargs)
def save_normal_and_transformed_data(
        transformer,
        normal_data_name='tf2_normal.pkl',
        transformed_data_name='tf2_old_transformed.pkl'):
    """Dump raw and transformed HiTS splits as pickles under tests/aux_data.

    Loads the HiTS outlier-detection splits with a fixed, seeded
    configuration, applies every transformation of `transformer` to each
    split, and writes two pickles: one with the untouched splits and one
    with the transformed versions.
    """
    save_dir = os.path.join(PROJECT_PATH, 'tests', 'aux_data')
    utils.check_paths(save_dir)
    # Fixed loader configuration (seeded so the fixtures are reproducible).
    hits_params = {
        loader_keys.DATA_PATH: os.path.join(
            PROJECT_PATH, '../datasets/HiTS2013_300k_samples.pkl'),
        loader_keys.N_SAMPLES_BY_CLASS: 10000,
        loader_keys.TEST_PERCENTAGE: 0.2,
        loader_keys.VAL_SET_INLIER_PERCENTAGE: 0.1,
        loader_keys.USED_CHANNELS: [0, 1, 2, 3],
        loader_keys.CROP_SIZE: 21,
        general_keys.RANDOM_SEED: 42,
        loader_keys.TRANSFORMATION_INLIER_CLASS_VALUE: 1,
    }
    hits_loader = HiTSOutlierLoader(hits_params)
    (x_train, y_train), (x_val, y_val), (x_test, y_test) = \
        hits_loader.get_outlier_detection_datasets()
    # Apply every transformation to each split, keeping (x, y) pairs in
    # train/val/test order.
    transformed_pairs = []
    for x_split in (x_train, x_val, x_test):
        x_transformed, y_transformed = transformer.apply_all_transforms(
            x=x_split)
        transformed_pairs.append((x_transformed, y_transformed))
    normal_data = ((x_train, y_train), (x_val, y_val), (x_test, y_test))
    transformed_data = tuple(transformed_pairs)
    utils.save_pickle(normal_data, os.path.join(save_dir, normal_data_name))
    utils.save_pickle(transformed_data,
                      os.path.join(save_dir, transformed_data_name))
 def __init__(self,
              data_loader: ZTFOutlierLoader,
              transformer: AbstractTransformer,
              input_shape,
              depth=10,
              widen_factor=4,
              results_folder_name='',
              name='Ensemble_OVO_Transformer_OD_Model',
              **kwargs):
     """Build the ensemble one-vs-one transform outlier-detection model.

     Args:
       data_loader: loader that provides the outlier-detection datasets.
       transformer: transformer whose transformation pairs drive the
         one-vs-one ensemble.
       input_shape: input shape forwarded to `_get_model_list`.
       depth: network depth passed to each sub-model builder.
       widen_factor: widen factor passed to each sub-model builder.
       results_folder_name: subfolder for this model's outputs; '' keeps
         them at the results root.
       name: model name; also used when building the results path.
       **kwargs: forwarded unchanged to `_get_model_list`.
     """
     super(TransformODModel, self).__init__(name=name)
     self.builder_input_shape = input_shape
     # Bug fix: this was hard-coded to 10, silently ignoring the `depth`
     # argument even though `depth` WAS forwarded to `_get_model_list`
     # below — so self.depth disagreed with the built models whenever a
     # caller passed depth != 10.
     self.depth = depth
     self.widen_factor = widen_factor
     # Run timestamp, e.g. '20240131-120000', to disambiguate outputs.
     self.date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
     self.main_model_path = self.create_main_model_paths(
         results_folder_name, self.name)
     utils.check_paths(self.main_model_path)
     self.data_loader = data_loader
     self.transformer = transformer
     self.models_list = self._get_model_list(input_shape,
                                             depth=depth,
                                             widen_factor=widen_factor,
                                             **kwargs)
     # Index tuples pairing models for one-vs-one scoring — presumably;
     # exact semantics depend on `_get_models_index_tuples`.
     self.models_index_tuples = self._get_models_index_tuples()
    # Tail of a plotting helper whose definition starts before this chunk:
    # optionally persist the figure, then either display or dispose of it.
    if path:
        # Saved as '<x_label_name>_hist_thr_acc.png' inside `path`.
        fig.savefig(os.path.join(path, '%s_hist_thr_acc.png' % x_label_name),
                    bbox_inches='tight')
    if show:
        plt.show()
    else:
        # Close the figure to release it when not displaying interactively.
        plt.close()


if __name__ == "__main__":
    # TF1-style session setup (tf.ConfigProto / tf.Session): let the GPU
    # allocator grow on demand instead of grabbing all memory up front.
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
    sess = tf.Session(config=config)
    set_session(sess)
    save_path = '../results/Transforms_hits'
    check_paths(save_path)

    # Index of the class treated as the inlier — TODO confirm against
    # the load_hits label convention.
    single_class_ind = 1

    # HiTS splits: 10k samples per class, 20% test, 10% val, and only
    # channel 2 of the image stamps.
    (x_train, y_train), (x_val,
                         y_val), (x_test,
                                  y_test) = load_hits(n_samples_by_class=10000,
                                                      test_size=0.20,
                                                      val_size=0.10,
                                                      return_val=True,
                                                      channels_to_get=[2])
    print(x_train.shape)
    print(x_val.shape)
    print(x_test.shape)

    # NOTE(review): presumably (8, 8) parameterizes the transformation
    # grid (e.g. translations) — verify against the Transformer ctor.
    transformer = Transformer(8, 8)
import tensorflow as tf
from tqdm import tqdm
from scripts.detached_transformer_od_hits import \
  plot_histogram_disc_loss_acc_thr, \
  dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum
from scripts.ensemble_transform_vs_all_od_hits import get_entropy, \
  plot_matrix_score
import torch
import torch.nn as nn
from modules.utils import check_paths

# Results for this script land under <PROJECT_PATH>/results/<EXPERIMENT_NAME>.
EXPERIMENT_NAME = 'ZTF_v1_TransTransformations'

if __name__ == "__main__":
  results_folder = os.path.join(PROJECT_PATH, 'results', EXPERIMENT_NAME)
  check_paths(results_folder)

  # TF1-style session setup with on-demand GPU memory growth.
  config = tf.ConfigProto()
  config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
  sess = tf.Session(config=config)
  set_session(sess)

  # Index of the class treated as the inlier — TODO confirm against loader.
  single_class_ind = 1

  # Real ZTF data with bogus labels; an alternate pickle name was left
  # commented out by the author.
  (x_train, y_train), (x_val, y_val), (x_test, y_test) = load_ztf_real_bog(return_val=True)#, data_file_name='ztf_v1_bogus_added.pkl')
  print(x_train.shape)
  print(x_val.shape)
  print(x_test.shape)
  # (x_train, y_train) = (x_train[:1000], y_train[:1000])
  # (x_val, y_val) = (x_val[:1000], y_val[:1000])