def __init__(self, data, sample_no, validation_samples, no_sample_per_each_itr,
                 train_tag, validation_tag, test_tag, img_name, label_name, torso_tag, log_tag, min_range, max_range,
                 Logs, fold, Server,newdataset=False):
        # NOTE(review): this class defines __init__ TWICE; this first definition is
        # silently replaced by the second one below and is therefore DEAD CODE.
        # It also differs from the live version: learning_rate is 1E-5 here (1E-4
        # below), it accepts a `newdataset` flag, and the non-'DL' data_path points
        # at BrainWeb_permutation2_low instead of permutation00. Confirm which
        # configuration is intended before deleting either copy.
        settings.init()
        # ==================================
        # Dataset tags and image/label naming used by the data reader.
        self.train_tag = train_tag
        self.validation_tag = validation_tag
        self.test_tag = test_tag
        self.img_name = img_name
        self.label_name = label_name
        self.torso_tag = torso_tag
        self.data = data
        self.display_train_step = 25
        # ==================================
        settings.validation_totalimg_patch = validation_samples
        # NOTE(review): `gradients` is not a parameter of this method; presumably it
        # is a module-level name imported elsewhere in this file — verify, otherwise
        # this line raises NameError (moot while this __init__ stays shadowed).
        self.gradients=gradients
        # ==================================
        # Optimizer hyperparameters.
        self.learning_decay = .95
        self.learning_rate = 1E-5
        self.beta_rate = 0.05
        self.newdataset=newdataset

        # Patch geometry (padded input image vs. segmentation output size).
        self.img_padded_size = 519
        self.seg_size = 505
        self.min_range = min_range
        self.max_range = max_range

        self.label_patchs_size = 39  # 63
        self.patch_window = 53  # 77#89
        self.sample_no = sample_no
        self.batch_no = 6
        self.batch_no_validation = self.batch_no
        self.validation_samples = validation_samples
        self.display_step = 100
        self.display_validation_step = 1
        self.total_epochs = 10
        self.loss_instance = _loss_func()
        self.fold = fold

        # Server-specific absolute paths: 'DL' workstation vs. HPC cluster.
        if Server == 'DL':
            self.parent_path = '/srv/2-lkeb-17-dl01/syousefi/TestCode/EsophagusProject/sythesize_code/'
            self.data_path = '/srv/2-lkeb-17-dl01/syousefi/TestCode/EsophagusProject/Data-01/BrainWeb_permutation00_low/'

        else:
            self.parent_path = '/exports/lkeb-hpc/syousefi/Code/'

            self.data_path = '/exports/lkeb-hpc/syousefi/Synth_Data/BrainWeb_permutation2_low/'
        self.Logs = Logs

        self.no_sample_per_each_itr = no_sample_per_each_itr

        # Log / checkpoint directory layout derived from parent_path + Logs + log_tag.
        self.log_ext = log_tag
        self.LOGDIR = self.parent_path + self.Logs + self.log_ext + '/'
        self.chckpnt_dir = self.parent_path + self.Logs + self.log_ext + '/unet_checkpoints/'

        logger.set_log_file(self.parent_path + self.Logs + self.log_ext + '/log_file' + str(fold))
    def __init__(self, data, sample_no, validation_samples,
                 no_sample_per_each_itr, train_tag, validation_tag, test_tag,
                 img_name, label_name, torso_tag, log_tag, min_range,
                 max_range, Logs, fold, Server, newdataset=False):
        '''
        Constructor: stores the run configuration and resolves the
        server-specific data and log paths.

        :param data: which dataset should it use
        :param sample_no: number of samples which is used for the training process
        :param validation_samples: number of samples which is used for the validation process
        :param no_sample_per_each_itr: number of samples which is used for each epoch
        :param train_tag: tag for training images
        :param validation_tag: tag for validation images
        :param test_tag: tag for test images
        :param img_name: name of the images
        :param label_name: name of the labels
        :param torso_tag: name of masks if needed
        :param log_tag: tag for log dir
        :param min_range: min range of images
        :param max_range: max range of images
        :param Logs: log dir
        :param fold: fold no if cross validation is used
        :param Server: on which server it is running ('DL' workstation, else HPC)
        :param newdataset: if it is training on the new DB (default False,
            matching the earlier signature — was documented here but missing
            from the parameter list)
        '''
        settings.init()

        # ==================================
        # Dataset tags and image/label naming used by the data reader.
        self.train_tag = train_tag
        self.validation_tag = validation_tag
        self.test_tag = test_tag
        self.img_name = img_name
        self.label_name = label_name
        self.torso_tag = torso_tag
        self.data = data
        self.display_train_step = 25
        # ==================================
        settings.validation_totalimg_patch = validation_samples
        # NOTE(review): `gradients` is not a parameter; presumably a
        # module-level name defined elsewhere in this file — verify.
        self.gradients = gradients
        # ==================================
        # Optimizer hyperparameters.
        self.learning_decay = .95
        self.learning_rate = 1E-4
        self.beta_rate = 0.05
        self.newdataset = newdataset

        # Patch geometry (padded input image vs. segmentation output size).
        self.img_padded_size = 519
        self.seg_size = 505
        self.min_range = min_range
        self.max_range = max_range

        self.label_patchs_size = 39  # 63 #input size
        self.patch_window = 53  # 77#89 #output size
        self.sample_no = sample_no
        self.batch_no = 6
        self.batch_no_validation = self.batch_no
        self.validation_samples = validation_samples
        self.display_step = 100
        self.display_validation_step = 1
        self.total_epochs = 10
        self.loss_instance = _loss_func()
        self.fold = fold
        self.quantifications = quantifications()

        # Server-specific absolute paths: 'DL' workstation vs. HPC cluster.
        if Server == 'DL':
            self.parent_path = '/srv/2-lkeb-17-dl01/syousefi/TestCode/EsophagusProject/sythesize_code/'
            self.data_path = '/srv/2-lkeb-17-dl01/syousefi/TestCode/EsophagusProject/Data-01/BrainWeb_permutation00_low/'

        else:
            self.parent_path = '/exports/lkeb-hpc/syousefi/Code/'
            self.data_path = '/exports/lkeb-hpc/syousefi/Synth_Data/BrainWeb_permutation00_low/'
        self.Logs = Logs

        self.no_sample_per_each_itr = no_sample_per_each_itr

        # Log / checkpoint directory layout; the common prefix is built once.
        self.log_ext = log_tag
        log_base = self.parent_path + self.Logs + self.log_ext
        self.LOGDIR = log_base + '/'
        self.chckpnt_dir = log_base + '/unet_checkpoints/'

        logger.set_log_file(log_base + '/log_file' + str(fold))