def __init__(self,
                 hyper_params=None,
                 reuse=False,
                 is_saving=True,
                 init_graph=True,
                 mode='train',
                 name='JOINT_DEFENSE'):
        self.is_saving = is_saving
        self.init_graph = init_graph
        self.mode = mode

        if hyper_params is None:
            hyper_params = ADV_TRAIN_HP
        self.hp_params = utils.ParamWrapper(hyper_params)
        self.threshold = None  # get_median()

        # attack initialization: create the dataset artifacts if any are
        # missing (data_preprocess is assumed to also fit and persist the
        # normalizer that is read back below)
        if not (os.path.exists(config.get('dataset', 'dataX'))
                and os.path.exists(config.get('dataset', 'datay'))
                and os.path.exists(config.get('dataset', 'normalizer'))):
            dataX, datay = self.data_preprocess()
            utils.dump_joblib(dataX, config.get('dataset', 'dataX'))
            utils.dump_joblib(datay, config.get('dataset', 'datay'))

        self.normalizer = utils.read_joblib(config.get('dataset',
                                                       'normalizer'))
        input_dim = len(self.normalizer.data_min_)
        self.inner_maximizer = PGDAdam(self,
                                       input_dim,
                                       self.normalizer,
                                       verbose=False,
                                       **AUG_PARAM)
        super(JointDefense, self).__init__(hyper_params, reuse, self.is_saving,
                                           self.init_graph, self.mode, name)
Example #2
    def __init__(self,
                 hyper_params=None,
                 reuse=False,
                 is_saving=True,
                 init_graph=True,
                 mode='train',
                 name='DAE_RPST_LEARN_DNN'):
        self.is_saving = is_saving
        self.init_graph = init_graph
        self.mode = mode

        if hyper_params is None:
            hyper_params = DAE_TRAIN_HP

        # initialization: create the dataset artifacts if any are missing
        if not (os.path.exists(config.get('dataset', 'dataX')) and
                os.path.exists(config.get('dataset', 'datay')) and
                os.path.exists(config.get('dataset', 'normalizer'))):
            dataX, datay = self.data_preprocess()
            utils.dump_joblib(dataX, config.get('dataset', 'dataX'))
            utils.dump_joblib(datay, config.get('dataset', 'datay'))

        self.normalizer = utils.read_joblib(config.get('dataset', 'normalizer'))
        input_dim = len(self.normalizer.data_min_)
        self.inner_maximizer = PGDAdam(self,
                                       input_dim,
                                       self.normalizer,
                                       verbose=False,
                                       **AUG_PARAM)

        super(DAE_RPST_DNN, self).__init__(hyper_params, reuse,
                                           self.is_saving, self.init_graph, self.mode, name)
def normalize_data(X, is_fitting=False):
    """Normalize data with a min-max scaler fitted on the training set."""
    normalizer_path = config.get('dataset', 'normalizer')
    if is_fitting and not os.path.exists(normalizer_path):
        # fit a new scaler on X and persist it for later runs
        normalizer = MinMaxScaler().fit(X)
        utils.dump_joblib(normalizer, normalizer_path)
    # reuse the persisted scaler so train and test data share the same scaling
    normalizer = utils.read_joblib(normalizer_path)
    # clip to the fitted range so transform() stays within [0, 1]
    x_clipped = np.clip(X,
                        a_min=normalizer.data_min_,
                        a_max=normalizer.data_max_)
    X_normalized = normalizer.transform(x_clipped)
    return X_normalized
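A usage sketch for normalize_data (X_train and X_test below are hypothetical
NumPy feature matrices; only the function's own signature is assumed):

    import numpy as np

    X_train = np.random.rand(100, 8)  # hypothetical training features
    X_test = np.random.rand(20, 8)    # hypothetical test features

    # Fit and persist the scaler on training data, then reuse it so the
    # test set is scaled with the training statistics.
    X_train_norm = normalize_data(X_train, is_fitting=True)
    X_test_norm = normalize_data(X_test)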
    def __init__(self,
                 hyper_params=None,
                 reuse=False,
                 is_saving=True,
                 init_graph=True,
                 mode='train',
                 name='BASIC_DNN'):
        super(BasicDNN, self).__init__()
        self.is_saving = is_saving
        self.init_graph = init_graph
        self.reuse = reuse
        self.model_name = name

        if mode not in ('train', 'test'):
            raise ValueError("mode must be either 'train' or 'test'.")
        self.mode = mode
        if hyper_params is None:
            hyper_params = DNN_HP
        self.hp_params_dict = hyper_params
        self.hp_params = utils.ParamWrapper(hyper_params)

        if self.is_saving:
            self.save_dir = config.get("experiments", self.model_name.lower())

        if not (os.path.exists(config.get('dataset', 'dataX'))
                and os.path.exists(config.get('dataset', 'datay'))
                and os.path.exists(config.get('dataset', 'normalizer'))):
            dataX, datay = self.data_preprocess()
            utils.dump_joblib(dataX, config.get('dataset', 'dataX'))
            utils.dump_joblib(datay, config.get('dataset', 'datay'))
        self.normalizer = utils.read_joblib(config.get('dataset',
                                                       'normalizer'))

        # DNN-based model: the input dimension is taken from the fitted normalizer
        self.input_dim = len(self.normalizer.data_min_)

        self.hidden_layers = self.hp_params.hidden_units
        self.output_dim = self.hp_params.output_dim
        tf.set_random_seed(self.hp_params.random_seed)
        if self.init_graph:
            self.model_graph(reuse=reuse)
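A minimal usage sketch (illustrative only; it assumes BasicDNN and DNN_HP are
importable from this module):

    # Hypothetical usage: passing hyper_params=None falls back to DNN_HP;
    # mode must be 'train' or 'test', and init_graph=True builds the
    # TensorFlow graph immediately.
    dnn = BasicDNN(hyper_params=None, reuse=False, mode='test')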