def build(self, input_shape=None, custom_variables=None, scope="optimizee_vars"):
    """Builds the computational graph for the optimizee.

    :param input_shape: list with the dimensions of the input; defaults to
        ``[None, 784]``. A ``None`` sentinel is used instead of a mutable
        list default so the default object is never shared between calls.
    :param custom_variables: optional list of variables, in the same order as
        the network's variables — used to build the network with these
        variables and disconnect the gradients in the graph.
    :param scope: variable scope name under which the model's variables live.
    :returns: cleverhans model for this class (also stored on ``self.model``).
    """
    if input_shape is None:
        input_shape = [None, 784]
    template = MODEL_CONSTANTS.get_fc_template()
    mlp_args = {
        'layers': template,
        'input_shape': input_shape,
        'scope': scope,
    }
    if custom_variables is None:
        self.model = CustomModel(**mlp_args)
    else:
        # Rebuild the same architecture but wired to the caller-supplied
        # variables (gradients are disconnected by the helper).
        self.model = util.make_with_custom_variables(
            CustomModel, mlp_args, custom_variables)
    return self.model
def inference_custom():
    """Build an inference op whose graph uses externally supplied variables.

    Creates a float32 placeholder of the project's standard input shape and a
    single (dummy) variable list, then rebuilds ``inference`` so its graph is
    wired to those variables.

    :returns: the inference op built with the custom variables.
    """
    inputs = tf.placeholder(dtype=tf.float32,
                            shape=util.get_input_shape_none())
    custom_vars = [tf.Variable(initial_value=0)]
    return util.make_with_custom_variables(inference, [inputs], custom_vars)
def clever_custom_vars():
    """Build an MLP model re-wired to its own freshly created variables.

    First instantiates the model once to obtain its variable list, then
    rebuilds it with those variables via ``util.make_with_custom_variables``.

    :returns: the custom-variable model instance.
    """
    model_cls = MLP
    model_kwargs = {'layers': layers, 'input_shape': input_shape}
    model_vars, _ = util.get_variables(model_cls, model_kwargs)
    return util.make_with_custom_variables(model_cls, model_kwargs, model_vars)
def get_model_loss(x, y, custom_variables=None):
    """Return the softmax cross-entropy loss of the model on ``(x, y)``.

    The logits/loss computation was previously duplicated in both branches;
    only the model construction differs, so it alone is branched on.

    :param x: input tensor fed to the model.
    :param y: label tensor (one-hot, per ``softmax_cross_entropy_with_logits_v2``).
    :param custom_variables: optional list of variables to rebuild the model
        with (same order as the model's own variables); ``None`` builds the
        model with its default variables.
    :returns: the softmax cross-entropy loss tensor.
    """
    if custom_variables is None:
        model = op(**opkwargs)
    else:
        model = util.make_with_custom_variables(op, opkwargs, custom_variables)
    logits = model.get_logits(x)
    return tf.nn.softmax_cross_entropy_with_logits_v2(labels=y, logits=logits)
def loop_body(t, variables, x, y, fx_array):
    """One iteration of an unrolled optimization loop (e.g. ``tf.while_loop`` body).

    Evaluates the model loss with the current ``variables``, records it in
    ``fx_array`` at step ``t``, then produces the next variable values.

    :param t: current (integer tensor) time step.
    :param variables: current list of model variable tensors.
    :param x: input tensor (passed through unchanged).
    :param y: label tensor (passed through unchanged).
    :param fx_array: ``TensorArray`` accumulating the per-step losses.
    :returns: tuple ``(t_next, next_variables, x, y, fx_array)``.
    """
    with tf.name_scope("loop_function"):
        model = util.make_with_custom_variables(op, opkwargs, variables)
        logits = model.get_logits(x)
        loss = tf.nn.softmax_cross_entropy_with_logits_v2(
            labels=y, logits=logits)
        fx_array = fx_array.write(t, loss)
    with tf.name_scope("time_step"):
        t_next = t + 1
        # Use 'v' as the comprehension variable — the original used 'x',
        # shadowing the input parameter 'x' (confusing, though harmless in
        # Python 3 comprehension scope).
        next_variables = [v + 0.01 for v in variables]
    return t_next, next_variables, x, y, fx_array