Example #1
    def set_weights(self, weights, sess=None):
        """ Sets the weights of the layer.
    Arguments:
      weights: A list of Numpy arrays with shapes and types
          matching the output of layer.get_weights() or a list
          of private variables
      sess: tfe session"""

        weights_types = (np.ndarray, PondPrivateTensor, PondMaskedTensor)
        assert isinstance(weights[0], weights_types), type(weights[0])

        # Assign the new Keras weights to the existing weights that were
        # defined by default when the tfe layer was instantiated.
        if not sess:
            sess = KE.get_session()

        if isinstance(weights[0], np.ndarray):
            for i, w in enumerate(self.weights):
                shape = w.shape.as_list()
                tfe_weights_pl = tfe.define_private_placeholder(shape)
                fd = tfe_weights_pl.feed(weights[i].reshape(shape))
                sess.run(tfe.assign(w, tfe_weights_pl), feed_dict=fd)
        elif isinstance(weights[0], PondPrivateTensor):
            for i, w in enumerate(self.weights):
                shape = w.shape.as_list()
                sess.run(tfe.assign(w, weights[i].reshape(shape)))
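
A minimal usage sketch for the set_weights method above, under stated assumptions: plain_layer is a trained plaintext tf.keras layer, private_layer is its already-built tfe.keras counterpart, and the tfe.Session / tfe.global_variables_initializer calls follow the usual TF Encrypted session pattern; none of these names come from the example itself.

import tf_encrypted as tfe

# plain_layer: trained tf.keras layer; private_layer: matching tfe layer (assumed to exist).
with tfe.Session() as sess:
    sess.run(tfe.global_variables_initializer())
    # Copy the plaintext Numpy weights into the encrypted layer's variables.
    private_layer.set_weights(plain_layer.get_weights(), sess)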
Example #2
  def __init__(
      self,
      input_shape,
      output_shape,
      input_queue_capacity=1,
      input_queue_name="input",
      output_queue_capacity=1,
      output_queue_name="output",
  ):
    self.input_shape = input_shape
    self.output_shape = output_shape

    # input
    input_queue = tfe.queue.FIFOQueue(
        capacity=input_queue_capacity,
        shape=input_shape,
        shared_name=input_queue_name)
    self.input_placeholder = tfe.define_private_placeholder(shape=input_shape)
    self.input_op = input_queue.enqueue(self.input_placeholder)

    # output
    output_queue = tfe.queue.FIFOQueue(
        capacity=output_queue_capacity,
        shape=output_shape,
        shared_name=output_queue_name)
    output = output_queue.dequeue()
    self.output0 = output.share0
    self.output1 = output.share1

    # fixedpoint config
    self.modulus = output.backing_dtype.modulus
    self.bound = tfe.get_protocol().fixedpoint_config.bound_single_precision
    self.scaling_factor = tfe.get_protocol().fixedpoint_config.scaling_factor
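
A minimal enqueue sketch for the constructor above. The class name QueueClient and the shapes are illustrative assumptions (the snippet only shows __init__); feeding the private placeholder via .feed() mirrors the pattern from Example #1 and assumes a tfe protocol has already been configured.

import numpy as np
import tf_encrypted as tfe

client = QueueClient(input_shape=(1, 784), output_shape=(1, 10))
with tfe.Session() as sess:
    x = np.zeros((1, 784), dtype=np.float32)
    # Feed the private placeholder and push the secret-shared input
    # onto the shared input FIFO queue.
    sess.run(client.input_op, feed_dict=client.input_placeholder.feed(x))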
Example #3
    def __init__(
        self,
        input_shape=None,
        batch_size=None,
        dtype=None,
        input_tensor=None,
        sparse=False,
        name=None,
        **kwargs,
    ):
        if "batch_input_shape" in kwargs:
            batch_input_shape = kwargs.pop("batch_input_shape")
            if input_shape and batch_input_shape:
                raise ValueError(
                    "Only provide the input_shape OR "
                    "batch_input_shape argument to "
                    "InputLayer, not both at the same time."
                )
            batch_size = batch_input_shape[0]
            input_shape = batch_input_shape[1:]
        if kwargs:
            raise ValueError("Unrecognized keyword arguments:", kwargs.keys())

        if not name:
            prefix = "input"
            name = prefix + "_" + str(backend.get_uid(prefix))

        if batch_size is None:
            raise NotImplementedError()
        if input_tensor is not None:
            raise NotImplementedError()
        if dtype is not None:
            raise NotImplementedError()
        if sparse:
            raise NotImplementedError()

        super(InputLayer, self).__init__()
        self.built = True
        self.batch_size = batch_size

        if isinstance(input_shape, tensor_shape.TensorShape):
            input_shape = tuple(input_shape.as_list())
        elif isinstance(input_shape, int):
            input_shape = (input_shape,)

        if input_shape is not None:
            self._batch_input_shape = (batch_size,) + tuple(input_shape)
        else:
            raise ValueError("Input shape must be defined for the first layer.")

        # Create a graph placeholder to call the layer on.
        self.placeholder = tfe.define_private_placeholder(self._batch_input_shape)
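
A minimal construction sketch for the InputLayer above, assuming it is used like the standard Keras InputLayer but with an explicit batch_size (the constructor raises NotImplementedError when batch_size is None); the shape values are illustrative.

# Build a private input layer for flattened 28x28 images with batch size 1.
input_layer = InputLayer(input_shape=(784,), batch_size=1)
# input_layer._batch_input_shape is (1, 784), and input_layer.placeholder is a
# private placeholder of that shape that downstream tfe layers can be called on.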