Code Example #1
 def build(self, input_shape):
     if len(input_shape) > self.rank + 2:
         raise ValueError("Input to {} should has {:d} rank,"
                          "but received input shape {}".format(
                              self.name, self.rank, str(input_shape)))
     # Channels-last formats end with 'C': spatial dims are 1..-1;
     # otherwise (channels-first) they start at index 2.
     spatial = to_list(input_shape[1:-1]) \
         if self.data_format[-1] == 'C' else to_list(input_shape[2:])
     # Solve in_size = (out_size - 1) * stride + kernel_size for kernel_size.
     self.kernel_size = tuple(spatial[i] -
                              (self.out_size[i] - 1) * self.strides[0]
                              for i in range(self.rank))
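Every snippet on this page calls a to_list helper whose definition is not shown here. As a working assumption (not taken from the tensorchainer source), it presumably behaves like this minimal sketch: lists pass through, tuples are converted, and anything else is wrapped in a one-element list.

def to_list(x):
    # Hypothetical sketch of the helper assumed by the examples on this page.
    if isinstance(x, list):
        return x
    if isinstance(x, tuple):
        return list(x)
    return [x]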
Code Example #2
File: layer_hook.py Project: Leg-end/tensorchainer
 def __init__(self, end_names, prefix=''):
     if not end_names:
         raise ValueError("End points' names must be provided")
     # Guard against the default prefix='' (prefix[-1] would raise IndexError)
     # and make sure a non-empty prefix ends with '/'.
     if prefix and not prefix.endswith('/'):
         prefix += '/'
     self.end_names = [prefix + name for name in to_list(end_names)]
     self.endpoints = OrderedDict()
Code Example #3
File: network.py Project: Leg-end/tensorchainer
def graph_scope(name, default_name=None, values=None):
    from tensorlib.engine import Input
    if values is None:
        raise ValueError("Argument `values` can not be None.")
    values = to_list(values)
    # Validate that every incoming tensor can be traced through the graph.
    for x in values:
        F.assert_tensor_traceable(x)
    with ops.name_scope(name=name, default_name=default_name,
                        values=values) as scope:
        inputs = unpack_singleton([
            Input(batch_input_shape=F.int_shape(x), dtype=x.dtype)
            for x in values
        ])
        handler = GraphScope(scope=scope, inputs=inputs)
        yield handler
    # After the caller's block exits, wrap what it assigned to handler.outputs
    # into a Network and register the node connecting `values` to those outputs.
    net = Network(inputs=inputs, outputs=handler.outputs, name=scope)
    graph_ops.build_node(net, values, to_list(handler.outputs))
    # print(getattr(handler.outputs, '_anchor')[0])
    del handler
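graph_scope yields exactly once and keeps doing work after the yield, which is the usual contextlib.contextmanager pattern, so it is presumably decorated that way in network.py. A self-contained toy with the same control flow (the names are illustrative, not tensorchainer API):

import contextlib

@contextlib.contextmanager
def toy_scope(name):
    handler = {'name': name, 'outputs': None}  # stands in for GraphScope
    yield handler                              # the caller fills handler['outputs'] here
    # Code after the yield runs when the with-block exits, mirroring how
    # graph_scope builds the Network from handler.outputs.
    print('building network from', handler['outputs'])

with toy_scope('block') as h:
    h['outputs'] = ['dense/BiasAdd:0']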
Code Example #4
File: layer_hook.py Project: Leg-end/tensorchainer
 def before_forward(self, layer, inputs, **kwargs):
     states = inputs[1]
     state_size = to_list(layer.state_size)
     # Each RNN state must have a matching entry in the cell's state_size.
     if len(states) != len(state_size):
         raise ValueError("The states passed to RNN cell %s have %d entries,"
                          " but its state_size has %d entries; state_size"
                          " should contain one integer per RNN state" %
                          (layer.name, len(states), len(state_size)))
Code Example #5
File: base_layer.py Project: Leg-end/tensorchainer
 def __call__(self, *inputs, **kwargs):
     inputs = to_list(inputs)
     # Local hooks are merged in after the global ones, so entries from
     # self.local_hooks take precedence.
     hooks = OrderedDict(tensorlib._get_hooks(), **self.local_hooks)
     hooks = hooks.values()
     for hook in hooks:
         hook.before_forward(self, inputs, **kwargs)
     with self._name_scope():
         if not self.built:
             self.build(
                 unpack_singleton(nest.map_structure(F.int_shape, inputs)))
             self.built = True
         outputs = self.forward(*inputs, **kwargs)
         # Remember whether forward returned a bare value so that __call__
         # can return outputs in the same structure forward produced.
         unpack = not isinstance(outputs, (list, tuple))
         outputs = to_list(outputs)
     for hook in hooks:
         hook.after_forward(self, outputs, inputs, **kwargs)
     if unpack:
         outputs = outputs[0]
     return outputs
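The only contract __call__ places on a hook is a pair of methods, before_forward(layer, inputs, **kwargs) and after_forward(layer, outputs, inputs, **kwargs); Code Example #4 shows a real before_forward. A minimal illustrative hook (the logging behaviour is hypothetical, not part of the library):

class LoggingHook:
    def before_forward(self, layer, inputs, **kwargs):
        # Called just before layer.forward with the listified inputs.
        print('calling', layer.name, 'with', len(inputs), 'input tensor(s)')

    def after_forward(self, layer, outputs, inputs, **kwargs):
        # Called after forward with the listified outputs and original inputs.
        print(layer.name, 'produced', len(outputs), 'output tensor(s)')

Registering it on one layer would presumably go through the per-layer mapping used above, e.g. layer.local_hooks['logger'] = LoggingHook().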
Code Example #6
File: network.py Project: Leg-end/tensorchainer
 def __init__(self, inputs=None, outputs=None, **kwargs):
     self._children = set()
     self._graph = None
     self._output_tensor_cache = {}
     self._nested_inputs = None
     self._nested_outputs = None
     self.inputs = None
     self.outputs = None
     if inputs is not None and outputs is not None:
         super(Network, self).__init__(**kwargs)
         self.inputs = to_list(nest.flatten(inputs))
         self.outputs = to_list(nest.flatten(outputs))
         for x in self.inputs + self.outputs:
             F.assert_tensor_traceable(x)
         self._nested_inputs = inputs
         self._nested_outputs = outputs
         self._graph = graph_ops.build_graph_network(self, inputs, outputs)
         self.built = True
     else:
         # No graph to build: pull any Layer-valued kwargs out as named
         # child layers, then pass the remaining kwargs to the base class.
         keys = sorted(kwargs.keys())
         for key in keys:
             if isinstance(kwargs[key], Layer):
                 self.add_layer(key, kwargs.pop(key))
         super(Network, self).__init__(**kwargs)
Code Example #7
File: sequential.py Project: Leg-end/tensorchainer
 def forward(self, *inputs):
     if len(self) == 0:
         raise RuntimeError("Cannot run forward on an empty Sequential")
     # Feed each layer's outputs (as a list) into the next layer.
     for layer in self:
         inputs = to_list(layer(*inputs))
     return unpack_singleton(inputs)
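forward wraps every intermediate result with to_list and collapses the final one with unpack_singleton. That helper is also not shown on this page; presumably it just undoes the wrapping for one-element sequences, roughly:

def unpack_singleton(x):
    # Hypothetical sketch paired with the to_list sketch above: return the
    # sole element of a one-element sequence, otherwise pass x through.
    if isinstance(x, (list, tuple)) and len(x) == 1:
        return x[0]
    return x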