コード例 #1
0
ファイル: nnp_graph.py プロジェクト: ehasumi/nnabla
    def get_network(self, name, batch_size=None, callback=None):
        '''Create a variable graph given  network by name

        Returns: NnpNetwork

        '''
        # Work on a copy so the cached proto in ``network_dict`` stays pristine.
        proto = nnabla_pb2.Network()
        proto.CopyFrom(self.network_dict[name])
        return NnpNetwork(
            proto, self._params, batch_size, callback=callback)
コード例 #2
0
def _create_network(net):
    """Build an ``nnabla_pb2.Network`` proto from a network spec dict.

    Args:
        net (dict): Keys read here: 'name' (str), 'batch_size' (int),
            'outputs' (dict: name -> Variable), 'names' (dict: name ->
            Variable).

    Returns:
        nnabla_pb2.Network: Proto with ``variable`` and ``function`` entries
        populated by visiting the graph from a sink built over ``outputs``.
    """
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    # List (dict: name -> Variable) of outputs.
    outputs = net['outputs']
    sink = _get_network_sink(outputs)

    # Create force name table: Variable -> name.
    # ``outputs`` is applied last so output names win over ``net['names']``.
    names = {}
    names.update(net['names'])
    names.update(outputs)
    # Reverse dict: Variable --> Name
    names = {v: k for k, v in names.items()}

    # Create table: NdArray -> str
    # (Use Ndarray instead of Variable because parameter variable might be
    # unlinked)
    params = {v.data: k for k, v in get_parameters(grad_only=False).items()}

    # ----------------------------------------------------------------------
    # Parse graph to get variables and functions
    # ----------------------------------------------------------------------
    variables = OrderedDict()
    functions = OrderedDict()

    def collect_info(func):
        # Collect information.
        # Record one function node and register its input/output variables;
        # registration order here fixes the names assigned to anonymous
        # variables, so it must match the visit order.
        function_type = func.info.type_name
        if function_type == 'Sink':
            # The sink is an artificial node used only to drive traversal.
            return
        function_name = _get_unique_function_name(function_type, functions)
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            base_name = '{}_Input'.format(function_name)
            vname = _get_variable_name_or_register(i, variables, names, params,
                                                   base_name)
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            base_name = '{}_Output'.format(function_name)
            vname = _get_variable_name_or_register(o, variables, names, params,
                                                   base_name)
            functions[function_name]['outputs'].append(vname)

    sink.visit(collect_info)

    # ----------------------------------------------------------------------
    # Convert variables and functions into proto
    # ----------------------------------------------------------------------
    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable.data in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            # TODO: The first dimension is always considered as batch size.
            # No problem?
            if len(shape) > 0:
                shape[0] = -1
        v.shape.dim.extend(shape)
        # ----------------------------------------------------------------------
        # Add info to variable
        # ----------------------------------------------------------------------
        # TODO: Only required for Parameter variables?
        if variable.info:
            i = v.initializer
            # Derive the proto type name by stripping the 'Initializer'
            # suffix from the Python class name (e.g. UniformInitializer ->
            # 'Uniform').
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                # NOTE(review): encodes the range as -lim[0]; assumes a
                # symmetric range [-m, m] -- confirm.
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error

    for name, function in functions.items():
        f = n.function.add()
        _create_function_nntxt(f, name, function)

    return n
コード例 #3
0
def _create_network(net, variable_batch_size):
    """Build an ``nnabla_pb2.Network`` proto from a network spec dict.

    Args:
        net (dict): Keys read here: 'name' (str), 'batch_size' (int),
            'outputs' (dict: name -> Variable), 'names' (dict: name ->
            Variable).
        variable_batch_size (bool): When true, dim 0 of every Buffer
            variable (and of Reshape/Broadcast 'shape' args) is treated as
            the batch size and rewritten to -1 so it can be changed at
            load time.

    Returns:
        nnabla_pb2.Network: Proto with ``variable`` and ``function`` entries
        populated by visiting the graph from a sink built over ``outputs``.

    Raises:
        ValueError: If batch dimensions disagree across variables/functions,
            or a Reshape shape contains more than one negative entry.
    """
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    # List (dict: name -> Variable) of outputs.
    outputs = net['outputs']
    sink = _get_network_sink(outputs)

    # Create force name table: Variable -> name.
    # ``outputs`` is applied last so output names win over ``net['names']``.
    names = {}
    names.update(net['names'])
    names.update(outputs)
    # Reverse dict: Variable --> Name
    names = {v: k for k, v in names.items()}

    # Create table: NdArray -> str
    # (Use Ndarray instead of Variable because parameter variable might be
    # unlinked)
    params = {v.data: k for k, v in get_parameters(grad_only=False).items()}

    # ----------------------------------------------------------------------
    # Parse graph to get variables and functions
    # ----------------------------------------------------------------------
    variables = OrderedDict()
    functions = OrderedDict()

    def collect_info(func):
        # Record one function node and register its input/output variables;
        # registration order fixes the names assigned to anonymous variables.
        function_type = func.info.type_name
        if function_type == 'Sink':
            # The sink is an artificial node used only to drive traversal.
            return
        function_name = _get_unique_function_name(function_type, functions)
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            base_name = '{}_Input'.format(function_name)
            vname = _get_variable_name_or_register(i, variables, names, params,
                                                   base_name)
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            base_name = '{}_Output'.format(function_name)
            vname = _get_variable_name_or_register(o, variables, names, params,
                                                   base_name)
            functions[function_name]['outputs'].append(vname)

    sink.visit(collect_info)

    # Batch size inferred from the first batch dimension seen; every later
    # batch dimension must agree with it. ``None`` means "not yet inferred".
    expect_batch_size = None

    # ----------------------------------------------------------------------
    # Convert variables and functions into proto
    # ----------------------------------------------------------------------
    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable.data in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            if variable_batch_size:
                # TODO: Temporarily dim 0 of shape expects to be batch size.
                if len(shape) > 0:
                    b = shape[0]
                    if expect_batch_size is None:
                        expect_batch_size = b
                    if b != expect_batch_size:
                        raise ValueError(
                            'Variable "{}" has different batch size {} (expected {})'
                            .format(v.name, b, expect_batch_size))
                    shape[0] = -1

        v.shape.dim.extend(shape)
        # ----------------------------------------------------------------------
        # Add info to variable
        # ----------------------------------------------------------------------
        # TODO: Only required for Parameter variables?
        if variable.info:
            i = v.initializer
            # Derive the proto type name by stripping the 'Initializer'
            # suffix from the Python class name.
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                # NOTE(review): encodes the range as -lim[0]; assumes a
                # symmetric range [-m, m] -- confirm.
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error

    for name, function in functions.items():
        f = n.function.add()
        if function['type'] == 'Reshape':

            shape = function['args']['shape']
            input_shape = variables[function['inputs'][0]].shape
            # Resolve a single -1 entry in the shape to the size implied by
            # the input's total element count.
            shape_infer_index = -1
            rest_size = 1
            for i, s in enumerate(shape):
                if s < 0:
                    if shape_infer_index >= 0:
                        # BUG FIX: message typo ('Rehaps') corrected.
                        raise ValueError(
                            'Reshape: shape has multiple negative values.')
                    shape_infer_index = i
                else:
                    rest_size *= s
            if shape_infer_index >= 0:
                function['args']['shape'][shape_infer_index] = int(
                    numpy.prod(input_shape) / rest_size)

            if variable_batch_size:
                # TODO: Temporarily dim 0 of shape expects to be batch size.
                b = function['args']['shape'][0]
                # BUG FIX: was `expect_batch_size < 0`, which raises
                # TypeError when the value is still None (it is initialized
                # to None and never set negative).
                if expect_batch_size is None:
                    expect_batch_size = b
                if b != expect_batch_size:
                    # BUG FIX: was formatting with `v.name`, a variable
                    # leaked from the previous loop (unbound when there are
                    # no variables); report the offending function instead.
                    raise ValueError(
                        'Function "{}" has different batch size {} (expected {})'
                        .format(name, b, expect_batch_size))
                function['args']['shape'][0] = -1

        if function['type'] == 'Broadcast':

            if variable_batch_size:
                # TODO: Temporarily dim 0 of shape expects to be batch size.
                b = function['args']['shape'][0]
                # BUG FIX: same None-comparison and error-message fixes as
                # the Reshape branch above.
                if expect_batch_size is None:
                    expect_batch_size = b
                if b != expect_batch_size:
                    raise ValueError(
                        'Function "{}" has different batch size {} (expected {})'
                        .format(name, b, expect_batch_size))
                function['args']['shape'][0] = -1

        _create_function_nntxt(f, name, function)

    return n
コード例 #4
0
def _create_network(net):
    """Build an ``nnabla_pb2.Network`` proto from a single output variable.

    Args:
        net (dict): Keys read here: 'name' (str), 'batch_size' (int),
            'variable' (Variable whose ``.parent`` function roots the
            traversal).

    Returns:
        nnabla_pb2.Network: Proto with ``variable`` and ``function`` entries
        collected by a depth-first walk of the graph.
    """
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    # Reverse table: parameter Variable -> registered name.
    params = {v: k for k, v in get_parameters(grad_only=False).items()}
    variables = OrderedDict()
    functions = OrderedDict()

    def _network_recursive(func, seen):
        # Post-order DFS: parents (producers of inputs) are recorded before
        # this function, so ``functions`` ends up in topological order.
        # NOTE(review): deep graphs may hit Python's recursion limit.
        if func is None:
            return
        seen.add(func)
        for i in func.inputs:
            if i.parent in seen:
                continue
            _network_recursive(i.parent, seen)

        # Collect information.
        # Disambiguate repeated function types by appending a counter
        # starting at 2 (e.g. 'Affine', 'Affine_2', ...).
        function_type = func.info.type_name
        function_name = function_name_base = function_type
        count = 2
        while function_name in functions:
            function_name = '{}_{}'.format(function_name_base, count)
            count += 1
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            vname = _add_variable(i, variables, params,
                                  '{}_Input'.format(function_name))
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            vname = _add_variable(o, variables, params,
                                  '{}_Output'.format(function_name))
            functions[function_name]['outputs'].append(vname)

    seen = set()
    _network_recursive(net['variable'].parent, seen)

    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            # Dim 0 of a Buffer variable is treated as batch size and
            # rewritten to -1 so it can be changed at load time.
            if len(shape) > 0:
                shape[0] = -1
        v.shape.dim.extend(shape)
        if variable.info:
            i = v.initializer
            # Derive the proto type name by stripping the 'Initializer'
            # suffix from the Python class name.
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                # NOTE(review): encodes the range as -lim[0]; assumes a
                # symmetric range [-m, m] -- confirm.
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error
    for name, function in functions.items():
        f = n.function.add()
        _create_function_nntxt(f, name, function)

    return n