Code Example #1
def decode_param_command(args, **kwargs):
    try:
        os.makedirs(args.outdir)
    except OSError:
        pass  # Python 2's os.makedirs does not support the exist_ok arg

    # Load parameter
    logger.log(99, 'Loading parameters...')
    load_parameters(args.param)

    # Save Parameters
    params = get_parameters(grad_only=False)
    for key, variable in params.items():
        logger.log(99, key)
        file_path = args.outdir + os.sep + key.replace('/', '~') + '.txt'
        dir = os.path.dirname(file_path)
        try:
            os.makedirs(dir)
        except OSError:
            pass  # Python 2's os.makedirs does not support the exist_ok arg

        save_param_in_txt(variable.d, file_path)

    logger.log(99, 'Decode Parameter Completed.')
    return True
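
The try/except pattern above silently ignores every OSError, not only "directory already exists". A stricter Python 2/3-compatible helper might look like the following sketch (not part of the original code):

import errno
import os

def makedirs_compat(path):
    # Create `path` recursively; ignore only "already exists" so that other
    # OSErrors (e.g. permission denied) still propagate. Works on Python 2 and 3.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
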
Code Example #2
def decode_param_command(args, **kwargs):
    os.makedirs(args.outdir, exist_ok=True)

    # Load parameter
    logger.log(99, 'Loading parameters...')
    load_parameters(args.param)

    # Save Parameters
    params = get_parameters(grad_only=False)
    for key, variable in params.items():
        logger.log(99, key)
        file_path = os.path.join(args.outdir,
                                 urllib.parse.quote(key, safe='/ ').replace('/', '~') + '.txt')
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        save_param_in_txt(variable.d, file_path)

    logger.log(99, 'Decode Parameter Completed.')
    return True
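
For reference, a standalone sketch of the key-to-filename mapping used in the loop above; the parameter key is a hypothetical example:

import urllib.parse

# '/' and ' ' are kept by quote(), then '/' is folded into '~' for the filename.
key = 'conv1/affine/W'  # hypothetical parameter name
fname = urllib.parse.quote(key, safe='/ ').replace('/', '~') + '.txt'
print(fname)  # conv1~affine~W.txt
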
Code Example #3
def decode_param_command(args, **kwargs):
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    # Load parameter
    logger.log(99, 'Loading parameters...')
    load_parameters(args.param)

    # Save Parameters
    params = get_parameters(grad_only=False)
    for key, variable in params.items():
        logger.log(99, key)
        file_path = args.outdir + os.sep + key.replace('/', '~') + '.txt'
        dir = os.path.dirname(file_path)
        if not os.path.exists(dir):
            os.makedirs(dir)
        save_param_in_txt(variable.d, file_path)

    logger.log(99, 'Decode Parameter Completed.')
Code Example #4
def create_proto(contents, include_params=False):
    proto = nnabla_pb2.NNablaProtoBuf()
    if 'global_config' in contents:
        proto.global_config.MergeFrom(
            _create_global_config(
                contents['global_config']['default_context']))
    if 'training_config' in contents:
        proto.training_config.MergeFrom(
            _create_training_config(
                contents['training_config']['max_epoch'],
                contents['training_config']['iter_per_epoch'],
                contents['training_config']['save_best']))
    networks = {}
    if 'networks' in contents:
        proto_nets = []
        for net in contents['networks']:
            networks[net['name']] = _create_network(net)
            proto_nets.append(networks[net['name']])
        proto.network.extend(proto_nets)
    datasets = {}
    if 'datasets' in contents:
        proto_datasets = []
        for d in contents['datasets']:
            if 'cache_dir' in d:
                cache_dir = d['cache_dir']
            else:
                cache_dir = None
            datasets[d['name']] = _create_dataset(d['name'], d['uri'],
                                                  cache_dir, d['variables'],
                                                  d['shuffle'],
                                                  d['batch_size'],
                                                  d['no_image_normalization'])
            proto_datasets.append(datasets[d['name']])
        proto.dataset.extend(proto_datasets)
    if 'optimizers' in contents:
        proto_optimizers = []
        for o in contents['optimizers']:
            proto_optimizers.append(
                _create_optimizer(o['name'], o['solver'],
                                  networks[o['network']],
                                  datasets[o['dataset']]))
        proto.optimizer.extend(proto_optimizers)
    if 'monitors' in contents:
        proto_monitors = []
        for m in contents['monitors']:
            proto_monitors.append(
                _create_monitor(m['name'], m['monitor'],
                                networks[m['network']],
                                datasets[m['dataset']]))
        proto.monitor.extend(proto_monitors)
    if 'executors' in contents:
        proto_executors = []
        for e in contents['executors']:
            proto_executors.append(
                _create_executor(e['name'], networks[e['network']], e['data'],
                                 e['output'], e.get('remp', {})))
        proto.executor.extend(proto_executors)

    if include_params is True:
        params = get_parameters(grad_only=False)
        for variable_name, variable in params.items():
            parameter = proto.parameter.add()
            parameter.variable_name = variable_name
            parameter.shape.dim.extend(variable.shape)
            parameter.data.extend(numpy.array(variable.d).flatten().tolist())
            parameter.need_grad = variable.need_grad

    return proto
Code Example #5
def _create_network(net):
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    # List (dict: name -> Variable) of outputs.
    outputs = net['outputs']
    sink = _get_network_sink(outputs)

    # Create force name table: Variable -> name.
    names = {}
    names.update(net['names'])
    names.update(outputs)
    # Reverse dict: Variable --> Name
    names = {v: k for k, v in names.items()}

    # Create table: NdArray -> str
    # (Use Ndarray instead of Variable because parameter variable might be
    # unlinked)
    params = {v.data: k for k, v in get_parameters(grad_only=False).items()}

    # ----------------------------------------------------------------------
    # Parse graph to get variables and functions
    # ----------------------------------------------------------------------
    variables = OrderedDict()
    functions = OrderedDict()

    def collect_info(func):
        # Collect information.
        function_type = func.info.type_name
        if function_type == 'Sink':
            return
        function_name = _get_unique_function_name(function_type, functions)
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            base_name = '{}_Input'.format(function_name)
            vname = _get_variable_name_or_register(i, variables, names, params,
                                                   base_name)
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            base_name = '{}_Output'.format(function_name)
            vname = _get_variable_name_or_register(o, variables, names, params,
                                                   base_name)
            functions[function_name]['outputs'].append(vname)

    sink.visit(collect_info)

    # ----------------------------------------------------------------------
    # Convert variables and functions into proto
    # ----------------------------------------------------------------------
    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable.data in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            # TODO: The first dimension is always considered as batch size.
            # No problem?
            if len(shape) > 0:
                shape[0] = -1
        v.shape.dim.extend(shape)
        # ----------------------------------------------------------------------
        # Add info to variable
        # ----------------------------------------------------------------------
        # TODO: Only required for Parameter variables?
        if variable.info:
            i = v.initializer
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error

    for name, function in functions.items():
        f = n.function.add()
        _create_function_nntxt(f, name, function)

    return n
Code Example #6
def _create_network(net, variable_batch_size):
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    # List (dict: name -> Variable) of outputs.
    outputs = net['outputs']
    sink = _get_network_sink(outputs)

    # Create force name table: Variable -> name.
    names = {}
    names.update(net['names'])
    names.update(outputs)
    # Reverse dict: Variable --> Name
    names = {v: k for k, v in names.items()}

    # Create table: NdArray -> str
    # (Use Ndarray instead of Variable because parameter variable might be
    # unlinked)
    params = {v.data: k for k, v in get_parameters(grad_only=False).items()}

    # ----------------------------------------------------------------------
    # Parse graph to get variables and functions
    # ----------------------------------------------------------------------
    variables = OrderedDict()
    functions = OrderedDict()

    def collect_info(func):
        # Collect information.
        function_type = func.info.type_name
        if function_type == 'Sink':
            return
        function_name = _get_unique_function_name(function_type, functions)
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            base_name = '{}_Input'.format(function_name)
            vname = _get_variable_name_or_register(i, variables, names, params,
                                                   base_name)
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            base_name = '{}_Output'.format(function_name)
            vname = _get_variable_name_or_register(o, variables, names, params,
                                                   base_name)
            functions[function_name]['outputs'].append(vname)

    sink.visit(collect_info)

    expect_batch_size = None

    # ----------------------------------------------------------------------
    # Convert variables and functions into proto
    # ----------------------------------------------------------------------
    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable.data in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            if variable_batch_size:
                # TODO: Temporarily dim 0 of shape expects to be batch size.
                if len(shape) > 0:
                    b = shape[0]
                    if expect_batch_size is None:
                        expect_batch_size = b
                    if b != expect_batch_size:
                        raise ValueError(
                            'Variable "{}" has different batch size {} (expected {})'
                            .format(v.name, b, expect_batch_size))
                    shape[0] = -1

        v.shape.dim.extend(shape)
        # ----------------------------------------------------------------------
        # Add info to variable
        # ----------------------------------------------------------------------
        # TODO: Only required for Parameter variables?
        if variable.info:
            i = v.initializer
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error

    for name, function in functions.items():
        f = n.function.add()
        if function['type'] == 'Reshape':

            shape = function['args']['shape']
            input_shape = variables[function['inputs'][0]].shape
            shape_infer_index = -1
            rest_size = 1
            for i, s in enumerate(shape):
                if s < 0:
                    if shape_infer_index >= 0:
                        raise ValueError(
                            'Reshape: shape has multiple negative values.')
                    shape_infer_index = i
                else:
                    rest_size *= s
            if shape_infer_index >= 0:
                function['args']['shape'][shape_infer_index] = int(
                    numpy.prod(input_shape) / rest_size)

            if variable_batch_size:
                # TODO: Temporarily dim 0 of shape expects to be batch size.
                b = function['args']['shape'][0]
                if expect_batch_size is None:
                    expect_batch_size = b
                if b != expect_batch_size:
                    raise ValueError(
                        'Variable "{}" has different batch size {} (expected {})'
                        .format(v.name, b, expect_batch_size))
                function['args']['shape'][0] = -1

        if function['type'] == 'Broadcast':

            shape = function['args']['shape']

            if variable_batch_size:
                # TODO: Temporarily dim 0 of shape expects to be batch size.
                b = function['args']['shape'][0]
                if expect_batch_size is None:
                    expect_batch_size = b
                if b != expect_batch_size:
                    raise ValueError(
                        'Variable "{}" has different batch size {} (expected {})'
                        .format(v.name, b, expect_batch_size))
                function['args']['shape'][0] = -1

        _create_function_nntxt(f, name, function)

    return n
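
The Reshape branch above infers a single negative dimension from the total input size. A standalone sketch of that arithmetic (the shapes are hypothetical):

import numpy

input_shape = (64, 3, 8, 8)   # 12288 elements in total (hypothetical)
shape = [-1, 192]             # one dimension left for inference
rest_size = int(numpy.prod([s for s in shape if s >= 0]))   # 192
inferred = int(numpy.prod(input_shape) / rest_size)         # 64
shape[shape.index(-1)] = inferred
print(shape)  # [64, 192]
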
Code Example #7
File: compare_with_cpu.py  Project: zwsong/nnabla
def compare_with_cpu_command(args):
    configure_progress(os.path.join(args.outdir, 'progress.txt'))

    class TrainConfig:
        pass

    class OptConfig:
        pass

    class MonConfig:
        pass

    # Load config with current context
    files = []
    files.append(args.config)

    with nn.parameter_scope('current'):
        info = load.load(files)
        parameters = get_parameters(grad_only=False)

    config = TrainConfig()
    config.global_config = info.global_config
    config.training_config = info.training_config

    config.optimizers = OrderedDict()
    for name, opt in info.optimizers.items():
        o = OptConfig()
        o.optimizer = opt
        o.data_iterator = None
        config.optimizers[name] = o

    config.monitors = OrderedDict()
    for name, mon in info.monitors.items():
        m = MonConfig()
        m.monitor = mon
        m.data_iterator = None
        config.monitors[name] = m

    # Load config with cpu context
    files = []
    files.append(args.config2)

    with nn.parameter_scope('cpu'):
        info_cpu = load.load(files)
        cpu_parameters = get_parameters(grad_only=False)

    config_cpu = TrainConfig()
    config_cpu.global_config = info_cpu.global_config
    config_cpu.training_config = info_cpu.training_config

    config_cpu.optimizers = OrderedDict()
    for name, opt in info_cpu.optimizers.items():
        o = OptConfig()
        o.optimizer = opt
        o.data_iterator = None
        config_cpu.optimizers[name] = o

    config_cpu.monitors = OrderedDict()
    for name, mon in info_cpu.monitors.items():
        m = MonConfig()
        m.monitor = mon
        m.data_iterator = None
        config_cpu.monitors[name] = m

    result_array = [['1-Correl']]

    # Profile Optimizer
    with ExitStack() as stack:
        for name, o in config.optimizers.items():
            o.data_iterator = stack.enter_context(
                o.optimizer.data_iterator())
        for name, o in config_cpu.optimizers.items():
            o.data_iterator = stack.enter_context(
                o.optimizer.data_iterator())
        result_array = compare_optimizer(
            config, parameters, config_cpu, cpu_parameters, result_array)

    # Write profiling result
    import csv
    with open(args.outdir + os.sep + 'compare_with_cpu.csv', 'w') as f:
        writer = csv.writer(f, lineterminator='\n')
        writer.writerows(result_array)

    logger.log(99, 'Compare with CPU Completed.')
    progress(None)
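
compare_with_cpu_command only reads args.config, args.config2 and args.outdir, so a programmatic invocation could be sketched as follows (the file names below are placeholders):

from argparse import Namespace

args = Namespace(config='net_gpu.nntxt',      # config evaluated with the current context
                 config2='net_cpu.nntxt',     # reference config evaluated on CPU
                 outdir='comparison_result')  # receives progress.txt and compare_with_cpu.csv
compare_with_cpu_command(args)
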
Code Example #8
File: save.py  Project: zwsong/nnabla
def create_proto(contents, include_params=False):
    proto = nnabla_pb2.NNablaProtoBuf()
    if 'global_config' in contents:
        proto.global_config.MergeFrom(
            _create_global_config(contents['global_config']['default_context'])
        )
    if 'training_config' in contents:
        proto.training_config.MergeFrom(
            _create_training_config(contents['training_config']['max_epoch'],
                                    contents['training_config'][
                                        'iter_per_epoch'],
                                    contents['training_config']['save_best']))
    networks = {}
    if 'networks' in contents:
        proto_nets = []
        for net in contents['networks']:
            networks[net['name']] = _create_network(net)
            proto_nets.append(networks[net['name']])
        proto.network.extend(proto_nets)
    datasets = {}
    if 'datasets' in contents:
        proto_datasets = []
        for d in contents['datasets']:
            if 'cache_dir' in d:
                cache_dir = d['cache_dir']
            else:
                cache_dir = None
            datasets[d['name']] = _create_dataset(d['name'],
                                                  d['uri'],
                                                  cache_dir,
                                                  d['variables'],
                                                  d['shuffle'],
                                                  d['batch_size'],
                                                  d['no_image_normalization'])
            proto_datasets.append(datasets[d['name']])
        proto.dataset.extend(proto_datasets)
    if 'optimizers' in contents:
        proto_optimizers = []
        for o in contents['optimizers']:
            proto_optimizers.append(_create_optimizer(o['name'], o['solver'],
                                                      networks[o['network']],
                                                      datasets[o['dataset']]))
        proto.optimizer.extend(proto_optimizers)
    if 'monitors' in contents:
        proto_monitors = []
        for m in contents['monitors']:
            proto_monitors.append(_create_monitor(m['name'], m['monitor'],
                                                  networks[m['network']],
                                                  datasets[m['dataset']]))
        proto.monitor.extend(proto_monitors)
    if 'executors' in contents:
        proto_executors = []
        for e in contents['executors']:
            proto_executors.append(
                _create_executor(e['name'], networks[e['network']],
                                 e['data'], e['output'], e.get('remp', {})))
        proto.executor.extend(proto_executors)

    if include_params is True:
        params = get_parameters(grad_only=False)
        for variable_name, variable in params.items():
            parameter = proto.parameter.add()
            parameter.variable_name = variable_name
            parameter.shape.dim.extend(variable.shape)
            parameter.data.extend(numpy.array(variable.d).flatten().tolist())
            parameter.need_grad = variable.need_grad

    return proto
Code Example #9
File: save.py  Project: zwsong/nnabla
def _create_network(net):
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    # List (dict: name -> Variable) of outputs.
    outputs = net['outputs']
    sink = _get_network_sink(outputs)

    # Create force name table: Variable -> name.
    names = {}
    names.update(net['names'])
    names.update(outputs)
    # Reverse dict: Variable --> Name
    names = {v: k for k, v in names.items()}

    # Create table: NdArray -> str
    # (Use Ndarray instead of Variable because parameter variable might be
    # unlinked)
    params = {v.data: k for k, v in get_parameters(grad_only=False).items()}

    # ----------------------------------------------------------------------
    # Parse graph to get variables and functions
    # ----------------------------------------------------------------------
    variables = OrderedDict()
    functions = OrderedDict()

    def collect_info(func):
        # Collect information.
        function_type = func.info.type_name
        if function_type == 'Sink':
            return
        function_name = _get_unique_function_name(function_type, functions)
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            base_name = '{}_Input'.format(function_name)
            vname = _get_variable_name_or_register(
                i, variables, names, params, base_name)
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            base_name = '{}_Output'.format(function_name)
            vname = _get_variable_name_or_register(
                o, variables, names, params, base_name)
            functions[function_name]['outputs'].append(vname)

    sink.visit(collect_info)

    # ----------------------------------------------------------------------
    # Convert variables and functions into proto
    # ----------------------------------------------------------------------
    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable.data in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            # TODO: The first dimension is always considered as batch size.
            # No problem?
            if len(shape) > 0:
                shape[0] = -1
        v.shape.dim.extend(shape)
        # ----------------------------------------------------------------------
        # Add info to variable
        # ----------------------------------------------------------------------
        # TODO: Only required for Parameter variables?
        if variable.info:
            i = v.initializer
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error

    for name, function in functions.items():
        f = n.function.add()
        _create_function_nntxt(f, name, function)

    return n
Code Example #10
File: compare_with_cpu.py  Project: zge/nnabla
def compare_with_cpu_command(args):
    configure_progress(os.path.join(args.outdir, 'progress.txt'))

    class TrainConfig:
        pass

    class OptConfig:
        pass

    class MonConfig:
        pass

    # Load config with current context
    files = []
    files.append(args.config)

    with nn.parameter_scope('current'):
        info = load.load(files)
        parameters = get_parameters(grad_only=False)

    config = TrainConfig()
    config.global_config = info.global_config
    config.training_config = info.training_config

    config.optimizers = OrderedDict()
    for name, opt in info.optimizers.items():
        o = OptConfig()
        o.optimizer = opt
        o.data_iterator = None
        config.optimizers[name] = o

    config.monitors = OrderedDict()
    for name, mon in info.monitors.items():
        m = MonConfig()
        m.monitor = mon
        m.data_iterator = None
        config.monitors[name] = m

    # Load config with cpu context
    files = []
    files.append(args.config2)

    with nn.parameter_scope('cpu'):
        info_cpu = load.load(files)
        cpu_parameters = get_parameters(grad_only=False)

    config_cpu = TrainConfig()
    config_cpu.global_config = info_cpu.global_config
    config_cpu.training_config = info_cpu.training_config

    config_cpu.optimizers = OrderedDict()
    for name, opt in info_cpu.optimizers.items():
        o = OptConfig()
        o.optimizer = opt
        o.data_iterator = None
        config_cpu.optimizers[name] = o

    config_cpu.monitors = OrderedDict()
    for name, mon in info_cpu.monitors.items():
        m = MonConfig()
        m.monitor = mon
        m.data_iterator = None
        config_cpu.monitors[name] = m

    result_array = [['1-Correl']]

    # Profile Optimizer
    with ExitStack() as stack:
        for name, o in config.optimizers.items():
            o.data_iterator = stack.enter_context(o.optimizer.data_iterator())
        for name, o in config_cpu.optimizers.items():
            o.data_iterator = stack.enter_context(o.optimizer.data_iterator())
        result_array = compare_optimizer(config, parameters, config_cpu,
                                         cpu_parameters, result_array)

    # Write profiling result
    import csv
    with open(args.outdir + os.sep + 'compare_with_cpu.csv', 'w') as f:
        writer = csv.writer(f, lineterminator='\n')
        writer.writerows(result_array)

    logger.log(99, 'Compare with CPU Completed.')
    progress(None)
    return True
Code Example #11
def _create_network(net):
    n = nnabla_pb2.Network()
    n.name = net['name']
    n.batch_size = net['batch_size']

    params = {v: k for k, v in get_parameters(grad_only=False).items()}
    variables = OrderedDict()
    functions = OrderedDict()

    def _network_recursive(func, seen):
        if func is None:
            return
        seen.add(func)
        for i in func.inputs:
            if i.parent in seen:
                continue
            _network_recursive(i.parent, seen)

        # Collect information.
        function_type = func.info.type_name
        function_name = function_name_base = function_type
        count = 2
        while function_name in functions:
            function_name = '{}_{}'.format(function_name_base, count)
            count += 1
        functions[function_name] = {
            'type': function_type,
            'args': func.info.args,
            'inputs': [],
            'outputs': []
        }
        for i in func.inputs:
            vname = _add_variable(i, variables, params,
                                  '{}_Input'.format(function_name))
            functions[function_name]['inputs'].append(vname)
        for o in func.outputs:
            vname = _add_variable(o, variables, params,
                                  '{}_Output'.format(function_name))
            functions[function_name]['outputs'].append(vname)

    seen = set()
    _network_recursive(net['variable'].parent, seen)

    for name, variable in variables.items():
        v = n.variable.add()
        v.name = name
        shape = list(numpy.array(variable.d).shape)
        if variable in params:
            v.type = 'Parameter'
        else:
            v.type = 'Buffer'
            if len(shape) > 0:
                shape[0] = -1
        v.shape.dim.extend(shape)
        if variable.info:
            i = v.initializer
            i.type = variable.info.initializer.__class__.__name__.replace(
                'Initializer', '')
            i.multiplier = 0.0
            if i.type == 'Constant':
                i.multiplier = variable.info.initializer.value
            elif i.type == 'Uniform':
                i.multiplier = -variable.info.initializer.lim[0]
            elif i.type == 'Normal':
                i.multiplier = variable.info.initializer.sigma
            else:
                pass  # TODO Error

    for name, function in functions.items():
        f = n.function.add()
        _create_function_nntxt(f, name, function)

    return n
Code Example #12
def save(filename, contents, include_params=False):
    '''save

    Save network information into protocol buffer file.

    This function stores the information in the 'contents' arg into 'filename'.

    Filename

    If the extension of the filename is '.nntxt', the contents are stored
    in a human-readable text format; if the extension is '.protobuf', the
    contents are stored as a binary-encoded protocol buffer.

    Format of contents.

    Root of contents

    ================ ==================
    Key              Type
    ================ ==================
    global_config    dict
    training_config  dict
    networks         list of networks
    datasets         list of datasets
    optimizers       list of optimizers
    monitors         list of monitors
    executors        list of executors
    ================ ==================


    global_config

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    default_context  Context            Instance of nnabla.Context
    ================ ================== =================================

    training_config

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    max_epoch        int                Training limit.
    iter_per_epoch   int                Number of iterations per epoch.
    save_best        bool               Save parameter if result is best.
    ================ ================== =================================

    network

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    name             str                Name of the network
    batch_size       int                Batch size
    variable         Variable           Output instance of nnabla.Variable.
    ================ ================== =================================

    dataset

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    name             str                Name of the dataset
    uri              str                Data location.
    cache_dir        str                Optional: Cache file location.
    variables        tuple of str       Variable names in this dataset.
    shuffle          bool               Is shuffled or not.
    batch_size       int                Batch size
    ================ ================== =================================

    optimizer

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    name             str                Name of the optimizer
    solver           Solver             Instance of nnabla.Solver
    network          str                Name of network to optimize.
    dataset          str                Name of dataset to use.
    ================ ================== =================================

    monitor

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    name             str                Name of the monitor.
    monitor          Monitor            Instance of nnabla.Monitor
    network          str                Name of network to monitor.
    dataset          str                Name of dataset to use.
    ================ ================== =================================

    executor

    ================ ================== =================================
    Key              Type               Description
    ================ ================== =================================
    name             str                Name of the executor.
    network          str                Name of network to execute.
    variables        tuple of str       Input variable names.
    ================ ================== =================================

    Args:
        filename (str): Filename to store information.
        contents (dict): Information to store.
        include_params (bool): Include parameters in the single file.
'''
    proto = nnabla_pb2.NNablaProtoBuf()
    if 'global_config' in contents:
        proto.global_config.MergeFrom(
            _create_global_config(
                contents['global_config']['default_context']))
    if 'training_config' in contents:
        proto.training_config.MergeFrom(
            _create_training_config(
                contents['training_config']['max_epoch'],
                contents['training_config']['iter_per_epoch'],
                contents['training_config']['save_best']))
    networks = {}
    if 'networks' in contents:
        proto_nets = []
        for net in contents['networks']:
            networks[net['name']] = _create_network(net)
            proto_nets.append(networks[net['name']])
        proto.network.extend(proto_nets)
    datasets = {}
    if 'datasets' in contents:
        proto_datasets = []
        for d in contents['datasets']:
            if 'cache_dir' in d:
                cache_dir = d['cache_dir']
            else:
                cache_dir = None
            datasets[d['name']] = _create_dataset(d['name'], d['uri'],
                                                  cache_dir, d['variables'],
                                                  d['shuffle'],
                                                  d['batch_size'])
            proto_datasets.append(datasets[d['name']])
        proto.dataset.extend(proto_datasets)
    if 'optimizers' in contents:
        proto_optimizers = []
        for o in contents['optimizers']:
            proto_optimizers.append(
                _create_optimizer(o['name'], o['solver'],
                                  networks[o['network']],
                                  datasets[o['dataset']]))
        proto.optimizer.extend(proto_optimizers)
    if 'monitors' in contents:
        proto_monitors = []
        for m in contents['monitors']:
            proto_monitors.append(
                _create_monitor(m['name'], m['monitor'],
                                networks[m['network']],
                                datasets[m['dataset']]))
        proto.monitor.extend(proto_monitors)
    if 'executors' in contents:
        proto_executors = []
        for e in contents['executors']:
            proto_executors.append(
                _create_executor(e['name'], networks[e['network']],
                                 e['variables']))
        proto.executor.extend(proto_executors)

    if include_params is True:
        params = get_parameters(grad_only=False)
        for variable_name, variable in params.items():
            parameter = proto.parameter.add()
            parameter.variable_name = variable_name
            parameter.shape.dim.extend(variable.shape)
            parameter.data.extend(numpy.array(variable.d).flatten().tolist())
            parameter.need_grad = variable.need_grad

    _, ext = os.path.splitext(filename)
    if ext == '.nntxt':
        with open(filename, 'w') as file:
            text_format.PrintMessage(proto, file)
    elif ext == '.protobuf':
        with open(filename, 'wb') as file:
            file.write(proto.SerializeToString())
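
A minimal usage sketch for the save() variant above, assuming nnabla is installed and this save() is in scope; the network is a throwaway example, and only the 'networks' key from the documented contents format is populated:

import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF

# Throwaway graph: one affine layer followed by softmax.
x = nn.Variable((64, 1, 28, 28))
with nn.parameter_scope('fc'):
    h = PF.affine(x, 10)
y = F.softmax(h)

contents = {
    'networks': [
        {'name': 'net1', 'batch_size': 64, 'variable': y}]}

save('net.nntxt', contents, include_params=True)  # '.protobuf' would write the binary encoding
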
Code Example #13
    def from_variable(self, leaf, output_name="output"):
        def parse_variable(v, var_num):
            def add_variable(v, v_idx):
                v_name = parameters.get(v.data, None)
                exist = False
                if not v_name:
                    v_name, exist = get_variable_name(v, v_idx)
                if not exist:
                    shape_proto = TensorShapeProto(
                        dim=[TensorShapeProto.Dim(size=d) for d in v.shape])

                    if v.parent is None:
                        inputs = []
                    else:
                        inputs = [get_func_name(v.parent)]
                    # print("Variable: {}:{}".format(v_name, inputs))
                    nodes.append(NodeDef(
                        name=v_name.encode(encoding='utf-8'),
                        op='Variable',
                        input=inputs,
                        attr={
                            'shape': AttrValue(shape=shape_proto),
                            'dtype': AttrValue(type=DT_FLOAT)
                        }
                    ))
                return v_name

            def get_unique_variable_name(v_name_base):
                v_num = 0
                v_name = v_name_base + str(v_num)
                while v_name in unique_var_names:
                    v_num += 1
                    v_name = v_name_base + str(v_num)
                unique_var_names.add(v_name)
                return v_name

            def get_variable_name(v, v_idx):
                v_name = variables.get(v, None)
                if v_name:
                    return v_name, True
                else:
                    if v.parent is None:
                        v_name_base = "Input"
                        v_name = get_unique_variable_name(v_name_base)
                    elif not nodes:
                        v_name = output_name
                    else:
                        f_name_sections = get_func_name(v.parent).split("/")
                        f_name = f_name_sections[-1]
                        f_scope = f_name_sections[:-1]
                        base_name = "variable<-{}".format(f_name)
                        v_name_base = "/".join(f_scope + [base_name])
                        v_name = get_unique_variable_name(v_name_base)

                    variables[v] = v_name
                    return v_name, False

            def get_func_name(func):
                func_name = func_names.get(func, None)
                if func_name:
                    return func_name
                name_scope = loc_var['name_scope']
                for v in func.inputs:
                    v_name = self.parameters.get(v.data, None)
                    if v_name:
                        name_scope = '/'.join(v_name.split('/')[:-1])
                        break
                if name_scope:
                    func_name_base = '/'.join([name_scope, func.name])
                else:
                    func_name_base = func.name
                func_num = 0
                func_name = func_name_base + str(func_num)
                while func_name in unique_func_names:
                    func_num += 1
                    func_name = func_name_base + str(func_num)
                unique_func_names.add(func_name)
                func_names[func] = func_name
                return func_name

            def add_func(v):
                input_names = []
                for index, v_input in enumerate(v.parent.inputs):
                    v_name = add_variable(v_input, index)
                    input_names.append(v_name)
                # print("Function: {}:{}".format(get_func_name(v.parent), input_names))
                f_name = get_func_name(v.parent)
                if f_name in func_set:
                    return False
                attrs = []
                for k, a in v.parent.info.args.items():
                    attr = "{}={}".format(k, a)
                    attrs.append(attr)
                attr_str = ','.join(attrs).encode(encoding='utf-8')
                nodes.append(NodeDef(
                    name=f_name,
                    op=v.parent.info.type_name,
                    input=input_names,
                    attr={"parameters": AttrValue(s=attr_str)}
                ))
                func_set.add(f_name)
                return True

            name_scope = loc_var['name_scope']
            if not nodes:
                add_variable(v, var_num)
            if v.parent is None:
                add_variable(v, var_num)
            else:
                if not add_func(v):
                    return
                for idx, in_var in enumerate(v.parent.inputs):
                    name_scope_stack.append(name_scope)
                    parse_variable(in_var, idx)
                    name_scope = name_scope_stack.pop()

        nodes = []
        variables = {}
        loc_var = {}
        loc_var['name_scope'] = ''
        name_scope_stack = []
        func_names = {}
        func_set = set()
        unique_func_names = set()
        unique_var_names = set()
        parameters = {v.data: k for k,
                      v in get_parameters(grad_only=False).items()}
        parse_variable(leaf, 0)
        nodes = nodes[::-1]

        current_graph = GraphDef(node=nodes, versions=VersionDef(producer=22))
        event = event_pb2.Event(
            graph_def=current_graph.SerializeToString())
        self.file_writer.add_event(event)
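
from_variable walks the graph backwards from a leaf Variable and emits a single TensorBoard GraphDef event. A hedged usage sketch, assuming this method lives on a TensorBoard-style summary writer for nnabla (the writer class and its constructor below are hypothetical, not part of the snippet above):

import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF

# Small graph to visualize.
x = nn.Variable((32, 1, 28, 28))
with nn.parameter_scope('fc'):
    y = F.relu(PF.affine(x, 10))

writer = NnablaTensorboardWriter(logdir='runs/graph')  # hypothetical class/constructor
writer.from_variable(y, output_name='y')               # leaf variable and display name
# The written event can then be inspected in TensorBoard's Graphs tab.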