Example #1
0
def CreateGraph(graph_def):
    """Create the graph in current workspace.

    Optionally logs the meta graph and/or exports it as a ``.metatxt``
    file, depending on the global options.

    Parameters
    ----------
    graph_def : GraphDef
        The definition of meta graph.

    Returns
    -------
    str
        The graph name to run.

    Raises
    ------
    ValueError
        If the export prefix directory cannot be created.

    """
    options = _cfg.GetGlobalOptions()
    if options['log_meta_graph']:
        print(graph_def)
    export_dir = options['export_meta_graph']
    if export_dir:
        try:
            # exist_ok avoids the check-then-create race of the previous
            # os.path.exists() + os.makedirs() sequence.
            os.makedirs(export_dir, exist_ok=True)
        except OSError as e:
            raise ValueError('The given prefix is invalid.') from e
        path = os.path.join(export_dir, graph_def.name + '.metatxt')
        with open(path, 'w') as f:
            f.write(str(graph_def))
        _logging.info('Export meta graph to: {}'.format(path))
    return get_default_workspace().CreateGraph(
        _stringify_proto(graph_def), options['log_optimized_graph'])
Example #2
0
def _inject_device(graph_def):
    """Inject the device info into GraphDef.

    Mutates ``graph_def`` in place: when a device is configured
    globally, copies a ``DeviceOption`` (type, id, random seed)
    onto it.

    Parameters
    ----------
    graph_def : GraphDef
        The definition of graph.

    Returns
    -------
    None

    References
    ----------
    `config.EnableCPU()`_ - How to use CPU device.

    `config.EnableCUDA(*args, **kwargs)`_ - How to use CUDA device.

    `config.SetRandomSeed(*args, **kwargs)`_ - How to set random seed.

    """
    options = _cfg.GetGlobalOptions()
    # Use '!=' (value equality), not 'is not' (identity): identity
    # comparison with a string literal is implementation-dependent and
    # raises SyntaxWarning on CPython >= 3.8.
    if options['device'] != 'none':
        supports = {'cpu': 0, 'cuda': 1, 'cnml': 2}
        device_option = _proto_def.DeviceOption()
        device_option.device_type = supports[options['device']]
        device_option.device_id = options['device_id']
        device_option.random_seed = options['random_seed']
        graph_def.device_option.CopyFrom(device_option)
Example #3
0
def _inject_optimization(graph_def, opt_level=None):
    """Inject the optimization info into GraphDef.

    Mutates ``graph_def`` in place: appends an ``optimization_level``
    argument and sets the graph type from the global options.

    Parameters
    ----------
    graph_def : GraphDef
        The definition of graph.
    opt_level : int, optional
        The optimization level.

    Returns
    -------
    None

    References
    ----------
    `config.SetDebugMode(*args, **kwargs)`_ - How the enable debug mode.

    `memonger.share_grads(*args, **kwargs)`_ - How the enable gradients sharing.

    """
    options = _cfg.GetGlobalOptions()
    if opt_level is None:
        opt_level = options['graph_optimization_level']
        # Levels >= 3 imply gradient sharing; cap at 2 when it is off.
        if opt_level >= 3 and not options['share_grads']:
            opt_level = 2
    level_arg = _proto_utils.MakeArgument('optimization_level', opt_level)
    graph_def.arg.add().CopyFrom(level_arg)
    graph_def.graph_type = options['graph_type']
Example #4
0
def IsGradsShared():
    """Return whether gradient sharing is enabled globally.

    Returns
    -------
    boolean
        ``True`` if sharing grads else ``False``.

    """
    return _cfg.GetGlobalOptions()['share_grads']
Example #5
0
 def _gen_module_def(self):
     """Build and cache the CXX operator def for this module."""
     seed = _cfg.GetGlobalOptions()['random_seed']
     # Resolve the device option first so the operator-def call below
     # stays flat and readable.
     dev_opt = _proto_utils.GetDeviceOption(
         self._device.type,
         self._device.index,
         rng_seed=seed,
     )
     self._module_def = _proto_utils.MakeCXXOperatorDef(
         name='runtime',
         uid=self.module_key,
         op_type=self.op_meta['op_type'],
         device_option=dev_opt,
         **self.op_meta['arguments']
     )
Example #6
0
def ShareGrads(enabled=True):
    """Enable gradients sharing globally.

    Parameters
    ----------
    enabled : boolean
        Whether to share grads.

    Returns
    -------
    None

    Examples
    --------
    >>> import dragon.memonger as opt
    >>> opt.ShareGrads()

    """
    # Mutate the shared global options dict in place.
    _cfg.GetGlobalOptions()['share_grads'] = enabled
Example #7
0
def Backward(
    forward_ops,
    targets,
    input_grads=None,
    ignored_grads=None,
):
    """Compute the gradients of given input operators.

    Parameters
    ----------
    forward_ops : sequence of OperatorDef
        The referring ops to generate gradients.
    targets : sequence or str
        The solving targets.
    input_grads : sequence of str, optional
        The external input grads.
    ignored_grads : sequence of str, optional
        The grads that are explicitly ignored.

    Returns
    -------
    None

    """
    options = _cfg.GetGlobalOptions()

    # 'True if X else False' is a redundant idiom; bool() says the same.
    # Either logging flag forces the workspace to log the backward pass.
    required_logging = bool(
        options['log_optimized_graph'] or options['log_meta_graph'])

    get_default_workspace().Backward(
        forward_ops,
        targets,
        input_grads if input_grads else [],
        ignored_grads if ignored_grads else [],
        options['share_grads'],
        required_logging,
    )
Example #8
0
def GetGlobalDeviceOption():
    """Return a DeviceOption built from the global device settings."""
    opts = _cfg.GetGlobalOptions()
    return GetDeviceOption(opts['device'], opts['device_id'])