Example #1
    def test_scheduler(self):
        for scheduler in ('SIMPLE', 'FUSION', 'UNKNOWN', 'SIMPLE'):
            try:
                dragon.autograph.set_scheduler(scheduler)
                if scheduler == 'FUSION':
                    self.assertEqual(config.config().graph_type, 'FusionGraph')
                else:
                    self.assertEqual(config.config().graph_type, '')
            except ValueError:
                pass
Example #2
 def test_device(self):
     major, minor = dragon.cuda.get_device_capability(0)
     self.assertGreaterEqual(major, 1 if TEST_CUDA else 0)
     self.assertGreaterEqual(minor, 0)
     dragon.cuda.set_device(0)
     self.assertEqual(dragon.cuda.current_device(), 0)
     dragon.cuda.set_default_device(1)
     self.assertEqual(config.config().device_type, 'cuda')
     self.assertEqual(config.config().device_index, 1)
     dragon.cuda.set_default_device(-1)
     self.assertEqual(config.config().device_type, 'cpu')
     self.assertEqual(config.config().device_index, 0)
Example #3
def set_scheduler(scheduler='SIMPLE'):
    """Set the scheduler for symbolic graph.

    Parameters
    ----------
    scheduler : {'SIMPLE', 'FUSION'}, optional
        The scheduler type.

    """
    if scheduler not in ('SIMPLE', 'FUSION'):
        raise ValueError('Unsupported scheduler: ' + scheduler)
    if scheduler == 'SIMPLE':
        config.config().graph_type = ''
    elif scheduler == 'FUSION':
        config.config().graph_type = 'FusionGraph'
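
A minimal usage sketch of the setter above, assuming it is exposed as dragon.autograph.set_scheduler (as called in Example #1) and that the config module shown here imports from dragon.core.framework:

import dragon
from dragon.core.framework import config  # assumed import path

dragon.autograph.set_scheduler('FUSION')           # switch to the fusion graph
assert config.config().graph_type == 'FusionGraph'
dragon.autograph.set_scheduler('SIMPLE')           # restore the default scheduler
assert config.config().graph_type == ''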
Example #4
 def test_execution(self):
     for mode in ('EAGER_MODE', 'GRAPH_MODE', 'UNKNOWN'):
         try:
             dragon.autograph.set_execution(mode)
             self.assertEqual(config.config().graph_execution, mode)
         except ValueError:
             pass
Example #5
 def _add_device(graph_def):
     """Add device."""
     cfg = config.config()
     spec = context.get_device()
     graph_def.device_option.CopyFrom(
         proto_util.get_device_option(
             spec.type, spec.index, cfg.random_seed))
Example #6
def get_global_device_option():
    """Return the global device option."""
    cfg = config.config()
    return get_device_option(
        cfg.device_type,
        cfg.device_index,
    )
Example #7
 def __init__(self, cache_key, device, **kwargs):
     self._def = None
     self._cache_key = cache_key
     self._device = device
     self._arg_device = proto_util.get_device_option('cpu')
     self._arg_device = self._arg_device.SerializeToString()
     self._seed = kwargs.get('seed', config.config().random_seed)
Example #8
    def run_backward(
        self,
        op_defs,
        targets,
        sources=None,
        input_grads=None,
        empty_grads=None,
    ):
        """Compute the gradients of input operators.

        Parameters
        ----------
        op_defs : Sequence[OperatorDef]
            The executed op defs.
        targets : Sequence[str]
            The derivative targets.
        sources : Sequence[str], optional
            The differentiated inputs.
        input_grads : Sequence[str], optional
            The input grad for targets.
        empty_grads : Sequence[str], optional
            The grads to set to empty.

        """
        cfg = config.config()
        self.RunBackward(
            op_defs,
            targets,
            sources if sources else [],
            input_grads if input_grads else [],
            empty_grads if empty_grads else [],
            cfg.graph_optimization <= 2,
            cfg.graph_verbosity > 0,
        )
Example #9
def get_device(use_default=True):
    """Return the nesting or default device."""
    spec = _GLOBAL_DEVICE_STACK.get_default()
    if spec is None:
        cfg = config.config()
        spec = device_spec.DeviceSpec(cfg.device_type, cfg.device_index)
    return spec
Example #10
def add_optimization(graph_def, level=None):
    """Add the optimization argument."""
    cfg = config.config()
    if level is None:
        level = cfg.graph_optimization
    graph_def.arg.add().CopyFrom(
        proto_util.make_argument('optimization', level))
    graph_def.graph_type = cfg.graph_type
Example #11
 def create_graph(self, graph_def):
     """Create a graph."""
     cfg = config.config()
     if cfg.graph_verbosity == 2:
         msg = '\n' + str(graph_def)[:-1]
         logging.info('\ngraph {' + msg.replace('\n', '\n  ') + '\n}\n')
     return self._impl.CreateGraph(serialization.serialize_proto(graph_def),
                                   cfg.graph_verbosity == 1)
Example #12
 def reset_parameters(self):
     """Reset the parameters."""
     numpy.random.seed(config.config().random_seed)
     for li, di, pi in itertools.product(
             range(self.num_layers),
             range(self.num_directions),
             range(self.num_gates * 2),
     ):
         self.reset_parameter(li, di, pi, 'matrix', 'orthogonal')
         self.reset_parameter(li, di, pi, 'bias', 'zero')
Example #13
def set_directory(path):
    """Set the directory for logging files.

    Parameters
    ----------
    path : str, optional
        The path of the directory.

    """
    config.config().log_dir = path
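
A short usage sketch; the module path dragon.logging and the target directory below are assumptions for illustration only:

import dragon

# Write logging files under an explicit directory instead of the default location.
dragon.logging.set_directory('/tmp/dragon_logs')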
Example #14
 def run_backward(self, op_defs, targets, grad_targets=None, sources=None):
     """Compute the gradients of operators."""
     cfg = config.config()
     self._impl.RunBackward(
         op_defs,
         targets,
         grad_targets if grad_targets else [],
         sources if sources else [],
         cfg.graph_optimization > 2,
         cfg.graph_verbosity > 0,
     )
Example #15
def add_device_option(graph_def):
    """Add the device option."""
    cfg = config.config()
    spec = context.get_device_spec()
    graph_def.device_option.CopyFrom(
        dragon_pb2.DeviceOption(
            device_type={'cpu': 0, 'cuda': 1, 'cnml': 2}[spec.type],
            device_id=spec.index,
            random_seed=cfg.random_seed))
Example #16
    def run_operator(self, op_def):
        """Run the operator.

        Parameters
        ----------
        op_def : Union[OperatorDef, Sequence[OperatorDef]]
            The ``OperatorDef`` protocol buffer.

        """
        cfg = config.config()
        if isinstance(op_def, dragon_pb2.OperatorDef):
            op_def = op_def.SerializePartialToString()
        self.RunOperator(op_def, cfg.graph_verbosity > 0)
Example #17
def set_default_device(device_index=0):
    """Set the default device.

    A valid device index should be greater than or equal to 0:

    ```python
    dragon.cuda.set_default_device(0)   # Ok
    dragon.cuda.set_default_device(-1)  # Reset to the cpu device
    ```

    Parameters
    ----------
    device_index : int
        The device index.

    """
    if device_index < 0:
        config.config().device_type = 'cpu'
        config.config().device_index = 0
    else:
        config.config().device_type = 'cuda'
        config.config().device_index = device_index
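
A usage sketch following the docstring above, again assuming config imports from dragon.core.framework; a negative index resets the default device to the CPU:

import dragon
from dragon.core.framework import config  # assumed import path

dragon.cuda.set_default_device(1)
assert config.config().device_type == 'cuda'
assert config.config().device_index == 1

dragon.cuda.set_default_device(-1)  # reset to the cpu device
assert config.config().device_type == 'cpu'
assert config.config().device_index == 0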
Example #18
    def __init__(self, **kwargs):
        """Create a ``DataTransformer``.

        Parameters
        ----------
        resize : int, optional, default=0
            The size for the shortest edge.
        padding : int, optional, default=0
            The size for zero padding on two sides.
        fill_value : int or sequence, optional, default=127
            The value(s) to fill for padding or cutout.
        crop_size : int, optional, default=0
            The size for random-or-center cropping.
        random_crop_size : int, optional, default=0
            The size for sampling-based random cropping.
        cutout_size : int, optional, default=0
            The square size for the cutout algorithm.
        mirror : bool, optional, default=False
            Whether to apply the mirror (flip horizontally).
        random_scales : Sequence[float], optional, default=(0.08, 1.)
            The range of scales to sample a crop randomly.
        random_aspect_ratios : Sequence[float], optional, default=(0.75, 1.33)
            The range of aspect ratios to sample a crop randomly.
        distort_color : bool, optional, default=False
            Whether to apply color distortion.
        inverse_color : bool, optional, default=False
            Whether to inverse channels for color images.
        phase : {'TRAIN', 'TEST'}, optional
            The optional running phase.
        seed : int, optional
            The random seed to use instead.

        """
        super(DataTransformer, self).__init__()
        self._resize = kwargs.get('resize', 0)
        self._padding = kwargs.get('padding', 0)
        self._fill_value = kwargs.get('fill_value', 127)
        self._crop_size = kwargs.get('crop_size', 0)
        self._random_crop_size = kwargs.get('random_crop_size', 0)
        self._cutout_size = kwargs.get('cutout_size', 0)
        self._mirror = kwargs.get('mirror', False)
        self._random_scales = kwargs.get('random_scales', (0.08, 1.))
        self._random_ratios = kwargs.get('random_aspect_ratios',
                                         (3. / 4., 4. / 3.))
        self._distort_color = kwargs.get('distort_color', False)
        self._inverse_color = kwargs.get('inverse_color', False)
        self._phase = kwargs.get('phase', 'TRAIN')
        self._seed = kwargs.get('seed', config.config().random_seed)
        self.q_in = self.q_out = None
        self.daemon = True
Example #19
def get_device_spec():
    """Return the device spec in current nesting."""
    dev_info = get_device_info()
    if dev_info is not None:
        return device_spec.DeviceSpec(
            dev_info['device_type'],
            dev_info['device_index'],
        )
    else:
        cfg = config.config()
        return device_spec.DeviceSpec(
            cfg.device_type,
            cfg.device_index,
        )
Example #20
def set_verbosity(level=1):
    """Set the verbosity for graph ir.

    Following levels are defined (default=0):

    * level = ``0``: Do nothing.

    * level = ``1``: Print the optimized GraphIR.

    * level = ``2``: Print the raw GraphIR.

    Parameters
    ----------
    level : int, optional, default=1
        The verbosity level.

    """
    config.config().graph_verbosity = level
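
A usage sketch mirroring the test in Example #27; the level only controls how created graphs are logged:

import dragon

dragon.autograph.set_verbosity(2)  # print the raw GraphIR when graphs are created
# ... build and run symbolic graphs here ...
dragon.autograph.set_verbosity(0)  # silence graph printing again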
Example #21
    def __init__(self, key, dev, **kwargs):
        """Create a ``Function``.

        Parameters
        ----------
        key : str
            The cache key.
        device : dragon.vm.torch.device
            The device spec.

        """
        super(Function, self).__init__()
        self._def = None
        self._cache_key = key
        self._device = dev
        self._arg_device = proto_util.get_device_option('cpu')
        self._arg_device = self._arg_device.SerializeToString()
        self._seed = kwargs.get('seed', config.config().random_seed)
Example #22
def set_optimization(level=1):
    """Set the optimization for graph ir.

    Following levels are defined (default=3):

    * level = ``0``: Do nothing.

    * level = ``1``: Eliminate the unused outputs and operators.

    * level = ``2``: Apply the inplace to inputs if available.

    * level = ``3``: Allocate the shared buffer to outputs if available.

    Parameters
    ----------
    level : int, optional, default=3
        The optimization level.

    """
    config.config().graph_optimization = level
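
A usage sketch mirroring the test in Example #30; lowering the level disables inplace rewriting and buffer sharing, which keeps intermediate outputs around while debugging:

import dragon

dragon.autograph.set_optimization(1)  # only prune unused outputs and operators
# ... create and inspect graphs here ...
dragon.autograph.set_optimization(3)  # restore buffer sharing afterwards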
Example #23
    def cuda(self, device=None):
        """Switch the buffers and parameters to cuda device.

        If ``device`` is not provided, use the value
        set by ``dragon.config.set_cuda_device()``.

        Parameters
        ----------
        device : int, optional
            The optional device index.

        Returns
        -------
        dragon.vm.torch.nn.Module
            The self.

        """
        if device is None:
            device = config.config().device_index
        return self._apply(lambda t: t.cuda(device))
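
A hedged sketch of the fallback described above; the torch-style import path and the Linear module are assumptions used only for illustration:

from dragon.vm import torch  # assumed import path for the torch-style API

m = torch.nn.Linear(4, 2)
m.cuda()   # no index given: falls back to config.config().device_index
m.cuda(1)  # or move buffers and parameters to device 1 explicitly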
Example #24
    def create_graph(self, graph_def):
        """Create the graph.

        Parameters
        ----------
        graph_def : GraphDef
            The ``GraphDef`` protocol buffer.

        Returns
        -------
        str
            The graph name.

        """
        cfg = config.config()
        if cfg.graph_verbosity == 2:
            msg = '\n' + str(graph_def)[:-1]
            logging.info('\ngraph {' + msg.replace('\n', '\n  ') + '\n}\n')
        return self.CreateGraph(
            serialization.serialize_proto(graph_def),
            cfg.graph_verbosity == 1)
Example #25
def set_execution(execution='GRAPH_MODE'):
    """Set the execution mode for graph ir.

    For changing the execution temporarily, use:

    ```python
    # Enter a context to enforce graph execution
    with dragon.graph_mode():
        pass

    # Enter a context to enforce eager execution
    with dragon.eager_mode():
        pass
    ```

    Parameters
    ----------
    execution : {'GRAPH_MODE', 'EAGER_MODE'}, optional
        The execution mode.

    """
    if execution not in ('GRAPH_MODE', 'EAGER_MODE'):
        raise ValueError('Unsupported execution: ' + execution)
    config.config().graph_execution = execution
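
A usage sketch mirroring the test in Example #4; passing anything other than the two supported modes raises ValueError, as the guard above shows:

import dragon
from dragon.core.framework import config  # assumed import path

dragon.autograph.set_execution('EAGER_MODE')
assert config.config().graph_execution == 'EAGER_MODE'
dragon.autograph.set_execution('GRAPH_MODE')  # back to the default mode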
Example #26
    def __init__(self, **kwargs):
        """Create a ``DataReader``.

        Parameters
        ----------
        dataset : class
            The dataset class to load examples.
        source : str
            The path of data source.
        part_idx : int, optional, default=0
            The index of partition to read.
        num_parts : int, optional, default=1
            The total number of partitions over dataset.
        shuffle : bool, optional, default=False
            Whether to shuffle the data.
        initial_fill : int, optional, default=1024
            The length of sampling sequence for shuffle.
        seed : int, optional
            The random seed to use instead.

        """
        super(DataReader, self).__init__()
        self._dataset = kwargs.get('dataset', None)
        self._source = kwargs.get('source', '')
        self._part_idx = kwargs.get('part_idx', 0)
        self._num_parts = kwargs.get('num_parts', 1)
        self._shuffle = kwargs.get('shuffle', False)
        self._initial_fill = kwargs.get('initial_fill', 1024) if self._shuffle else 1
        self._seed = kwargs.get('seed', config.config().random_seed)
        self._first, self._cursor, self._last = 0, 0, 0
        self._part_size = 0
        self._num_examples = 0
        self._example_buffer = []
        self._parts = []
        self.q_out = None
        self.daemon = True
Example #27
 def test_verbosity(self):
     dragon.autograph.set_verbosity(1)
     self.assertEqual(config.config().graph_verbosity, 1)
     dragon.autograph.set_verbosity(0)
Example #28
 def run_operator(self, op_def):
     """Run an operator."""
     cfg = config.config()
     if isinstance(op_def, dragon_pb2.OperatorDef):
         op_def = op_def.SerializePartialToString()
     self._impl.RunOperator(op_def, cfg.graph_verbosity > 0)
Example #29
 def __init__(self):
     super(_ThreadLocalData, self).__init__()
     cfg = config.config()
     self.mode = cfg.graph_execution
     self.is_eager = self.mode == 'EAGER_MODE'
Example #30
 def test_optimization(self):
     dragon.autograph.set_optimization(1)
     self.assertEqual(config.config().graph_optimization, 1)