Example 1
 def __init__(self,
              session,
              quantizer,
              mean_quantizer=None,
              should_update=True,
              reg=False):
     super().__init__(session, should_update)
     cls, params = object_from_params(quantizer)
     self.quantizer = cls(session, **params)
     self.reg = reg
     if mean_quantizer:
         cls, params = object_from_params(mean_quantizer)
         self.parameter_quantizers = {
             'positives_mean': cls(session, **params),
             'negatives_mean': cls(session, **params),
         }
     else:
         self.parameter_quantizers = {}
Example 2
 def _estimate_layer(self, node, info):
     input_shape = [self.shapes[p] for p in node.predecessors]
     input_shape = input_shape[0] if len(input_shape) == 1 else input_shape
     output_shape = self.shapes[node]
     try:
         func, params = object_from_params(node.params, self, 'estimate_')
     except NotImplementedError:
         func = self.generic_estimate
         params = node.params
     return func(node, info, input_shape, output_shape, params)
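The 'estimate_' prefix form dispatches on the node's type to a method of self, falling back to generic_estimate when no handler exists. As a sketch of that convention, a hypothetical handler for a node of type 'convolution' follows; the method name matches the dispatch rule, but the parameter names and the multiply-accumulate arithmetic are illustrative, not mayo's real estimator.

 class ResourceEstimator:
     # object_from_params(node.params, self, 'estimate_') resolves a node
     # of type 'convolution' to this method; when no handler exists it
     # raises NotImplementedError, triggering the fallback above.
     def estimate_convolution(
             self, node, info, input_shape, output_shape, params):
         kernel_h, kernel_w = params['kernel_size']  # illustrative key
         _, out_h, out_w, out_channels = output_shape
         in_channels = input_shape[-1]
         macs = (out_h * out_w * out_channels *
                 in_channels * kernel_h * kernel_w)
         return {'macs': macs}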
Example 3
 def __init__(self,
              session,
              quantizer,
              interval,
              count_zero=True,
              should_update=True,
              enable=True):
     super().__init__(session, should_update, enable)
     cls, params = object_from_params(quantizer)
     self.quantizer = cls(session, **params)
     self.count_zero = count_zero
     self.interval = interval
Example 4
 def learning_rate(self):
     params = self.config.train.learning_rate
     lr_class, params = object_from_params(params)
     if lr_class is tf.train.piecewise_constant:
         # `tf.train.piecewise_constant` uses the argument name 'x'
         # instead of 'global_step', just to make life more difficult
         step_name = 'x'
     else:
         step_name = 'global_step'
     params[step_name] = self.num_epochs
     log.debug('Using learning rate {!r} with params {}.'.format(
         lr_class.__name__, params))
     return lr_class(**params)
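For reference, here is a hypothetical config.train.learning_rate mapping that this accessor could consume; everything besides 'type' becomes a keyword argument of the resolved function, and 'global_step' is injected by the accessor itself, so it is omitted. The values are illustrative, not mayo defaults.

 learning_rate = {
     'type': 'tf.train.exponential_decay',
     'learning_rate': 0.1,
     'decay_steps': 30,
     'decay_rate': 0.16,
 }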
Example 5
    def _create_hyperobjects(self, layer_node, params):
        suffixes = ['regularizer', 'initializer']
        for key, p in params.items():
            if not any(key.endswith(s) for s in suffixes):
                continue
            # regularizer and initializer
            if p is None:
                params[key] = None
                continue
            cls, p = object_from_params(p)
            params[key] = cls(**p)

        def create_overrider(overriders):
            for name, p in overriders.items():
                if p.get('type'):
                    continue
                raise TypeError(
                    'We expect a mapping of name-overrider pairs, overrider '
                    'named {!r} does not have a type.'.format(name))
            if all(not p.get('_priority') for p in overriders.values()):
                log.warn(
                    'Priority not specified for a sequence of overriders '
                    'in layer {!r}, which may result in unexpected ordering.'
                    .format(layer_node.formatted_name()))
            overriders = list(reversed(sorted(
                overriders.values(), key=lambda p: p.get('_priority', 0))))
            overriders = [
                cls(session=self.session, **p)
                for cls, p in multi_objects_from_params(overriders)]
            if len(overriders) == 1:
                return overriders[0]
            return ChainOverrider(session=self.session, overriders=overriders)

        overrider_params = params.get('overrider', {})
        if not overrider_params:
            return
        for key, p in list(overrider_params.items()):
            if not p:
                del overrider_params[key]
                continue
            overriders = self._overriders.setdefault(layer_node, {})
            if key == 'gradient':
                for grad_key, grad_p in p.items():
                    q = overriders.setdefault('gradient', {})
                    p[grad_key] = q[grad_key] = create_overrider(grad_p)
                continue
            overrider_params[key] = overriders[key] = create_overrider(p)
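To make the parsing above concrete, a hypothetical params mapping for a single layer follows; the overrider names and types are illustrative, not mayo's actual schema. Each overrider spec carries a 'type' plus an optional '_priority', which fixes its position when several overriders are chained into a ChainOverrider.

 params = {
     'weights_initializer': {
         'type': 'tf.truncated_normal_initializer',
         'stddev': 0.09,
     },
     'overrider': {
         'weights': {
             # chained in order of descending '_priority'
             'pruner': {'type': 'MeanStdPruner', '_priority': 20},
             'quantizer': {'type': 'FixedPointQuantizer', '_priority': 10},
         },
     },
 }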
Example 6
File: mixed.py Project: zaf05/mayo
 def __init__(self,
              session,
              quantizers,
              index=0,
              should_update=True,
              reg_factor=0.0,
              interval=0.1):
     super().__init__(session, should_update)
     self.quantizer_maps = {}
     for key, item in dict(quantizers).items():
         cls, params = object_from_params(item)
         quantizer = cls(session, **params)
         self.quantizer_maps[key] = quantizer
     self.reg_factor = reg_factor
     # the quantizers used to construct a loss for training
     self.quantizers = quantizers
     self.picked_quantizer = list(quantizers.keys())[index]
     # keep a record of the index for updates
     self.index = index
Example 7
 def _instantiate_layer(self, node, tensors):
     # transform parameters
     params, scope = self._transformer.transform(node, node.params)
     with scope:
         tensors = self.instantiate_numeric_padding(node, tensors, params)
         layer_type = params['type']
         layer_key = '{}/{}'.format(tf.get_variable_scope().name,
                                    params['scope'])
         layer_args = self._params_to_text(params)
         log.debug('Instantiating {!r} of type {!r} with arguments:\n{}\n'
                   '  for tensor(s) {}.'.format(layer_key, layer_type,
                                                layer_args, tensors))
         # get method by its name to instantiate a layer
         try:
             func, params = object_from_params(params, self, 'instantiate_')
         except NotImplementedError:
             func = self.generic_instantiate
         # instantiation
         layer = func(node, tensors, params)
     return layer
Example 8
 def _instantiate_layer(self, node, tensors):
     func, params = object_from_params(node.params, self, 'instantiate_')
     # instantiation
     return func(node, tensors, params)
Example 9
 def optimizer(self):
     params = self.config.train.optimizer
     optimizer_class, params = object_from_params(params)
     log.debug('Using optimizer {!r}.'.format(optimizer_class.__name__))
     return optimizer_class(self.learning_rate, **params)
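A hypothetical config.train.optimizer mapping this accessor could resolve; 'type' names the optimizer class, the remaining keys become its keyword arguments, and the learning rate is passed positionally by the code above. The values are illustrative.

 optimizer = {
     'type': 'tf.train.MomentumOptimizer',
     'momentum': 0.9,
     'use_nesterov': True,
 }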
Example 10
 def _task_constructor(self):
     return object_from_params(self.config.dataset.task)
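All ten examples lean on the same contract. Below is a minimal reconstruction of that contract for illustration only, not mayo's actual implementation: mayo additionally resolves dotted import paths and offers multi_objects_from_params, as seen in Example 5. The sketch pops a 'type' key and either looks up a prefixed method on a given object or consults a caller-supplied registry.

 def object_from_params(params, obj=None, prefix='', registry=None):
     """Split a parameter mapping into a callable and its kwargs.

     Hypothetical reconstruction: pops 'type' from params; when obj and
     prefix are given, resolves the method named prefix + type on obj,
     raising NotImplementedError if it is absent; otherwise looks the
     type up in registry.  Returns (callable, remaining_params).
     """
     params = dict(params)  # avoid mutating the caller's mapping
     type_name = params.pop('type')
     if obj is not None:
         func = getattr(obj, prefix + type_name, None)
         if func is None:
             raise NotImplementedError(
                 'No method {!r} on {!r}.'.format(prefix + type_name, obj))
         return func, params
     try:
         return (registry or {})[type_name], params
     except KeyError:
         raise KeyError('Unknown type {!r}.'.format(type_name))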