Example #1
0
 def _excepthook(self, etype, evalue, etb):
     """Custom `sys.excepthook` replacement.

     Renders a formatted traceback, exits immediately for exception
     types listed in `system.pdb.skip`, and otherwise drops into an
     ipdb post-mortem session when `system.pdb.use` is enabled.
     """
     from IPython.core import ultratb
     from mayo.util import import_from_string
     # always show a nicely formatted traceback first
     ultratb.FormattedTB()(etype, evalue, etb)
     # exception types configured to bypass the debugger entirely
     skipped_types = (
         import_from_string(name)
         for name in self.get('system.pdb.skip', []))
     if any(issubclass(etype, skipped) for skipped in skipped_types):
         sys.exit(-1)
     if self.get('system.pdb.use', True):
         import ipdb
         ipdb.post_mortem(etb)
Example #2
0
 def _check_type(self, overrider):
     """Return True if `overrider` is an instance of any of the types
     named in `self.info.type` (resolved via `import_from_string`).

     For a `Recentralizer`, the check is performed against its wrapped
     `quantizer` rather than the recentralizer itself.
     """
     target_types = [
         import_from_string(name) for name in self.info.type]
     # unwrap recentralizers: the quantizer inside is what we classify
     if isinstance(overrider, Recentralizer):
         subject = overrider.quantizer
     else:
         subject = overrider
     return any(isinstance(subject, t) for t in target_types)
Example #3
0
 def _config_layer(self, node, params):
     """Normalize the layer `params` dict for `node` in place before
     the layer is instantiated.

     Mutations performed on `params`:
     - resolves string-valued `activation_fn`/`normalizer_fn` entries
       into callables;
     - injects `is_training` into `normalizer_params` when a
       normalizer is configured;
     - composes gradient-error/activation overriders with the
       (possibly defaulted) activation function into a single
       `activation_fn`;
     - sets `scope` and upper-cases `padding` when present.
     """
     # normalizer_fn and activation_fn: resolve dotted-string names
     # into the callables they refer to
     for key in ['activation_fn', 'normalizer_fn']:
         if key not in params:
             continue
         fn = params[key]
         if isinstance(fn, str):
             fn = import_from_string(fn)
         params[key] = fn
     # insert is_training into normalizer_params so the normalizer
     # (e.g. batch norm) can switch between train/eval behavior
     if params.get('normalizer_fn', None):
         norm_params = params.setdefault('normalizer_params', {})
         norm_params['is_training'] = self.is_training
     # functions collected here are composed into one activation_fn
     activation_functions = []
     # gradient of error: only active during training
     gradient_overrider = params.get('overrider.gradient.error')
     if gradient_overrider and self.is_training:
         def gradient_fn(v):
             name = '{}/errors'.format(node.formatted_name())
             return self._apply_gradient_overrider(
                 node, name, gradient_overrider, v)
         activation_functions.append(gradient_fn)
     # activation overrider wraps the raw activations tensor
     activation_overrider = params.get('overrider.activation', None)
     if activation_overrider:
         override_fn = lambda x: activation_overrider.apply(
             node, 'activations', tf.get_variable, x)
         activation_functions.append(override_fn)
     # produce a default ReLU activation when overriders are used
     # and the layer type would normally carry one
     relu_types = [
         'convolution', 'depthwise_separable_convolution',
         'fully_connected']
     default_fn = None
     if activation_functions and node.params.type in relu_types:
         default_fn = tf.nn.relu
     # explicit activation_fn in params wins over the default;
     # activation_params are bound into it via functools.partial
     activation_fn = params.get('activation_fn', default_fn)
     if activation_fn:
         activation_params = params.pop('activation_params', {})
         activation_fn = functools.partial(
             activation_fn, **activation_params)
         activation_functions.append(activation_fn)
     # collapse the collected functions into a single callable;
     # when none were collected, params is left untouched
     if activation_functions:
         params['activation_fn'] = compose_functions(activation_functions)
     # set up other parameters
     params['scope'] = node.name
     # TF expects upper-case padding strings ('SAME'/'VALID');
     # missing or non-string padding is left alone
     try:
         params['padding'] = params['padding'].upper()
     except (KeyError, AttributeError):
         pass
Example #4
0
 def _eval(self, n):
     """Recursively evaluate a restricted expression AST node `n`.

     Supports literals (numbers, strings, name constants, lists),
     dotted names (returned as strings or resolved via `getattr`),
     calls (resolved through `import_from_string`), single-comparator
     comparisons, conditional expressions, and unary/binary/boolean
     operators looked up in `self._eval_expr_map`.

     Raises `NotImplementedError` for chained comparisons and
     `TypeError` for any unsupported node type.
     """
     # NOTE: the order of these checks matters — literal node types
     # must be handled before the operator fallthrough at the bottom.
     if isinstance(n, ast.Num):
         return n.n
     if isinstance(n, ast.Call):
         # the callee expression evaluates to a dotted-name string
         func = import_from_string(self._eval(n.func))
         return func(*(self._eval(arg) for arg in n.args))
     if isinstance(n, ast.Attribute):
         base = self._eval(n.value)
         # string bases accumulate into a dotted name; anything else
         # is a real object to fetch the attribute from
         if isinstance(base, str):
             return '{}.{}'.format(base, n.attr)
         return getattr(base, n.attr)
     if isinstance(n, ast.Name):
         return n.id
     if isinstance(n, ast.Str):
         return n.s
     if isinstance(n, ast.Compare):
         if len(n.ops) > 1 or len(n.comparators) > 1:
             raise NotImplementedError(
                 'We support only one comparator for now.')
         compare = self._eval_expr_map[type(n.ops[0])]
         return compare(self._eval(n.left), self._eval(n.comparators[0]))
     if isinstance(n, ast.IfExp):
         # evaluate only the branch selected by the test
         chosen = n.body if self._eval(n.test) else n.orelse
         return self._eval(chosen)
     if isinstance(n, ast.NameConstant):
         return n.value
     if isinstance(n, ast.List):
         return [self._eval(item) for item in n.elts]
     if not isinstance(n, (ast.UnaryOp, ast.BinOp, ast.BoolOp)):
         raise TypeError('Unrecognized operator node {}'.format(n))
     operate = self._eval_expr_map[type(n.op)]
     if isinstance(n, ast.UnaryOp):
         return operate(self._eval(n.operand))
     if isinstance(n, ast.BoolOp):
         return operate(*(self._eval(v) for v in n.values))
     # remaining case: BinOp
     return operate(self._eval(n.left), self._eval(n.right))