Code Example #1
# Name-resolution helpers; assumed to live in trident.backend.common.
from trident.backend.common import get_function, camel2snake

def get_metric(metric_name):
    """Resolve a metric function by name from the TensorFlow metrics module."""
    if metric_name is None:
        return None
    metric_modules = ['trident.optims.tensorflow_metrics']
    try:
        # Prefer the snake_case form of the name (e.g. 'Accuracy' -> 'accuracy').
        metric_fn = get_function(camel2snake(metric_name), metric_modules)
    except Exception:
        # Fall back to looking the name up exactly as given.
        metric_fn = get_function(metric_name, metric_modules)
    return metric_fn
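
A minimal usage sketch (hypothetical; assumes trident is installed and trident.optims.tensorflow_metrics exposes an accuracy metric):

metric_fn = get_metric('accuracy')   # snake_case name resolves directly
metric_fn2 = get_metric('Accuracy')  # camel2snake('Accuracy') -> 'accuracy'
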
Code Example #2
# Name-resolution helpers; assumed to live in trident.backend.common.
from trident.backend.common import get_function, camel2snake

def get_metric(metric_name):
    """Resolve a metric function by name from the PyTorch metrics module."""
    if metric_name is None:
        return None

    metric_modules = ['trident.optims.pytorch_metrics']
    if metric_name in __all__:
        # The enclosing module exports this name directly; look it up as-is.
        metric_fn = get_function(metric_name, metric_modules)
    else:
        try:
            # Prefer the snake_case form of the name (e.g. 'Accuracy' -> 'accuracy').
            metric_fn = get_function(camel2snake(metric_name), metric_modules)
        except Exception:
            # Fall back to looking the name up exactly as given.
            metric_fn = get_function(metric_name, metric_modules)
    return metric_fn
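
A minimal usage sketch (hypothetical names; assumes 'accuracy' is exported via the enclosing module's __all__):

metric_fn = get_metric('accuracy')   # found in __all__, looked up as-is
metric_fn2 = get_metric('Accuracy')  # not in __all__, so camel2snake is tried first
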
Code Example #3
import inspect
from functools import partial

# Name-resolution helpers; assumed to live in trident.backend.common.
from trident.backend.common import camel2snake, snake2camel, get_function, get_class
# PRelu and Layer are defined in the enclosing trident module's scope.


def get_activation(fn_name, only_layer=False):
    """Resolve an activation by name, function, class, or instance.

    Args:
        fn_name (str, callable, type, or Layer): The activation to resolve.
            Strings may be snake_case (resolved to a plain function) or
            CamelCase (resolved to a layer class and instantiated).
        only_layer (bool): If True, restrict resolution to layer classes and
            always return a layer instance.

    Returns:
        The resolved activation function or layer instance, or None if
        fn_name is None or resolution fails.

    Examples:
        >>> print(get_activation('swish'))

    """
    if fn_name is None:
        return None
    fn_modules = ['trident.layers.pytorch_activations', 'trident.backend.pytorch_ops', 'torch.nn.functional']
    trident_fn_modules = ['trident.layers.pytorch_activations', 'trident.backend.pytorch_ops']
    if only_layer:
        # Only layer classes qualify, so search the activations module alone.
        fn_modules = ['trident.layers.pytorch_activations']
        trident_fn_modules = ['trident.layers.pytorch_activations']
    try:
        if isinstance(fn_name, str):
            # Lowercase/snake_case names resolve to plain functions;
            # CamelCase names resolve to layer classes and are instantiated.
            if not only_layer and (camel2snake(fn_name) == fn_name or fn_name.lower() == fn_name):
                if fn_name == 'p_relu' or fn_name == 'prelu':
                    # PRelu has learnable parameters, so always return a layer.
                    return PRelu()
                activation_fn = get_function(fn_name, trident_fn_modules)
                return activation_fn
            else:
                try:
                    activation_fn = get_class(snake2camel(fn_name), fn_modules)
                    return activation_fn()
                except Exception:
                    activation_fn = get_class(fn_name, fn_modules)
                    return activation_fn()
        elif getattr(fn_name, '__module__', None) == 'trident.layers.pytorch_activations':
            # Already a trident activation: wrap functions, instantiate classes,
            # and pass existing layer instances through unchanged.
            if inspect.isfunction(fn_name):
                return partial(fn_name)
            elif inspect.isclass(fn_name) and fn_name.__class__.__name__ == 'type':
                return fn_name()
            elif isinstance(fn_name, Layer):
                return fn_name
        elif inspect.isfunction(fn_name) and getattr(fn_name, '__module__', None) == 'trident.backend.pytorch_ops':
            if only_layer:
                # Promote the backend function to its corresponding layer class.
                activation_layer = get_class(snake2camel(fn_name.__name__), trident_fn_modules)
                return activation_layer()
            else:
                return fn_name
        else:
            # Accept any other callable that takes one or two arguments.
            if callable(fn_name):
                result = inspect.getfullargspec(fn_name)
                if 1 <= len(result.args) <= 2:
                    return fn_name if inspect.isfunction(fn_name) else fn_name()
                else:
                    raise ValueError('Unknown activation function/class')
    except Exception as e:
        print(e)
        return None
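
A minimal usage sketch (hypothetical; assumes the trident PyTorch backend defines a leaky_relu function and a LeakyRelu layer class):

act_fn = get_activation('leaky_relu')                        # snake_case -> plain function
act_layer = get_activation('LeakyRelu')                      # CamelCase -> layer instance
act_layer2 = get_activation('leaky_relu', only_layer=True)   # force a layer instance
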
Code Example #4
# Name-resolution helper; assumed to live in trident.backend.common.
from trident.backend.common import get_function

def get_reg(reg_name):
    """Resolve a regularizer function by name from the TensorFlow regularizers module."""
    if reg_name is None:
        return None
    # Normalize the name so that e.g. 'l2' becomes 'l2_reg'.
    if '_reg' not in reg_name:
        reg_name = reg_name + '_reg'
    reg_modules = ['trident.optims.tensorflow_regularizers']
    reg_fn = get_function(reg_name, reg_modules)
    return reg_fn
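
A minimal usage sketch (hypothetical; assumes an l2_reg function exists in trident.optims.tensorflow_regularizers):

reg_fn = get_reg('l2')       # normalized to 'l2_reg' before lookup
reg_fn2 = get_reg('l2_reg')  # already suffixed, looked up as-is
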
Code Example #5
# Name-resolution helpers; assumed to live in trident.backend.common.
from trident.backend.common import get_function, camel2snake

def get_reg(reg_name):
    """Resolve a regularizer function by name from the PyTorch regularizers module."""
    if reg_name is None:
        return None
    # Normalize the name so that e.g. 'l2' becomes 'l2_reg'.
    if '_reg' not in reg_name:
        reg_name = reg_name + '_reg'
    reg_modules = ['trident.optims.pytorch_regularizers']
    reg_fn = get_function(camel2snake(reg_name), reg_modules)
    return reg_fn
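
A minimal usage sketch (hypothetical; assumes an l2_reg function exists in trident.optims.pytorch_regularizers):

reg_fn = get_reg('l2')  # becomes 'l2_reg'; camel2snake then leaves it unchanged
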
Code Example #6
import inspect
from functools import partial

# Name-resolution helpers; assumed to live in trident.backend.common.
from trident.backend.common import get_function, camel2snake

def get_initializer(initializer, **kwargs):
    """Resolve a weight initializer by name, or bind kwargs into a trident initializer."""
    if isinstance(initializer, str):
        # Look up the snake_case name in the PyTorch initializers module.
        initializer_fn = get_function(camel2snake(initializer), ['trident.backend.pytorch_initializers'])
        # Bind any extra keyword arguments into the initializer.
        return partial(initializer_fn, **kwargs) if len(kwargs) > 0 else initializer_fn
    elif inspect.isfunction(initializer) and getattr(initializer, '__module__', None) == 'trident.backend.pytorch_initializers':
        # Already a trident initializer function; just bind kwargs if given.
        return partial(initializer, **kwargs) if len(kwargs) > 0 else initializer
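
A minimal usage sketch (hypothetical; assumes a kaiming_normal initializer exists in trident.backend.pytorch_initializers, with an illustrative keyword argument):

init_fn = get_initializer('kaiming_normal')           # plain name lookup
init_fn2 = get_initializer('kaiming_normal', a=0.02)  # extra kwargs bound via partial
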