def __init__(
    self,
    params: Union[str, List[str]],
    init_sparsity: float,
    final_sparsity: float,
    start_epoch: float,
    end_epoch: float,
    update_frequency: float,
    inter_func: str = "cubic",
    log_types: Union[str, List[str]] = ALL_TOKEN,
    mask_type: Union[str, List[int]] = "unstructured",
    leave_enabled: bool = True,
    **kwargs,
):
    kwargs["min_frequency"] = kwargs.get("min_frequency", -1.0)
    super().__init__(
        log_types=log_types,
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        update_frequency=update_frequency,
        **kwargs,
    )
    self._params = validate_str_iterable(
        params, "{} for params".format(self.__class__.__name__)
    )  # type: List[str]
    self._init_sparsity = init_sparsity
    self._final_sparsity = final_sparsity
    self._leave_enabled = convert_to_bool(leave_enabled)
    self._inter_func = inter_func
    self._mask_type = mask_type

    self.validate()
def __init__(
    self,
    lr_class: str,
    lr_kwargs: Dict,
    init_lr: float,
    start_epoch: float,
    end_epoch: float = -1.0,
    update_frequency: float = -1.0,
    log_types: Union[str, List[str]] = ALL_TOKEN,
    constant_logging: bool = False,
):
    super().__init__(
        lr_class=lr_class,
        lr_kwargs=lr_kwargs,
        init_lr=init_lr,
        log_types=log_types,
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        update_frequency=-1.0,  # note: the update_frequency arg is accepted but not forwarded
        end_comparator=-1,
    )
    self._lr_scheduler = None
    self._base_lr_set = False
    self._last_scheduler_epoch = math.floor(start_epoch)
    self._constant_logging = convert_to_bool(constant_logging)
    self._double_step = False
    self._last_logged_lr = None
    self._last_logged_epoch = None
    self._scheduler_steps = 0
    self.validate()
def __init__(
    self,
    params: Union[str, List[str]],
    trainable: bool,
    params_strict: bool = True,
    start_epoch: float = -1.0,
    end_epoch: float = -1.0,
):
    super().__init__(
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        end_comparator=-1,
    )
    self._start_epoch = start_epoch
    self._params = validate_str_iterable(
        params, "{} for params".format(self.__class__.__name__)
    )
    self._trainable = convert_to_bool(trainable)
    self._params_strict = convert_to_bool(params_strict)
    self._module_params = []  # type: List[Parameter]
    self._original = []
def __init__(
    self,
    params: Union[str, List[str]],
    trainable: bool,
    params_strict: bool = True,
    start_epoch: float = -1.0,
    end_epoch: float = -1.0,
):
    super(TrainableParamsModifier, self).__init__(
        start_epoch=-1,
        end_epoch=-1,
        end_comparator=-1,
    )
    self._params = self._validate_params(params)
    self._trainable = convert_to_bool(trainable)
    self._params_strict = convert_to_bool(params_strict)
    self._vars_to_trainable_orig = {}
    self.validate()
def __init__(
    self,
    params: Union[str, List[str]],
    init_sparsity: float,
    final_sparsity: float,
    start_epoch: float,
    end_epoch: float,
    update_frequency: float,
    inter_func: str = "cubic",
    log_types: Union[str, List[str]] = ALL_TOKEN,
    mask_type: Union[str, List[int], PruningMaskCreator] = "unstructured",
    leave_enabled: bool = True,
):
    super(GMPruningModifier, self).__init__(
        log_types=log_types,
        start_epoch=start_epoch,
        min_start=-1.0,
        end_epoch=end_epoch,
        min_end=0.0,
        end_comparator=1,
        update_frequency=update_frequency,
        min_frequency=-1.0,
    )
    self._params = validate_str_iterable(
        params, "{} for params".format(self.__class__.__name__)
    )  # type: List[str]
    self._layer_names = [get_layer_name_from_param(p) for p in self._params]
    self._init_sparsity = init_sparsity
    self._final_sparsity = final_sparsity
    self._leave_enabled = convert_to_bool(leave_enabled)
    self._inter_func = inter_func
    self._mask_type = mask_type
    self._mask_creator = mask_type
    if not isinstance(mask_type, PruningMaskCreator):
        self._mask_creator = load_mask_creator(mask_type)
    self._prune_op_vars = None
    self._update_ready = None
    self._sparsity = None
    self._mask_initializer = None
    self._masked_layers = []

    self.validate()
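# Hedged usage sketch, not part of the original source: constructing the
# GMPruningModifier above with illustrative values. The "re:.*weight" param
# selector and all epoch/sparsity numbers are assumptions for demonstration.
pruning_modifier = GMPruningModifier(
    params=["re:.*weight"],
    init_sparsity=0.05,
    final_sparsity=0.85,
    start_epoch=0.0,
    end_epoch=30.0,
    update_frequency=1.0,
    inter_func="cubic",
    mask_type="unstructured",
    leave_enabled=True,
)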
def __init__(
    self,
    params: Union[str, List[str]],
    trainable: bool,
    params_strict: bool = True,
    start_epoch: float = -1.0,
    end_epoch: float = -1.0,
    **kwargs,
):
    kwargs["end_comparator"] = kwargs.get("end_comparator", -1)
    super(TrainableParamsModifier, self).__init__(
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        **kwargs,
    )
    self._params = validate_str_iterable(
        params, "{} for params".format(self.__class__.__name__)
    )
    self._trainable = convert_to_bool(trainable)
    self._params_strict = convert_to_bool(params_strict)
    self._vars_to_trainable_orig = {}
    self.validate()
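# Hedged usage sketch, not part of the original source: using the
# TrainableParamsModifier above to freeze matched parameters for the full
# run. The "re:.*bias" selector and epoch values are illustrative assumptions.
freeze_modifier = TrainableParamsModifier(
    params=["re:.*bias"],
    trainable=False,
    params_strict=True,
    start_epoch=0.0,
    end_epoch=-1.0,
)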
def __init__(
    self,
    init_sparsity: float,
    final_sparsity: float,
    start_epoch: float,
    end_epoch: float,
    update_frequency: float,
    params: Union[str, List[str]],
    leave_enabled: bool = True,
    inter_func: str = "cubic",
    log_types: Union[str, List[str]] = ALL_TOKEN,
    mask_type: Union[str, List[int], PruningMaskCreator] = "unstructured",
    global_sparsity: bool = False,
):
    super().__init__(
        log_types=log_types,
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        update_frequency=update_frequency,
        min_end=0.0,
        end_comparator=1,
    )
    self._init_sparsity = init_sparsity
    self._final_sparsity = final_sparsity
    self._params = validate_str_iterable(
        params, "{} for params".format(self.__class__.__name__)
    )
    self._leave_enabled = convert_to_bool(leave_enabled)
    self._inter_func = inter_func
    self._mask_type = mask_type
    self._mask_creator = mask_type
    if not isinstance(mask_type, PruningMaskCreator):
        self._mask_creator = load_mask_creator(mask_type)
    self._global_sparsity = global_sparsity
    self._module_masks = None  # type: ModuleParamPruningMask
    self._applied_sparsity = None
    self._last_logged_sparsity = None
    self._last_logged_epoch = None
    self._analyzers = None
    self._non_serializable_props = {}

    self.validate()
def __init__(
    self,
    weight_decay: float,
    start_epoch: float = -1.0,
    param_groups: Union[List[int], None] = None,
    end_epoch: float = -1.0,
    log_types: Union[str, List[str]] = ALL_TOKEN,
    constant_logging: bool = False,
):
    super().__init__(
        start_epoch=start_epoch,
        end_epoch=-1,
        log_types=log_types,
        end_comparator=-1,
    )
    self._weight_decay = weight_decay
    self._param_groups = param_groups
    self._constant_logging = convert_to_bool(constant_logging)
    self._update_since_last_log = False
def __init__(
    self,
    learning_rate: Union[float, None],
    start_epoch: float = -1.0,
    end_epoch: float = -1.0,
    log_types: Union[str, List[str]] = ALL_TOKEN,
    constant_logging: bool = False,
):
    super().__init__(
        learning_rate=learning_rate,
        log_types=log_types,
        start_epoch=start_epoch,
        end_epoch=-1,
        end_comparator=None,
    )
    self._lr_set = False
    self._applied = -1.0
    self._constant_logging = convert_to_bool(constant_logging)
    self._last_logged_lr = None
    self._last_logged_epoch = None
def __init__(
    self,
    layers: Union[str, List[str]],
    alpha: Union[float, List[float]],
    layer_normalized: bool = False,
    reg_func: str = "l1",
    reg_tens: str = "inp",
    start_epoch: float = -1.0,
    end_epoch: float = -1.0,
):
    super().__init__(
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        end_comparator=-1,
    )
    self._layers = validate_str_iterable(
        layers, "{} for layers".format(self.__class__.__name__)
    )
    self._alpha = alpha
    self._layer_normalized = convert_to_bool(layer_normalized)
    self._reg_func = reg_func
    self._reg_tens = reg_tens
    self._trackers = []  # type: List[ASLayerTracker]

    self.validate()
def __init__(
    self,
    learning_rate: Union[float, None],
    param_groups: Optional[List[int]] = None,
    start_epoch: float = -1.0,
    end_epoch: float = -1.0,
    log_types: Union[str, List[str]] = ALL_TOKEN,
    constant_logging: bool = False,
):
    super(SetLearningRateModifier, self).__init__(
        learning_rate=learning_rate,
        log_types=log_types,
        start_epoch=start_epoch,
        end_epoch=-1,
        end_comparator=None,
    )
    self._param_groups = param_groups
    self._lr_set = False
    self._applied = -1.0
    self._constant_logging = convert_to_bool(constant_logging)
    self._last_logged_lr = None
    self._last_logged_epoch = None
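# Hedged usage sketch, not part of the original source: pinning the learning
# rate via the SetLearningRateModifier above. The 0.01 value and start epoch
# are illustrative assumptions.
set_lr_modifier = SetLearningRateModifier(
    learning_rate=0.01,
    param_groups=None,
    start_epoch=5.0,
)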
def __init__(
    self,
    init_sparsity: float,
    final_sparsity: float,
    start_epoch: float,
    end_epoch: float,
    update_frequency: float,
    params: Union[str, List[str]],
    leave_enabled: bool = True,
    inter_func: str = "cubic",
    log_types: Union[str, List[str]] = ALL_TOKEN,
    mask_type: Union[str, List[int], PruningMaskCreator] = "unstructured",
    global_sparsity: bool = False,
):
    super().__init__(
        params=params,
        start_epoch=start_epoch,
        end_epoch=end_epoch,
        end_comparator=-1,
        update_frequency=update_frequency,
        log_types=log_types,
    )
    self._init_sparsity = init_sparsity
    self._final_sparsity = final_sparsity
    self._leave_enabled = convert_to_bool(leave_enabled)
    self._inter_func = inter_func
    self._mask_type = mask_type
    self._mask_creator = (
        mask_type
        if isinstance(mask_type, PruningMaskCreator)
        else load_mask_creator(mask_type)
    )
    self._global_sparsity = global_sparsity
    self._applied_sparsity = None
    self._last_logged_sparsity = None
    self._non_serializable_props = {}

    self.validate()
# The parametrize cases below are assumed for illustration; the original
# snippet referenced test_bool/output without showing where they come from.
import pytest


@pytest.mark.parametrize(
    "test_bool,output",
    [
        (True, True),
        (False, False),
        (1, True),
        (0, False),
    ],
)
def test_convert_to_bool(test_bool, output):
    converted = convert_to_bool(test_bool)
    assert converted == output