Example no. 1
    def __init__(self, axis=None):
        BaseStep.__init__(self)
        NonFittableMixin.__init__(self)

        if axis is None:
            axis = -2
        self.axis = axis
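
These snippets only show constructors. As a point of reference, here is a minimal sketch (class name and transform body are assumptions, not taken from the source) of how a NonFittableMixin step like Example no. 1 is typically completed in Neuraxle: only transform() needs to be implemented, since the mixin turns fitting into a no-op.

import numpy as np
from neuraxle.base import BaseStep, NonFittableMixin


class NumpyConcatenateOnAxis(NonFittableMixin, BaseStep):  # hypothetical class name
    def __init__(self, axis=None):
        BaseStep.__init__(self)
        NonFittableMixin.__init__(self)
        if axis is None:
            axis = -2
        self.axis = axis

    def transform(self, data_inputs):
        # NonFittableMixin provides a pass-through fit, so only transform is required here.
        return np.concatenate(data_inputs, axis=self.axis)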
Example no. 2
    def __init__(self, wrapped: BaseStep, copy_op=copy.deepcopy):
        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)

        self.set_step(wrapped)
        self.steps: List[BaseStep] = []
        self.copy_op = copy_op
Example no. 3
    def __init__(self,
                 create_model,
                 create_loss,
                 create_optimizer,
                 step_saver,
                 create_inputs=None,
                 data_inputs_dtype=None,
                 expected_outputs_dtype=None,
                 print_loss=False,
                 print_func=None):
        BaseStep.__init__(self,
                          savers=[step_saver],
                          hyperparams=self.__class__.HYPERPARAMS,
                          hyperparams_space=self.__class__.HYPERPARAMS_SPACE)

        self.create_inputs = create_inputs
        self.create_model = create_model
        self.create_loss = create_loss
        self.create_optimizer = create_optimizer

        self.expected_outputs_dtype = expected_outputs_dtype
        self.data_inputs_dtype = data_inputs_dtype

        self.train_losses = []
        self.test_losses = []
        self.print_loss = print_loss
        if print_func is None:
            print_func = print
        self.print_func = print_func
Example no. 4
 def __init__(self, handle_fit_callback, handle_transform_callback,
              handle_fit_transform_callback):
     ForceMustHandleMixin.__init__(self)
     BaseStep.__init__(self)
     self.handle_fit_callback = handle_fit_callback
     self.handle_fit_transform_callback = handle_fit_transform_callback
     self.handle_transform_callback = handle_transform_callback
Example no. 5
 def __init__(self, handle_fit_callback, handle_transform_callback,
              handle_fit_transform_callback):
     HandleOnlyMixin.__init__(self)
     BaseStep.__init__(self)
     self.handle_fit_callback = handle_fit_callback
     self.handle_fit_transform_callback = handle_fit_transform_callback
     self.handle_transform_callback = handle_transform_callback
Example no. 6
 def __init__(self, seed=None, increment_seed_after_each_fit=True):
     InputAndOutputTransformerMixin.__init__(self)
     BaseStep.__init__(self)
     if seed is None:
         seed = 42
     self.seed = seed
     self.increment_seed_after_each_fit = increment_seed_after_each_fit
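
For steps built on InputAndOutputTransformerMixin such as Example no. 6, transform receives and returns a (data_inputs, expected_outputs) pair. Below is a hedged sketch of the rest of such a shuffling step; the class name, the module path of the mixin, and the transform body are assumptions, not taken from the source.

import random

from neuraxle.base import BaseStep
from neuraxle.steps.output_handlers import InputAndOutputTransformerMixin  # assumed module path


class DataShuffler(InputAndOutputTransformerMixin, BaseStep):  # hypothetical class name
    def __init__(self, seed=None, increment_seed_after_each_fit=True):
        InputAndOutputTransformerMixin.__init__(self)
        BaseStep.__init__(self)
        self.seed = 42 if seed is None else seed
        self.increment_seed_after_each_fit = increment_seed_after_each_fit

    def transform(self, data_inputs):
        # The mixin passes data inputs and expected outputs together as one tuple.
        di, eo = data_inputs
        paired = list(zip(di, eo))
        random.Random(self.seed).shuffle(paired)
        di, eo = zip(*paired)
        return list(di), list(eo)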
Example no. 7
 def __init__(self,
              scoring_function=r2_score,
              joiner=NumpyConcatenateOuterBatch()):
     MetaStepMixin.__init__(self)
     BaseStep.__init__(self)
     self.scoring_function = scoring_function
     self.joiner = joiner
Example no. 8
 def __init__(self,
              wrapped_sklearn_predictor,
              hyperparams_space: HyperparameterSpace = None,
              return_all_sklearn_default_params_on_get: bool = False,
              use_partial_fit: bool = False,
              use_predict_proba: bool = False,
              partial_fit_kwargs: dict = None):
     if not isinstance(wrapped_sklearn_predictor, BaseEstimator):
         raise ValueError(
             "The wrapped_sklearn_predictor must be an instance of scikit-learn's BaseEstimator."
         )
     self.wrapped_sklearn_predictor = wrapped_sklearn_predictor
     self.is_ensemble = isinstance(wrapped_sklearn_predictor, BaseEnsemble)
     params: dict = wrapped_sklearn_predictor.get_params()
     self._delete_base_estimator_from_dict(params)
     BaseStep.__init__(self,
                       hyperparams=params,
                       hyperparams_space=hyperparams_space)
     self.return_all_sklearn_default_params_on_get = return_all_sklearn_default_params_on_get
     self.name += "_" + wrapped_sklearn_predictor.__class__.__name__
     self.use_partial_fit: bool = use_partial_fit
     if self.use_partial_fit:
         if partial_fit_kwargs is None:
             partial_fit_kwargs = {}
         self.partial_fit_kwargs = partial_fit_kwargs
     self.use_predict_proba: bool = use_predict_proba
Example no. 9
 def __init__(self,
              hyperparams: HyperparameterSamples = None,
              hyperparams_space: HyperparameterSpace = None,
              name: str = None):
     NonFittableMixin.__init__(self)
     BaseStep.__init__(self, hyperparams, hyperparams_space, name)
     InputAndOutputTransformerMixin.__init__(self)
Example no. 10
 def __init__(self, add=1):
     NonFittableMixin.__init__(self)
     BaseStep.__init__(
         self,
         hyperparams=HyperparameterSamples({
             'add': add
         })
     )
Example no. 11
 def __init__(self, multiply_by=1):
     NonFittableMixin.__init__(self)
     BaseStep.__init__(
         self,
         hyperparams=HyperparameterSamples({
             'multiply_by': multiply_by
         })
     )
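
Examples no. 10 and 11 store their constructor argument as a hyperparameter. A minimal sketch (transform body assumed, not from the source) of how such a step reads the value back at transform time, so that set_hyperparams() can still override it:

import numpy as np
from neuraxle.base import BaseStep, NonFittableMixin
from neuraxle.hyperparams.space import HyperparameterSamples


class MultiplyBy(NonFittableMixin, BaseStep):  # hypothetical class name
    def __init__(self, multiply_by=1):
        NonFittableMixin.__init__(self)
        BaseStep.__init__(
            self,
            hyperparams=HyperparameterSamples({'multiply_by': multiply_by})
        )

    def transform(self, data_inputs):
        # Read the current value from self.hyperparams rather than from an instance attribute.
        return np.array(data_inputs) * self.hyperparams['multiply_by']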
Example no. 12
 def __init__(self, transform_callback_function, fit_callback_function, more_arguments: List = tuple(),
              transform_function=None,
              hyperparams=None):
     BaseStep.__init__(self, hyperparams)
     self.transform_function = transform_function
     self.more_arguments = more_arguments
     self.fit_callback_function = fit_callback_function
     self.transform_callback_function = transform_callback_function
Example no. 13
 def __init__(self, wrapped: BaseStep, copy_op=copy.deepcopy):
     # TODO: set params on wrapped.
     # TODO: use MetaStep*s*Mixin (plural) and review.
     BaseStep.__init__(self)
     MetaStepMixin.__init__(self)
     self.set_step(wrapped)
     self.steps: List[BaseStep] = []
     self.copy_op = copy_op
Example no. 14
 def __init__(self,
              hyperparams_space: HyperparameterSpace = None,
              output=AN_EXPECTED_OUTPUT):
     BaseStep.__init__(self,
                       hyperparams=None,
                       hyperparams_space=hyperparams_space)
     NonFittableMixin.__init__(self)
     self.output = output
Example no. 15
    def __init__(self, wrapped, epochs, fit_only=False, repeat_in_test_mode=False, cache_folder_when_no_handle=None):
        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)
        ForceHandleOnlyMixin.__init__(self, cache_folder=cache_folder_when_no_handle)

        self.repeat_in_test_mode = repeat_in_test_mode
        self.fit_only = fit_only
        self.epochs = epochs
Example no. 16
 def __init__(self,
              wrapped,
              from_data_inputs=False,
              cache_folder_when_no_handle=None):
     BaseStep.__init__(self)
     MetaStepMixin.__init__(self, wrapped)
     ForceHandleOnlyMixin.__init__(self, cache_folder_when_no_handle)
     self.from_data_inputs = from_data_inputs
Example no. 17
 def __init__(
     self,
     all_checkpointers: List[BaseCheckpointer] = None,
 ):
     BaseStep.__init__(self)
     ResumableStepMixin.__init__(self)
     IdentityHandlerMethodsMixin.__init__(self)
     self.all_checkpointers = all_checkpointers
Example no. 18
 def __init__(
         self,
         steps: NamedTupleList,
         pipeline_runner: BasePipelineRunner = BlockPipelineRunner(),
 ):
     BaseStep.__init__(self)
     TruncableSteps.__init__(self, steps)
     self.pipeline_runner: BasePipelineRunner = pipeline_runner
Example no. 19
 def __init__(self,
              sleep_time=0.1,
              hyperparams=None,
              hyperparams_space=None):
     BaseStep.__init__(self,
                       hyperparams=hyperparams,
                       hyperparams_space=hyperparams_space)
     self.sleep_time = sleep_time
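
Example no. 19 is a sleep/delay step. A hedged sketch of its remaining body (class name, base classes, and transform are assumptions) simply pauses for the configured duration and passes the data through unchanged:

import time

from neuraxle.base import BaseStep, NonFittableMixin


class Sleep(NonFittableMixin, BaseStep):  # hypothetical class name
    def __init__(self, sleep_time=0.1, hyperparams=None, hyperparams_space=None):
        BaseStep.__init__(self,
                          hyperparams=hyperparams,
                          hyperparams_space=hyperparams_space)
        self.sleep_time = sleep_time

    def transform(self, data_inputs):
        time.sleep(self.sleep_time)  # e.g. to simulate a slow step when testing timing or parallelism
        return data_inputs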
Example no. 20
    def __init__(
            self,
            pipeline: Union[BaseStep, NamedTupleList],
            validation_size: int = None,
            batch_size: int = None,
            batch_metrics: Dict[str, Callable] = None,
            shuffle_in_each_epoch_at_train: bool = True,
            seed: int = None,
            n_epochs: int = 1,
            epochs_metrics: Dict[str, Callable] = None,
            scoring_function: Callable = None,
            cache_folder: str = None,
            print_epoch_metrics=False,
            print_batch_metrics=False
    ):
        """
        :param pipeline: pipeline to wrap with an epoch repeater, a validation split wrapper, and a mini batch sequential pipeline
        :param validation_size: ratio for validation size between 0 and 1
        :param batch_size: batch size for the mini batch sequential pipeline
        :param batch_metrics: metrics to calculate for each processed mini batch
        :param shuffle_in_each_epoch_at_train: whether or not to shuffle the training data at the beginning of each epoch
        :param seed: random seed for the data shuffling that can be done at each epoch when the param shuffle_in_each_epoch_at_train is True
        :param n_epochs: number of epochs
        :param epochs_metrics: metrics to calculate for each epoch
        :param scoring_function: scoring function with two arguments (y_true, y_pred)
        :param cache_folder: cache folder to be used inside the pipeline
        :param print_epoch_metrics: whether or not to print epoch metrics
        :param print_batch_metrics: whether or not to print batch metrics
        """
        if epochs_metrics is None:
            epochs_metrics = {}
        if batch_metrics is None:
            batch_metrics = {}

        self.final_scoring_metric = scoring_function
        self.epochs_metrics = epochs_metrics
        self.n_epochs = n_epochs
        self.shuffle_in_each_epoch_at_train = shuffle_in_each_epoch_at_train
        self.batch_size = batch_size
        self.batch_metrics = batch_metrics
        self.validation_size = validation_size
        self.print_batch_metrics = print_batch_metrics
        self.print_epoch_metrics = print_epoch_metrics

        wrapped = pipeline
        wrapped = self._create_mini_batch_pipeline(wrapped)

        if shuffle_in_each_epoch_at_train:
            wrapped = TrainShuffled(wrapped=wrapped, seed=seed)

        wrapped = self._create_validation_split(wrapped)
        wrapped = self._create_epoch_repeater(wrapped)

        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)
        EvaluableStepMixin.__init__(self)
        ForceHandleMixin.__init__(self, cache_folder)
Example no. 21
 def __init__(self, axis):
     """
     Create an object that concatenates NumPy arrays on a custom axis.
     :param axis: the axis where the concatenation is performed.
     :return: NumpyConcatenateOnCustomAxis instance.
     """
     self.axis = axis
     BaseStep.__init__(self)
     NonFittableMixin.__init__(self)
Example no. 22
 def __init__(self,
              plotting_function: Callable,
              max_plotted_predictions,
              enabled=False):
     NonFittableMixin.__init__(self)
     BaseStep.__init__(self)
     self.max_plotted_predictions = max_plotted_predictions
     self.enabled = enabled
     self.plotting_function = plotting_function
Example no. 23
 def __init__(self,
              hyperparameter_optimizer: BaseHyperparameterOptimizer,
              higher_score_is_better=True,
              cache_folder_when_no_handle=None):
     BaseStep.__init__(self)
     MetaStepMixin.__init__(self, None)
     ForceHandleOnlyMixin.__init__(self, cache_folder_when_no_handle)
     self.higher_score_is_better = higher_score_is_better
     self.hyperparameter_optimizer = hyperparameter_optimizer
Example no. 24
    def __init__(self, columns_selection, n_dimension=3):
        BaseStep.__init__(self)

        col_selector = ColumnSelector2D(columns_selection=columns_selection)
        # Wrap the 2D selector once per extra dimension (max, not min, so the loop can actually run).
        for _ in range(max(0, n_dimension - 2)):
            col_selector = ForEachDataInput(col_selector)

        MetaStepMixin.__init__(self, col_selector)
        self.n_dimension = n_dimension
Example no. 25
    def __init__(
            self,
            pipeline: BaseStep,
            validation_splitter: 'BaseValidationSplitter',
            refit_trial: bool,
            scoring_callback: ScoringCallback,
            hyperparams_optimizer: BaseHyperparameterSelectionStrategy = None,
            hyperparams_repository: HyperparamsRepository = None,
            n_trials: int = 10,
            epochs: int = 1,
            callbacks: List[BaseCallback] = None,
            refit_scoring_function: Callable = None,
            print_func: Callable = None,
            cache_folder_when_no_handle=None,
            continue_loop_on_error=True
    ):
        BaseStep.__init__(self)
        ForceHandleMixin.__init__(self, cache_folder=cache_folder_when_no_handle)

        self.validation_splitter: BaseValidationSplitter = validation_splitter

        if print_func is None:
            print_func = print

        if hyperparams_optimizer is None:
            hyperparams_optimizer = RandomSearchHyperparameterSelectionStrategy()
        self.hyperparameter_optimizer: BaseHyperparameterSelectionStrategy = hyperparams_optimizer

        if hyperparams_repository is None:
            hyperparams_repository = HyperparamsJSONRepository(hyperparams_optimizer, cache_folder_when_no_handle)
        else:
            hyperparams_repository.set_strategy(hyperparams_optimizer)

        self.hyperparams_repository: HyperparamsJSONRepository = hyperparams_repository

        self.pipeline: BaseStep = pipeline
        self.print_func: Callable = print_func

        self.n_trial: int = n_trials
        self.hyperparams_repository: HyperparamsRepository = hyperparams_repository

        self.refit_scoring_function: Callable = refit_scoring_function

        self.refit_trial: bool = refit_trial

        self.error_types_to_raise = (SystemError, SystemExit, EOFError, KeyboardInterrupt) if continue_loop_on_error \
            else (Exception,)

        self.trainer = Trainer(
            epochs=epochs,
            scoring_callback=scoring_callback,
            callbacks=callbacks,
            print_func=self.print_func,
            validation_splitter=validation_splitter,
            hyperparams_repository=hyperparams_repository
        )
Example no. 26
 def __init__(self,
              wrapped,
              epochs,
              fit_only=False,
              repeat_in_test_mode=False):
     BaseStep.__init__(self)
     MetaStepMixin.__init__(self, wrapped)
     self.repeat_in_test_mode = repeat_in_test_mode
     self.fit_only = fit_only
     self.epochs = epochs
Example no. 27
    def __init__(self,
                 wrapped: BaseStep,
                 is_train_only=True,
                 cache_folder_when_no_handle=None):
        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)
        ForceHandleOnlyMixin.__init__(self,
                                      cache_folder=cache_folder_when_no_handle)

        self.is_train_only = is_train_only
Example no. 28
    def __init__(self, wrapped: BaseStep, then_unflatten: bool = True):
        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)
        ResumableStepMixin.__init__(self)
        ForceHandleMixin.__init__(self)

        self.then_unflatten = then_unflatten

        self.len_di = []
        self.len_eo = []
Example no. 29
 def __init__(self, wrapped):
     """
     Wrap a scikit-learn MetaEstimatorMixin for usage in Neuraxle.
     This class is similar to Neuraxle's SKLearnWrapper class, which can wrap a scikit-learn BaseEstimator.
     
     :param wrapped: a scikit-learn object of type "MetaEstimatorMixin". 
     """
     MetaStepMixin.__init__(self)
     BaseStep.__init__(self)
     self.wrapped_sklearn_metaestimator = wrapped  # TODO: use self.set_step of the MetaStepMixin instead?
Example no. 30
    def __init__(self, wrapped=None, scoring_function: Callable = r2_score):
        """
        Base class for validation wrappers.
        It has a scoring function to calculate the score for the validation split.

        :param scoring_function: scoring function with two arguments (y_true, y_pred)
        :type scoring_function: Callable
        """
        BaseStep.__init__(self)
        MetaStepMixin.__init__(self, wrapped)
        self.scoring_function = scoring_function