Esempio n. 1
0
 def __init__(
         self,
         steps: NamedTupleList,
         pipeline_runner: BasePipelineRunner = None,
 ):
     """
     Create a pipeline from the given named steps.

     :param steps: the NamedTupleList of steps that make up the pipeline.
     :param pipeline_runner: the runner used to execute the steps.
         ``BlockPipelineRunner()`` is used by default.
     """
     # A default of ``BlockPipelineRunner()`` in the signature would be
     # instantiated once at definition time and shared by every pipeline
     # (mutable default argument pitfall); build a fresh one per instance.
     if pipeline_runner is None:
         pipeline_runner = BlockPipelineRunner()
     BaseStep.__init__(self)
     TruncableSteps.__init__(self, steps)
     self.pipeline_runner: BasePipelineRunner = pipeline_runner
Esempio n. 2
0
 def __init__(self,
              phase_to_callable: Dict[ExecutionPhase, BaseTransformer],
              default: OptionalType[BaseTransformer] = None):
     """
     Map each execution phase to the step that handles it.

     :param phase_to_callable: mapping from ExecutionPhase to the step to run in that phase.
     :param default: optional fallback step, appended after the phase steps.
     """
     phases, steps = zip(*phase_to_callable.items())
     # ``zip`` yields tuples; convert so that ``append`` works — the original
     # raised AttributeError whenever ``default`` was provided.
     steps = list(steps)
     # Identity check: a falsy-but-valid transformer must still be appended.
     if default is not None:
         steps.append(default)
     TruncableSteps.__init__(self, steps_as_tuple=steps)
     self.phase_to_step_index = {p: i for i, p in enumerate(phases)}
     self.default = default
Esempio n. 3
0
    def __init__(self,
                 steps_as_tuple: NamedTupleList,
                 joiner: NonFittableMixin = None,
                 n_jobs: int = None,
                 backend: str = "threading"):
        """
        Create a feature union.

        :param steps_as_tuple: the NamedTupleList of steps to process in parallel and to join.
        :param joiner: What will be used to join the features. ``NumpyConcatenateInnerFeatures()`` is used by default.
        :param n_jobs: The number of jobs for the parallelized ``joblib.Parallel`` loop in fit and in transform.
        :param backend: The type of parallelization to do with ``joblib.Parallel``. Possible values: "loky", "multiprocessing", "threading", "dask" if you use dask, and more.
        """
        # Avoid a shared mutable default: ``NumpyConcatenateInnerFeatures()``
        # in the signature would be created once and reused by every instance.
        if joiner is None:
            joiner = NumpyConcatenateInnerFeatures()
        TruncableSteps.__init__(self, steps_as_tuple)
        self.joiner = joiner
        self.n_jobs = n_jobs
        self.backend = backend
Esempio n. 4
0
 def __init__(self,
              steps_as_tuple: NamedTupleList,
              joiner: BaseTransformer = None,
              n_jobs: int = None,
              backend: str = "threading",
              cache_folder_when_no_handle: str = None):
     """
     Create a feature union.

     :param steps_as_tuple: the NamedTupleList of steps to process in parallel and to join.
     :param joiner: What will be used to join the features. ``NumpyConcatenateInnerFeatures()`` is used by default.
     :param n_jobs: The number of jobs for the parallelized ``joblib.Parallel`` loop in fit and in transform.
     :param backend: The type of parallelization to do with ``joblib.Parallel``. Possible values: "loky", "multiprocessing", "threading", "dask" if you use dask, and more.
     :param cache_folder_when_no_handle: cache folder forwarded to ``ForceHandleOnlyMixin``.
     """
     if joiner is None:
         joiner = NumpyConcatenateInnerFeatures()
     # Copy before appending: the original mutated the caller's list, so
     # reusing the same ``steps_as_tuple`` added a 'joiner' entry each time.
     steps_as_tuple = list(steps_as_tuple) + [('joiner', joiner)]
     TruncableSteps.__init__(self, steps_as_tuple)
     ForceHandleOnlyMixin.__init__(self,
                                   cache_folder=cache_folder_when_no_handle)
     self.n_jobs = n_jobs
     self.backend = backend
Esempio n. 5
0
 def __init__(self, steps: NamedTupleList):
     """Initialize this truncable sequence from the given named steps."""
     TruncableSteps.__init__(self, steps)
Esempio n. 6
0
 def __init__(self, preprocessing_step, postprocessing_step):
     """
     Wrap a preprocessing step and a postprocessing step as named sub-steps.

     :param preprocessing_step: step executed before the wrapped work.
     :param postprocessing_step: step executed after the wrapped work.
     """
     ForceMustHandleMixin.__init__(self)
     named_steps = [
         ("preprocessing_step", preprocessing_step),
         ("postprocessing_step", postprocessing_step),
     ]
     TruncableSteps.__init__(self, named_steps)
Esempio n. 7
0
 def __init__(self, preprocessing_step, postprocessing_step):
     """
     Build a handle-only step pair from a preprocessing and a postprocessing step.

     :param preprocessing_step: step registered under the name "preprocessing_step".
     :param postprocessing_step: step registered under the name "postprocessing_step".
     """
     step_pairs = [
         ("preprocessing_step", preprocessing_step),
         ("postprocessing_step", postprocessing_step),
     ]
     TruncableSteps.__init__(self, step_pairs)
     HandleOnlyMixin.__init__(self)
Esempio n. 8
0
 def __init__(self):
     """Create two ``SomeStepWithHyperparams`` sub-steps with preset hyperparams and space."""
     inner_steps = (SomeStepWithHyperparams(), SomeStepWithHyperparams())
     TruncableSteps.__init__(
         self,
         hyperparams=HYPERPARAMETERS,
         hyperparams_space=HYPERPARAMETERS_SPACE,
         steps_as_tuple=inner_steps,
     )
Esempio n. 9
0
 def __init__(self, steps: NamedTupleList):
     """Initialize the base step, then register the given named steps."""
     BaseStep.__init__(self)
     TruncableSteps.__init__(self, steps_as_tuple=steps)