def __init__(
        self,
        steps: NamedTupleList,
        pipeline_runner: BasePipelineRunner = None,
):
    """
    Create a pipeline whose execution is delegated to a pipeline runner.

    :param steps: the NamedTupleList of steps composing the pipeline.
    :param pipeline_runner: the runner that will execute the steps.
        A fresh ``BlockPipelineRunner()`` is created by default.
    """
    # The previous default (``BlockPipelineRunner()`` directly in the
    # signature) was evaluated once at definition time, so every pipeline
    # created without an explicit runner silently shared the SAME runner
    # instance. A ``None`` sentinel gives each pipeline its own runner.
    if pipeline_runner is None:
        pipeline_runner = BlockPipelineRunner()
    BaseStep.__init__(self)
    TruncableSteps.__init__(self, steps)
    self.pipeline_runner: BasePipelineRunner = pipeline_runner
def __init__(self, phase_to_callable: Dict[ExecutionPhase, BaseTransformer],
             default: OptionalType[BaseTransformer] = None):
    """
    Create a step that dispatches to a different wrapped step per execution phase.

    :param phase_to_callable: mapping from each execution phase to the step
        that should run during that phase.
    :param default: optional fallback step, appended after the phase steps.
    """
    phases, steps = zip(*phase_to_callable.items())
    # ``zip`` yields tuples; convert to a list before appending. The previous
    # code called ``append`` on the tuple, which raised ``AttributeError``
    # whenever a ``default`` step was provided.
    steps = list(steps)
    if default:
        steps.append(default)
    TruncableSteps.__init__(self, steps_as_tuple=steps)
    # Map each phase to the index of its step so dispatch is O(1).
    self.phase_to_step_index = {p: i for i, p in enumerate(phases)}
    self.default = default
def __init__(self, steps_as_tuple: NamedTupleList, joiner: NonFittableMixin = None,
             n_jobs: int = None, backend: str = "threading"):
    """
    Create a feature union.

    :param steps_as_tuple: the NamedTupleList of steps to process in parallel and to join.
    :param joiner: What will be used to join the features.
        ``NumpyConcatenateInnerFeatures()`` is used by default.
    :param n_jobs: The number of jobs for the parallelized ``joblib.Parallel``
        loop in fit and in transform.
    :param backend: The type of parallelization to do with ``joblib.Parallel``.
        Possible values: "loky", "multiprocessing", "threading", "dask" if you
        use dask, and more.
    """
    # A ``NumpyConcatenateInnerFeatures()`` default directly in the signature
    # would be created once at import time and shared by every FeatureUnion
    # instance; use a ``None`` sentinel and build a fresh joiner per instance.
    if joiner is None:
        joiner = NumpyConcatenateInnerFeatures()
    TruncableSteps.__init__(self, steps_as_tuple)
    self.joiner = joiner
    self.n_jobs = n_jobs
    self.backend = backend
def __init__(self, steps_as_tuple: NamedTupleList, joiner: BaseTransformer = None,
             n_jobs: int = None, backend: str = "threading",
             cache_folder_when_no_handle: str = None):
    """
    Create a feature union.

    :param steps_as_tuple: the NamedTupleList of steps to process in parallel and to join.
    :param joiner: What will be used to join the features.
        ``NumpyConcatenateInnerFeatures()`` is used by default.
    :param n_jobs: The number of jobs for the parallelized ``joblib.Parallel``
        loop in fit and in transform.
    :param backend: The type of parallelization to do with ``joblib.Parallel``.
        Possible values: "loky", "multiprocessing", "threading", "dask" if you
        use dask, and more.
    :param cache_folder_when_no_handle: cache folder forwarded to
        ``ForceHandleOnlyMixin`` for when no handle is used.
    """
    if joiner is None:
        joiner = NumpyConcatenateInnerFeatures()
    # Build a new list instead of appending in place: the previous
    # ``steps_as_tuple.append(...)`` mutated the caller's list, so reusing
    # the same list for two unions kept accumulating 'joiner' entries.
    steps_as_tuple = list(steps_as_tuple) + [('joiner', joiner)]
    TruncableSteps.__init__(self, steps_as_tuple)
    ForceHandleOnlyMixin.__init__(self, cache_folder=cache_folder_when_no_handle)
    self.n_jobs = n_jobs
    self.backend = backend
def join_transform(self, step: TruncableSteps, data_container: DataContainer, context: ExecutionContext) -> ZipDataContainer:
    """
    Transform the data container through ``step`` one minibatch at a time,
    then zip the per-batch results into a single ``ZipDataContainer``.

    :param step: the truncable steps to apply to each minibatch.
    :param data_container: the data container to split into minibatches.
    :param context: the execution context, pushed onto ``step`` first.
    :return: a ``ZipDataContainer`` built from all transformed minibatches.
    """
    context = context.push(step)
    # Minibatch settings come from this instance's configuration.
    batches = data_container.minibatches(
        batch_size=self.batch_size,
        keep_incomplete_batch=self.keep_incomplete_batch,
        default_value_data_inputs=self.default_value_data_inputs,
        default_value_expected_outputs=self.default_value_expected_outputs
    )
    transformed = [
        step._transform_data_container(batch, context)
        for batch in batches
    ]
    return ZipDataContainer.create_from(*transformed)
def __init__(self, steps: NamedTupleList):
    """
    Initialize by forwarding the given named steps to ``TruncableSteps``.

    :param steps: the NamedTupleList of steps to wrap.
    """
    TruncableSteps.__init__(self, steps_as_tuple=steps)
def __init__(self, preprocessing_step, postprocessing_step):
    """
    Wrap a preprocessing step and a postprocessing step as a two-step
    truncable sequence that must be handled (``ForceMustHandleMixin``).

    :param preprocessing_step: step run first.
    :param postprocessing_step: step run second.
    """
    ForceMustHandleMixin.__init__(self)
    named_steps = [
        ("preprocessing_step", preprocessing_step),
        ("postprocessing_step", postprocessing_step),
    ]
    TruncableSteps.__init__(self, named_steps)
def __init__(self, preprocessing_step, postprocessing_step):
    """
    Wrap a preprocessing step and a postprocessing step as a two-step
    handle-only truncable sequence.

    :param preprocessing_step: step run first.
    :param postprocessing_step: step run second.
    """
    named_steps = [
        ("preprocessing_step", preprocessing_step),
        ("postprocessing_step", postprocessing_step),
    ]
    TruncableSteps.__init__(self, named_steps)
    # Mixin initialized after TruncableSteps, matching the original order.
    HandleOnlyMixin.__init__(self)
def __init__(self):
    """
    Create a truncable step preloaded with the module-level hyperparameters,
    their space, and two ``SomeStepWithHyperparams`` children.
    """
    children = (SomeStepWithHyperparams(), SomeStepWithHyperparams())
    TruncableSteps.__init__(
        self,
        hyperparams=HYPERPARAMETERS,
        hyperparams_space=HYPERPARAMETERS_SPACE,
        steps_as_tuple=children,
    )
def __init__(self, steps: NamedTupleList):
    """
    Initialize the base step, then register the given named steps.

    :param steps: the NamedTupleList of steps to wrap.
    """
    # BaseStep is initialized first, matching the original order.
    BaseStep.__init__(self)
    TruncableSteps.__init__(self, steps)