Code example #1
    def _create_tuner(self, pipeline):
        # Build an MLPipeline to get the tunables and the default params
        mlpipeline = MLPipeline.from_dict(self.template_dict)
        tunable_hyperparameters = mlpipeline.get_tunable_hyperparameters()

        tunables = []
        tunable_keys = []
        for block_name, params in tunable_hyperparameters.items():
            for param_name, param_details in params.items():
                key = (block_name, param_name)
                # Map the declared hyperparameter type onto the name that
                # HyperParameter expects, keeping the original when there is
                # no mapping for it.
                param_type = param_details['type']
                param_type = PARAM_TYPES.get(param_type, param_type)
                if param_type == 'bool':
                    param_range = [True, False]
                else:
                    # Numeric types declare a 'range'; categorical ones 'values'.
                    param_range = (param_details.get('range')
                                   or param_details.get('values'))

                value = HyperParameter(param_type, param_range)
                tunables.append((key, value))
                tunable_keys.append(key)

        # Create the tuner
        LOGGER.info('Creating %s tuner', self._tuner_class.__name__)

        self.tuner = self._tuner_class(tunables)

        if pipeline:
            try:
                # Add the default params and the score obtained by them to the tuner.
                default_params = defaultdict(dict)
                hyperparameters = pipeline.pipeline.get_hyperparameters()
                for block_name, params in hyperparameters.items():
                    for param, value in params.items():
                        key = (block_name, param)
                        if key in tunable_keys:
                            if value is None:
                                raise ValueError('None value is not supported')

                            default_params[key] = value

                if pipeline.rank is not None:
                    self.tuner.add(default_params, 1 - pipeline.rank)

            except ValueError:
                pass
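
Both examples lean on names defined elsewhere in the module: MLPipeline, HyperParameter, PARAM_TYPES, LOGGER, defaultdict and self._tuner_class. The following is a minimal sketch of what that surrounding context could look like, assuming the legacy btb (baytune) API that exposes HyperParameter and GP-style tuners together with mlblocks; the concrete PARAM_TYPES mapping and the choice of GP as the tuner class are assumptions, not taken from the examples.

import logging
from collections import defaultdict

from btb import HyperParameter        # hyperparameter spec consumed by the tuner
from btb.tuning import GP             # one possible tuner class (assumption)
from mlblocks import MLPipeline

LOGGER = logging.getLogger(__name__)

# Assumed mapping from the type names used in the pipeline template to the
# type names that HyperParameter understands.
PARAM_TYPES = {
    'str': 'string',
    'bool': 'bool',
    'int': 'int',
    'float': 'float',
}
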
Code example #2
    def _get_tuner(self, pipeline, template_dict):
        # Build an MLPipeline to get the tunables and the default params
        mlpipeline = MLPipeline.from_dict(template_dict)

        tunables = []
        tunable_keys = []
        tunable_hyperparameters = mlpipeline.get_tunable_hyperparameters()
        for block_name, params in tunable_hyperparameters.items():
            for param_name, param_details in params.items():
                key = (block_name, param_name)
                param_type = param_details['type']
                param_type = PARAM_TYPES.get(param_type, param_type)
                if param_type == 'bool':
                    param_range = [True, False]
                else:
                    param_range = (param_details.get('range')
                                   or param_details.get('values'))

                value = HyperParameter(param_type, param_range)
                tunables.append((key, value))
                tunable_keys.append(key)

        # Create the tuner
        LOGGER.info('Creating %s tuner', self._tuner_class.__name__)
        tuner = self._tuner_class(tunables)

        if pipeline:
            # Add the default params and the score obtained by the default pipeline to the tuner.
            default_params = defaultdict(dict)
            hyperparameters = pipeline.pipeline.get_hyperparameters()
            for block_name, params in hyperparameters.items():
                for param, value in params.items():
                    key = (block_name, param)
                    if key in tunable_keys:
                        # default_params[key] = 'None' if value is None else value
                        default_params[key] = value

            tuner.add(default_params, 1 - pipeline.rank)

        return tuner
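
The tuner returned by _get_tuner would typically drive a propose / evaluate / add loop. The sketch below shows one hypothetical way to wire it up: the tune function, the evaluate_pipeline callback and the iteration count are illustrative names, while propose(), add() and the (block_name, param_name) key format come from the examples above.

from collections import defaultdict

from mlblocks import MLPipeline


def tune(tuner, template_dict, evaluate_pipeline, iterations=10):
    """Hypothetical tuning loop around a tuner built by _get_tuner."""
    best_score = None
    best_proposal = None

    for _ in range(iterations):
        # The proposal maps (block_name, param_name) tuples to candidate values.
        proposal = tuner.propose()

        # Regroup the flat proposal into the nested format that
        # MLPipeline.set_hyperparameters expects.
        hyperparameters = defaultdict(dict)
        for (block_name, param_name), value in proposal.items():
            hyperparameters[block_name][param_name] = value

        pipeline = MLPipeline.from_dict(template_dict)
        pipeline.set_hyperparameters(dict(hyperparameters))

        # evaluate_pipeline is a placeholder for whatever cross-validation
        # routine produces a score to maximize.
        score = evaluate_pipeline(pipeline)

        # Report the result back so the tuner can refine later proposals.
        tuner.add(proposal, score)

        if best_score is None or score > best_score:
            best_score = score
            best_proposal = proposal

    return best_proposal, best_score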