Example #1
    def _prepare_for_training(self, job_name=None):
        if job_name is not None:
            self._current_job_name = job_name
        else:
            base_name = self.base_tuning_job_name or base_name_from_image(
                self.estimator.train_image())
            self._current_job_name = name_from_base(
                base_name,
                max_length=self.TUNING_JOB_NAME_MAX_LENGTH,
                short=True)

        self.static_hyperparameters = {
            to_str(k): to_str(v)
            for (k, v) in self.estimator.hyperparameters().items()
        }
        for hyperparameter_name in self._hyperparameter_ranges.keys():
            self.static_hyperparameters.pop(hyperparameter_name, None)

        # For attach() to know what estimator to use for non-1P algorithms
        # (1P algorithms don't accept extra hyperparameters)
        if not isinstance(self.estimator, AmazonAlgorithmEstimatorBase):
            self.static_hyperparameters[
                self.SAGEMAKER_ESTIMATOR_CLASS_NAME] = json.dumps(
                    self.estimator.__class__.__name__)
            self.static_hyperparameters[
                self.SAGEMAKER_ESTIMATOR_MODULE] = json.dumps(
                    self.estimator.__module__)
    def _prepare_for_training(self, job_name=None, include_cls_metadata=False):
        """
        Args:
            job_name:
            include_cls_metadata:
        """
        if job_name is not None:
            self._current_job_name = job_name
        else:
            base_name = self.base_tuning_job_name or base_name_from_image(
                self.estimator.train_image()
            )
            self._current_job_name = name_from_base(
                base_name, max_length=self.TUNING_JOB_NAME_MAX_LENGTH, short=True
            )

        self.static_hyperparameters = {
            to_str(k): to_str(v) for (k, v) in self.estimator.hyperparameters().items()
        }
        for hyperparameter_name in self._hyperparameter_ranges.keys():
            self.static_hyperparameters.pop(hyperparameter_name, None)

        # For attach() to know what estimator to use for frameworks
        # (other algorithms may not accept extra hyperparameters)
        if include_cls_metadata or isinstance(self.estimator, Framework):
            self.static_hyperparameters[self.SAGEMAKER_ESTIMATOR_CLASS_NAME] = json.dumps(
                self.estimator.__class__.__name__
            )
            self.static_hyperparameters[self.SAGEMAKER_ESTIMATOR_MODULE] = json.dumps(
                self.estimator.__module__
            )
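A minimal standalone sketch of the static-hyperparameter filtering performed above, using plain dicts in place of a real estimator (the names full_hyperparameters and tunable_ranges are illustrative, not part of the SDK):

# Illustrative only: mimics how _prepare_for_training() splits hyperparameters.
full_hyperparameters = {'epochs': 10, 'learning_rate': 0.1, 'batch_size': 128}
tunable_ranges = {'learning_rate': None, 'batch_size': None}  # stand-in for _hyperparameter_ranges

# Everything is stringified, then any tunable hyperparameter is dropped so only
# the static values remain in the tuning job definition.
static_hyperparameters = {str(k): str(v) for k, v in full_hyperparameters.items()}
for name in tunable_ranges:
    static_hyperparameters.pop(name, None)

print(static_hyperparameters)  # {'epochs': '10'}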
Example #3
    def __init__(self, values):  # pylint: disable=super-init-not-called
        """Initialize a ``CategoricalParameter``.

        Args:
            values (list or object): The possible values for the hyperparameter. This input will
                be converted into a list of strings.
        """
        if isinstance(values, list):
            self.values = [to_str(v) for v in values]
        else:
            self.values = [to_str(values)]
    def as_tuning_range(self, name):
        """Represent the parameter range as a dicionary suitable for a request to
        create an Amazon SageMaker hyperparameter tuning job.

        Args:
            name (str): The name of the hyperparameter.

        Returns:
            dict[str, str]: A dictionary that contains the name and values of the hyperparameter.
        """
        return {'Name': name,
                'MinValue': to_str(self.min_value),
                'MaxValue': to_str(self.max_value)}
    def _prepare_for_training(self, job_name=None, include_cls_metadata=True):
        if job_name is not None:
            self._current_job_name = job_name
        else:
            base_name = self.base_tuning_job_name or base_name_from_image(self.estimator.train_image())
            self._current_job_name = name_from_base(base_name, max_length=self.TUNING_JOB_NAME_MAX_LENGTH, short=True)

        self.static_hyperparameters = {to_str(k): to_str(v) for (k, v) in self.estimator.hyperparameters().items()}
        for hyperparameter_name in self._hyperparameter_ranges.keys():
            self.static_hyperparameters.pop(hyperparameter_name, None)

        # For attach() to know what estimator to use for non-1P algorithms
        # (1P algorithms don't accept extra hyperparameters)
        if include_cls_metadata and not isinstance(self.estimator, AmazonAlgorithmEstimatorBase):
            self.static_hyperparameters[self.SAGEMAKER_ESTIMATOR_CLASS_NAME] = json.dumps(
                self.estimator.__class__.__name__)
            self.static_hyperparameters[self.SAGEMAKER_ESTIMATOR_MODULE] = json.dumps(self.estimator.__module__)
Example #8
    def __init__(self, values):
        if isinstance(values, list):
            self.values = [to_str(v) for v in values]
        else:
            self.values = [to_str(values)]
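A quick usage sketch of the normalization shown above, assuming the sagemaker SDK is installed; both a scalar and a list are converted to a list of strings:

from sagemaker.tuner import CategoricalParameter

# A scalar and a list are both normalized to a list of strings.
single = CategoricalParameter('relu')
several = CategoricalParameter(['relu', 'tanh', 0.5])

print(single.values)   # ['relu']
print(several.values)  # ['relu', 'tanh', '0.5']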
Example #9
    def as_tuning_range(self, name):
        return {
            'Name': name,
            'MinValue': to_str(self.min_value),
            'MaxValue': to_str(self.max_value)
        }
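For reference, a hedged illustration of the dictionary this produces for a continuous range (again assuming the sagemaker SDK; the hyperparameter name 'learning_rate' is arbitrary):

from sagemaker.tuner import ContinuousParameter

lr_range = ContinuousParameter(0.01, 0.2)
print(lr_range.as_tuning_range('learning_rate'))
# {'Name': 'learning_rate', 'MinValue': '0.01', 'MaxValue': '0.2'}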
def tuning_config(tuner, inputs, job_name=None):
    """Export Airflow tuning config from an estimator

    Args:
        tuner (sagemaker.tuner.HyperparameterTuner): The tuner to export tuning
            config from.
        inputs: Information about the training data. Please refer to the ``fit()``
            method of the associated estimator in the tuner, as this can take any of the
            following forms:

            * (str) - The S3 location where training data is saved.

            * (dict[str, str] or dict[str, sagemaker.session.s3_input]) - If using multiple
                  channels for training data, you can specify a dict mapping channel names to
                  strings or :func:`~sagemaker.session.s3_input` objects.

            * (sagemaker.session.s3_input) - Channel configuration for S3 data sources that can
                  provide additional information about the training dataset. See
                  :func:`sagemaker.session.s3_input` for full details.

            * (sagemaker.amazon.amazon_estimator.RecordSet) - A collection of
                  Amazon :class:`~Record` objects serialized and stored in S3.
                  For use with an estimator for an Amazon algorithm.

            * (list[sagemaker.amazon.amazon_estimator.RecordSet]) - A list of
                  :class:`~sagemaker.amazon.amazon_estimator.RecordSet` objects,
                  where each instance is a different channel of training data.
        job_name (str): Specify a tuning job name if needed.

    Returns:
        dict: Tuning config that can be directly used by SageMakerTuningOperator in Airflow.
    """
    train_config = training_base_config(tuner.estimator, inputs)
    hyperparameters = train_config.pop("HyperParameters", None)
    s3_operations = train_config.pop("S3Operations", None)

    if hyperparameters and len(hyperparameters) > 0:
        tuner.static_hyperparameters = {
            utils.to_str(k): utils.to_str(v) for (k, v) in hyperparameters.items()
        }

    if job_name is not None:
        tuner._current_job_name = job_name
    else:
        base_name = tuner.base_tuning_job_name or utils.base_name_from_image(
            tuner.estimator.train_image()
        )
        tuner._current_job_name = utils.name_from_base(
            base_name, tuner.TUNING_JOB_NAME_MAX_LENGTH, True
        )

    for hyperparameter_name in tuner._hyperparameter_ranges.keys():
        tuner.static_hyperparameters.pop(hyperparameter_name, None)

    train_config["StaticHyperParameters"] = tuner.static_hyperparameters

    tune_config = {
        "HyperParameterTuningJobName": tuner._current_job_name,
        "HyperParameterTuningJobConfig": {
            "Strategy": tuner.strategy,
            "HyperParameterTuningJobObjective": {
                "Type": tuner.objective_type,
                "MetricName": tuner.objective_metric_name,
            },
            "ResourceLimits": {
                "MaxNumberOfTrainingJobs": tuner.max_jobs,
                "MaxParallelTrainingJobs": tuner.max_parallel_jobs,
            },
            "ParameterRanges": tuner.hyperparameter_ranges(),
        },
        "TrainingJobDefinition": train_config,
    }

    if tuner.metric_definitions is not None:
        tune_config["TrainingJobDefinition"]["AlgorithmSpecification"][
            "MetricDefinitions"
        ] = tuner.metric_definitions

    if tuner.tags is not None:
        tune_config["Tags"] = tuner.tags

    if s3_operations is not None:
        tune_config["S3Operations"] = s3_operations

    return tune_config
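A hedged sketch of how tuning_config might be wired up; the image URI, role, metric, ranges, and S3 path are placeholders, and the resulting dict would then be handed to Airflow's SageMakerTuningOperator as its config, per the docstring above:

from sagemaker.estimator import Estimator
from sagemaker.tuner import ContinuousParameter, HyperparameterTuner
from sagemaker.workflow.airflow import tuning_config

# Placeholder estimator and tuner; image, role, and bucket are illustrative only.
estimator = Estimator(image_name='123456789012.dkr.ecr.us-east-1.amazonaws.com/my-image:latest',
                      role='SageMakerRole',
                      train_instance_count=1,
                      train_instance_type='ml.m5.xlarge')

tuner = HyperparameterTuner(estimator=estimator,
                            objective_metric_name='validation:loss',
                            objective_type='Minimize',
                            hyperparameter_ranges={'learning_rate': ContinuousParameter(0.01, 0.2)},
                            metric_definitions=[{'Name': 'validation:loss', 'Regex': 'loss=(.*?);'}],
                            max_jobs=4,
                            max_parallel_jobs=2)

# The returned dict is the tuning config passed to SageMakerTuningOperator in an Airflow DAG.
config = tuning_config(tuner, inputs='s3://my-bucket/train/', job_name='my-tuning-job')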
Example #11
    def cast_to_type(cls, value):
        return to_str(value)
def test_to_str_with_unicode_string():
    value = u'åñøthér strîng'
    assert to_str(value) == value
def test_to_str_with_native_string():
    value = 'some string'
    assert to_str(value) == value
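Finally, a small illustration of the non-string case the tests above do not cover; it assumes to_str falls back to str() for non-string values, which is what the tuner code relies on when stringifying hyperparameters:

from sagemaker.utils import to_str

# Numeric hyperparameter values are converted to their string form.
assert to_str(0.1) == '0.1'
assert to_str(128) == '128'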