Example #1
import logging

# Note: create_parser() and the _utils helper module are defined/imported elsewhere in the component's source file.
def main(argv=None):
    parser = create_parser()
    args = parser.parse_args(argv)

    logging.getLogger().setLevel(logging.INFO)
    client = _utils.get_sagemaker_client(args.region, args.endpoint_url)

    logging.info('Submitting Training Job to SageMaker...')
    job_name = _utils.create_training_job(client, vars(args))
    logging.info('Job request submitted. Waiting for completion...')
    _utils.wait_for_training_job(client, job_name)

    image = _utils.get_image_from_job(client, job_name)
    model_artifact_url = _utils.get_model_artifacts_from_job(client, job_name)
    logging.info('Get model artifacts %s from training job %s.',
                 model_artifact_url, job_name)

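    # Persist the outputs as files so the surrounding pipeline system can read them back as component outputs.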
    with open('/tmp/model_artifact_url.txt', 'w') as f:
        f.write(model_artifact_url)
    with open('/tmp/job_name.txt', 'w') as f:
        f.write(job_name)
    with open('/tmp/training_image.txt', 'w') as f:
        f.write(image)

    logging.info('Job completed.')
Example #2
  def test_create_training_job(self):
    mock_client = MagicMock()
    mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-job'])
    response = _utils.create_training_job(mock_client, vars(mock_args))

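    # The helper should translate the parsed CLI arguments into a single CreateTrainingJob API call.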
    mock_client.create_training_job.assert_called_once_with(
      AlgorithmSpecification={'TrainingImage': 'test-image', 'TrainingInputMode': 'File'},
      EnableInterContainerTrafficEncryption=False,
      EnableManagedSpotTraining=False,
      EnableNetworkIsolation=True,
      HyperParameters={},
      InputDataConfig=[{'ChannelName': 'train',
                        'DataSource': {'S3DataSource': {'S3Uri': 's3://fake-bucket/data', 'S3DataType': 'S3Prefix', 'S3DataDistributionType': 'FullyReplicated'}},
                        'ContentType': '',
                        'CompressionType': 'None',
                        'RecordWrapperType': 'None',
                        'InputMode': 'File'
                      }],
      OutputDataConfig={'KmsKeyId': '', 'S3OutputPath': 'test-path'},
      ResourceConfig={'InstanceType': 'ml.m4.xlarge', 'InstanceCount': 1, 'VolumeSizeInGB': 50, 'VolumeKmsKeyId': ''},
      RoleArn='arn:aws:iam::123456789012:user/Development/product_1234/*',
      StoppingCondition={'MaxRuntimeInSeconds': 3600},
      Tags=[],
      TrainingJobName='test-job'
    )
    self.assertEqual(response, 'test-job')
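
The names `self.parser` and `required_args` belong to the surrounding test class, which this excerpt omits. Below is a minimal sketch of what that fixture could look like, reconstructed from the values asserted in the expected call; the real suite's flag list and values may differ.

# Hypothetical fixture, inferred from the asserted CreateTrainingJob call above.
# The parser itself would be built once in setUp/setUpClass, e.g. self.parser = create_parser().
required_args = [
    '--region', 'us-east-1',  # placeholder; the asserted call does not pin a region
    '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*',
    '--image', 'test-image',
    '--channels', '[{"ChannelName": "train", "DataSource": {"S3DataSource": '
                  '{"S3Uri": "s3://fake-bucket/data", "S3DataType": "S3Prefix", '
                  '"S3DataDistributionType": "FullyReplicated"}}, "ContentType": "", '
                  '"CompressionType": "None", "RecordWrapperType": "None", "InputMode": "File"}]',
    '--instance_type', 'ml.m4.xlarge',
    '--instance_count', '1',
    '--volume_size', '50',
    '--max_run_time', '3600',
    '--model_artifact_path', 'test-path',
]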
Example #3
import logging
import signal

def main(argv=None):
  parser = create_parser()
  args = parser.parse_args(argv)

  logging.getLogger().setLevel(logging.INFO)
  client = _utils.get_sagemaker_client(args.region, args.endpoint_url)

  logging.info('Submitting Training Job to SageMaker...')
  job_name = _utils.create_training_job(client, vars(args))

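  # If the pipeline pod is terminated (SIGTERM), stop the SageMaker training job too so it does not keep running.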
  def signal_term_handler(signalNumber, frame):
    _utils.stop_training_job(client, job_name)
    logging.info(f"Training Job: {job_name} request submitted to Stop")
  signal.signal(signal.SIGTERM, signal_term_handler)

  logging.info('Job request submitted. Waiting for completion...')
  try:
    _utils.wait_for_training_job(client, job_name)
    _utils.wait_for_debug_rules(client, job_name)
  except:
    raise
  finally:
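    # Surface the CloudWatch training logs whether the job succeeded, failed, or was stopped.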
    cw_client = _utils.get_cloudwatch_client(args.region)
    _utils.print_logs_for_job(cw_client, '/aws/sagemaker/TrainingJobs', job_name)

  image = _utils.get_image_from_job(client, job_name)
  model_artifact_url = _utils.get_model_artifacts_from_job(client, job_name)
  logging.info('Get model artifacts %s from training job %s.', model_artifact_url, job_name)

  _utils.write_output(args.model_artifact_url_output_path, model_artifact_url)
  _utils.write_output(args.job_name_output_path, job_name)
  _utils.write_output(args.training_image_output_path, image)

  logging.info('Job completed.')
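
Unlike the first example, this version hands the output writing to `_utils.write_output`. A minimal sketch of such a helper is shown below as an assumption about its contract, not the component's actual implementation.

from pathlib import Path

def write_output(output_path, value):
    # Hypothetical helper: write the value to the given path, creating parent directories first.
    Path(output_path).parent.mkdir(parents=True, exist_ok=True)
    Path(output_path).write_text(str(value))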
Example #4
  def test_sagemaker_exception_in_create_training_job(self):
    mock_client = MagicMock()
    mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "create_training_job")
    mock_client.create_training_job.side_effect = mock_exception
    mock_args = self.parser.parse_args(required_args)

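    # The helper is expected to surface the boto3 ClientError (possibly re-wrapped), hence the broad Exception check.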
    with self.assertRaises(Exception):
      response = _utils.create_training_job(mock_client, vars(mock_args))
Example #5
import argparse
import logging

def main(argv=None):
    parser = argparse.ArgumentParser(description='SageMaker Training Job')
    parser.add_argument('--region',
                        type=str,
                        help='The region where the training job launches.')
    parser.add_argument(
        '--image',
        type=str,
        help=
        'The registry path of the Docker image that contains the training algorithm.'
    )
    parser.add_argument('--instance_type',
                        type=str,
                        help='The ML compute instance type.')
    parser.add_argument(
        '--instance_count',
        type=int,
        help='The number of ML compute instances to use in the training job.')
    parser.add_argument(
        '--volume_size',
        type=int,
        help='The size of the ML storage volume that you want to provision.')
    parser.add_argument(
        '--dataset_path',
        type=str,
        help=
        'The S3 location of the data source that is associated with a channel.'
    )
    parser.add_argument(
        '--model_artifact_path',
        type=str,
        help=
        'Identifies the S3 path where you want Amazon SageMaker to store the model artifacts.'
    )
    parser.add_argument(
        '--role',
        type=str,
        help=
        'The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.'
    )
    args = parser.parse_args(argv)

    logging.getLogger().setLevel(logging.INFO)
    client = _utils.get_client(args.region)

    logging.info('Submitting Training Job to SageMaker...')
    job_name = _utils.create_training_job(client, args.image,
                                          args.instance_type,
                                          args.instance_count,
                                          args.volume_size, args.dataset_path,
                                          args.model_artifact_path, args.role)
    logging.info('Job request submitted. Waiting for completion...')
    _utils.wait_for_training_job(client, job_name)

    model_artifact_url = _utils.get_model_artifacts_from_job(client, job_name)
    logging.info('Get model artifacts %s from training job %s.',
                 model_artifact_url, job_name)

    with open('/tmp/model_artifact_url.txt', 'w') as f:
        f.write(model_artifact_url)
    with open('/tmp/job_name.txt', 'w') as f:
        f.write(job_name)

    logging.info('Job completed.')
Example #6
import argparse
import logging

def main(argv=None):
    parser = argparse.ArgumentParser(description='SageMaker Training Job')
    parser.add_argument('--region',
                        type=str.strip,
                        required=True,
                        help='The region where the training job launches.')
    parser.add_argument('--job_name',
                        type=str.strip,
                        required=False,
                        help='The name of the training job.',
                        default='')
    parser.add_argument(
        '--role',
        type=str.strip,
        required=True,
        help=
        'The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.'
    )
    parser.add_argument(
        '--image',
        type=str.strip,
        required=True,
        help=
        'The registry path of the Docker image that contains the training algorithm.',
        default='')
    parser.add_argument(
        '--algorithm_name',
        type=str.strip,
        required=False,
        help='The name of the resource algorithm to use for the training job.',
        default='')
    parser.add_argument(
        '--metric_definitions',
        type=_utils.str_to_json_dict,
        required=False,
        help=
        'The dictionary of name-regex pairs that specifies the metrics the algorithm emits.',
        default='{}')
    parser.add_argument(
        '--training_input_mode',
        choices=['File', 'Pipe'],
        type=str.strip,
        help='The input mode that the algorithm supports. File or Pipe.',
        default='File')
    parser.add_argument(
        '--hyperparameters',
        type=_utils.str_to_json_dict,
        help='Dictionary of hyperparameters for the algorithm.',
        default='{}')
    parser.add_argument(
        '--channels',
        type=_utils.str_to_json_list,
        required=True,
        help=
        'A list of dicts specifying the input channels. Must have at least one.'
    )
    parser.add_argument(
        '--instance_type',
        required=True,
        choices=[
            'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge',
            'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge',
            'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge',
            'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge',
            'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge',
            'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge',
            'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'
        ],
        type=str.strip,
        help='The ML compute instance type.',
        default='ml.m4.xlarge')
    parser.add_argument(
        '--instance_count',
        required=True,
        type=_utils.str_to_int,
        help='The number of ML compute instances to use in the training job.',
        default=1)
    parser.add_argument(
        '--volume_size',
        type=_utils.str_to_int,
        required=True,
        help='The size of the ML storage volume that you want to provision.',
        default=1)
    parser.add_argument(
        '--resource_encryption_key',
        type=str.strip,
        required=False,
        help=
        'The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).',
        default='')
    parser.add_argument(
        '--max_run_time',
        type=_utils.str_to_int,
        required=True,
        help='The maximum run time in seconds for the training job.',
        default=86400)
    parser.add_argument(
        '--model_artifact_path',
        type=str.strip,
        required=True,
        help=
        'Identifies the S3 path where you want Amazon SageMaker to store the model artifacts.'
    )
    parser.add_argument(
        '--output_encryption_key',
        type=str.strip,
        required=False,
        help=
        'The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.',
        default='')
    parser.add_argument(
        '--vpc_security_group_ids',
        type=str.strip,
        required=False,
        help='The VPC security group IDs, in the form sg-xxxxxxxx.')
    parser.add_argument(
        '--vpc_subnets',
        type=str.strip,
        required=False,
        help=
        'The IDs of the subnets in the VPC to which you want to connect your training job.'
    )
    parser.add_argument('--network_isolation',
                        type=_utils.str_to_bool,
                        required=False,
                        help='Isolates the training container.',
                        default=True)
    parser.add_argument(
        '--traffic_encryption',
        type=_utils.str_to_bool,
        required=False,
        help=
        'Encrypts all communications between ML compute instances in distributed training.',
        default=False)
    parser.add_argument(
        '--tags',
        type=_utils.str_to_json_dict,
        required=False,
        help='An array of key-value pairs to categorize AWS resources.',
        default='{}')
    args = parser.parse_args(argv)

    logging.getLogger().setLevel(logging.INFO)
    client = _utils.get_client(args.region)

    logging.info('Submitting Training Job to SageMaker...')
    job_name = _utils.create_training_job(client, vars(args))
    logging.info('Job request submitted. Waiting for completion...')
    _utils.wait_for_training_job(client, job_name)

    image = _utils.get_image_from_job(client, job_name)
    model_artifact_url = _utils.get_model_artifacts_from_job(client, job_name)
    logging.info('Get model artifacts %s from training job %s.',
                 model_artifact_url, job_name)

    with open('/tmp/model_artifact_url.txt', 'w') as f:
        f.write(model_artifact_url)
    with open('/tmp/job_name.txt', 'w') as f:
        f.write(job_name)
    with open('/tmp/training_image.txt', 'w') as f:
        f.write(image)

    logging.info('Job completed.')
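
Given the parser above, every flag marked required=True has to be supplied. A placeholder invocation might look like the sketch below; with real AWS credentials this would submit an actual training job, so the values are illustrative only.

main([
    '--region', 'us-east-1',
    '--role', 'arn:aws:iam::111122223333:role/example-sagemaker-role',
    '--image', '111122223333.dkr.ecr.us-east-1.amazonaws.com/example-algorithm:latest',
    '--channels', '[{"ChannelName": "train", "DataSource": {"S3DataSource": '
                  '{"S3Uri": "s3://example-bucket/train", "S3DataType": "S3Prefix", '
                  '"S3DataDistributionType": "FullyReplicated"}}, "ContentType": "", '
                  '"CompressionType": "None", "RecordWrapperType": "None", "InputMode": "File"}]',
    '--instance_type', 'ml.m4.xlarge',
    '--instance_count', '1',
    '--volume_size', '50',
    '--max_run_time', '3600',
    '--model_artifact_path', 's3://example-bucket/output',
])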