Exemplo n.º 1
0
def test_generate_random_name_with_prefix(bucket_prefix):
    """Verify generated names keep a (possibly pruned) prefix and respect S3 bucket-name limits."""
    bucket_name = utils.generate_random_name_with_prefix(bucket_prefix)
    max_bucket_name_length = 63
    random_suffix_length = 17  # 16 random characters + 1 separator

    # The prefix must be pruned so that prefix + suffix fits within the 63-char limit.
    # BUGFIX: the previous formula also subtracted len(bucket_prefix), which
    # over-truncated the expected prefix for prefixes of 24-46 characters and
    # made the length assertion below expect the wrong value.
    pruned_prefix = bucket_prefix[:max_bucket_name_length - random_suffix_length]
    assert_that(bucket_name).starts_with(pruned_prefix)
    assert_that(len(bucket_name)).is_equal_to(
        len(pruned_prefix) + random_suffix_length)

    # Verify bucket name limits: bucket name must be at least 3 and no more than 63 characters long
    assert_that(len(bucket_name)).is_between(3, max_bucket_name_length)
Exemplo n.º 2
0
def _setup_bucket_with_resources(pcluster_config, storage_data, stack_name, tags):
    """
    Ensure an S3 bucket exists for the cluster and upload its resources into it.

    When the config does not name a bucket, a new one tied to this stack is
    created and flagged for removal on cluster deletion. Artifacts always go to
    {bucket_name}/{artifact_directory}/, and {artifact_directory}/ is cleaned up
    on cluster deletion or on failure.

    Returns a (bucket_name, artifact_directory, remove_bucket_on_deletion) tuple.
    """
    # Use "{stack_name}-{random_string}" as directory in bucket
    artifact_directory = utils.generate_random_name_with_prefix(stack_name)
    bucket_name = pcluster_config.get_section("cluster").get_param_value("cluster_resource_bucket")

    # A bucket we create ourselves must also be deleted with the cluster;
    # a user-supplied one must be left in place.
    remove_bucket_on_deletion = not bucket_name or bucket_name == "NONE"

    if remove_bucket_on_deletion:
        # Create 1 bucket per cluster named "parallelcluster-{random_string}" if bucket is not provided
        # This bucket needs to be removed on cluster deletion
        bucket_name = utils.generate_random_name_with_prefix("parallelcluster")
        LOGGER.debug("Creating S3 bucket for cluster resources, named %s", bucket_name)
        try:
            utils.create_s3_bucket(bucket_name, pcluster_config.region)
        except Exception:
            LOGGER.error("Unable to create S3 bucket %s.", bucket_name)
            raise
    else:
        # Use user-provided bucket
        # Do not remove this bucket on deletion, but cleanup artifact directory
        try:
            utils.check_s3_bucket_exists(bucket_name)
        except Exception as e:
            LOGGER.error("Unable to access config-specified S3 bucket %s: %s", bucket_name, e)
            raise

    _upload_cluster_artifacts(
        bucket_name, artifact_directory, pcluster_config, storage_data, tags, remove_bucket_on_deletion
    )

    return bucket_name, artifact_directory, remove_bucket_on_deletion
Exemplo n.º 3
0
    def _generate_artifact_dir(self):
        """
        Build the cluster's artifact directory path in the S3 bucket.

        The path is generated once, before CloudFormation stack creation.
        Example: parallelcluster/{version}/clusters/{cluster_name}-jfr4odbeonwb1w5k
        """
        # Random leaf directory keyed to the cluster name, e.g. "{name}-jfr4odbeonwb1w5k"
        leaf_dir = generate_random_name_with_prefix(self.name)
        path_parts = [
            PCLUSTER_S3_ARTIFACTS_DICT.get("root_directory"),
            get_installed_version(),
            PCLUSTER_S3_ARTIFACTS_DICT.get("root_cluster_directory"),
            leaf_dir,
        ]
        self.__s3_artifact_dir = "/".join(path_parts)
Exemplo n.º 4
0
    def _generate_artifact_dir(self):
        """
        Build the image's artifact directory path in the S3 bucket.

        The path is generated once, before CloudFormation stack creation.
        Example: parallelcluster/{version}/images/{image_id}-jfr4odbeonwb1w5k
        """
        artifacts = self._s3_artifacts_dict
        # Random leaf directory keyed to the image id, e.g. "{image_id}-jfr4odbeonwb1w5k"
        leaf_dir = generate_random_name_with_prefix(self.image_id)
        self.__s3_artifact_dir = "/".join([
            artifacts.get("root_directory"),
            get_installed_version(),
            artifacts.get("root_image_directory"),
            leaf_dir,
        ])