def test_cw_dashboard_builder(mocker, test_datadir, config_file_name):
    mock_aws_api(mocker)
    mocker.patch(
        "pcluster.config.cluster_config.HeadNodeNetworking.availability_zone",
        new_callable=PropertyMock,
        return_value="us-east-1a",
    )
    # mock bucket initialization parameters
    mock_bucket(mocker)

    input_yaml = load_yaml_dict(test_datadir / config_file_name)
    cluster_config = ClusterSchema(cluster_name="clustername").load(input_yaml)
    print(cluster_config)
    generated_template = CDKTemplateBuilder().build_cluster_template(
        cluster_config=cluster_config,
        bucket=dummy_cluster_bucket(),
        stack_name="clustername")
    output_yaml = yaml.dump(generated_template, width=float("inf"))
    print(output_yaml)

    if cluster_config.is_cw_dashboard_enabled:
        if cluster_config.shared_storage:
            _verify_ec2_metrics_conditions(cluster_config, output_yaml)

        if cluster_config.is_cw_logging_enabled:
            _verify_head_node_logs_conditions(cluster_config, output_yaml)
        else:
            assert_that(output_yaml).does_not_contain("Head Node Logs")
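
The config_file_name argument is presumably supplied through pytest.mark.parametrize rather than a fixture; the decorator is not part of this excerpt. A minimal sketch of that wiring, with hypothetical file names:

import pytest

# Hypothetical parametrization: each entry names a cluster config YAML stored under
# the test's data directory (test_datadir); the file names are illustrative only.
@pytest.mark.parametrize(
    "config_file_name",
    ["dashboard_enabled.yaml", "dashboard_and_logging_disabled.yaml"],
)
def test_cw_dashboard_builder(mocker, test_datadir, config_file_name):
    ...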
Example #2
def test_cluster_builder_from_configuration_file(mocker, config_file_name):
    mock_aws_api(mocker)
    # mock bucket initialization parameters
    mock_bucket(mocker)
    input_yaml, cluster = load_cluster_model_from_yaml(config_file_name)
    generated_template = CDKTemplateBuilder().build_cluster_template(
        cluster_config=cluster, bucket=dummy_cluster_bucket(), stack_name="clustername"
    )
    print(yaml.dump(generated_template))
Example #3
def test_awsbatch_cluster_builder(mocker):
    mock_aws_api(mocker)
    # mock bucket initialization parameters
    mock_bucket(mocker)

    generated_template = CDKTemplateBuilder().build_cluster_template(
        cluster_config=dummy_awsbatch_cluster_config(mocker),
        bucket=dummy_cluster_bucket(),
        stack_name="clustername")
    print(yaml.dump(generated_template))
Example #4
    def create(
        self,
        disable_rollback: bool = False,
        validator_suppressors: Set[ValidatorSuppressor] = None,
        validation_failure_level: FailureLevel = FailureLevel.ERROR,
    ) -> Tuple[Optional[str], List]:
        """
        Create cluster.

        raises ClusterActionError: in case of generic error
        raises ConfigValidationError: if configuration is invalid
        """
        creation_result = None
        artifact_dir_generated = False
        try:
            suppressed_validation_failures = self.validate_create_request(
                validator_suppressors, validation_failure_level
            )

            self._add_version_tag()
            self._generate_artifact_dir()
            artifact_dir_generated = True
            self._upload_config()

            # Create template if not provided by the user
            if not (self.config.dev_settings and self.config.dev_settings.cluster_template):
                self.template_body = CDKTemplateBuilder().build_cluster_template(
                    cluster_config=self.config, bucket=self.bucket, stack_name=self.stack_name
                )

            # upload cluster artifacts and generated template
            self._upload_artifacts()

            LOGGER.info("Creating stack named: %s", self.stack_name)
            creation_result = AWSApi.instance().cfn.create_stack_from_url(
                stack_name=self.stack_name,
                template_url=self.bucket.get_cfn_template_url(
                    template_name=PCLUSTER_S3_ARTIFACTS_DICT.get("template_name")
                ),
                disable_rollback=disable_rollback,
                tags=self._get_cfn_tags(),
            )

            return creation_result.get("StackId"), suppressed_validation_failures

        except ConfigValidationError as e:
            raise e
        except Exception as e:
            if not creation_result and artifact_dir_generated:
                # Cleanup S3 artifacts if stack is not created yet
                self.bucket.delete_s3_artifacts()
            raise _cluster_error_mapper(e, str(e))
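
A hedged usage sketch of driving this create() method; the import paths and the Cluster construction below are assumptions, not necessarily the real API:

from pcluster.models.cluster import Cluster  # assumed import path
from pcluster.validators.common import FailureLevel  # assumed import path

with open("cluster-config.yaml") as f:  # hypothetical config file
    cluster_config_text = f.read()

# Hypothetical caller: the Cluster constructor arguments are assumptions.
cluster = Cluster("clustername", cluster_config_text)
stack_id, suppressed_failures = cluster.create(
    disable_rollback=True,
    validator_suppressors=None,
    validation_failure_level=FailureLevel.ERROR,
)
print(f"Created stack {stack_id}; suppressed {len(suppressed_failures)} validation failure(s)")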
Example #5
    def create(
        self,
        disable_rollback: bool = True,
        validator_suppressors: Set[ValidatorSuppressor] = None,
        validation_failure_level: FailureLevel = FailureLevel.ERROR,
    ):
        """Create the CFN Stack and associate resources."""
        suppressed_validation_failures = self.validate_create_request(validator_suppressors, validation_failure_level)

        # Generate artifact directory for image
        self._generate_artifact_dir()

        creation_result = None
        artifacts_uploaded = False
        try:
            self._upload_config()

            LOGGER.info("Building ParallelCluster image: %s", self.image_id)

            # Generate cdk cfn template
            self.template_body = CDKTemplateBuilder().build_imagebuilder_template(
                image_config=self.config, image_id=self.image_id, bucket=self.bucket
            )

            # upload generated template
            self._upload_artifacts()
            artifacts_uploaded = True

            # Stack creation
            creation_result = AWSApi.instance().cfn.create_stack_from_url(
                stack_name=self.image_id,
                template_url=self.bucket.get_cfn_template_url(
                    template_name=self._s3_artifacts_dict.get("template_name")
                ),
                disable_rollback=disable_rollback,
                tags=self._get_cfn_tags(),
                capabilities="CAPABILITY_NAMED_IAM",
            )

            self.__stack = ImageBuilderStack(AWSApi.instance().cfn.describe_stack(self.image_id))

            LOGGER.debug("StackId: %s", self.stack.id)
            LOGGER.info("Status: %s", self.stack.status)

            return suppressed_validation_failures

        except Exception as e:
            LOGGER.critical(e)
            if not creation_result and artifacts_uploaded:
                # Cleanup S3 artifacts if stack is not created yet
                self.bucket.delete_s3_artifacts()
            raise _imagebuilder_error_mapper(e, f"ParallelCluster image build infrastructure creation failed.\n{e}")
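
As above, a hedged sketch of a caller; the ImageBuilder import path and construction are assumptions:

from pcluster.models.imagebuilder import ImageBuilder  # assumed import path

with open("image-config.yaml") as f:  # hypothetical image config file
    image_config_text = f.read()

# Hypothetical caller: constructor arguments are assumptions, not the real signature.
image_builder = ImageBuilder(image_id="my-image", config=image_config_text)
suppressed_failures = image_builder.create(disable_rollback=True)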
Example #6
def test_scheduler_plugin_substack(mocker, config_file_name, expected_scheduler_plugin_stack, test_datadir):
    mock_aws_api(mocker)
    # mock bucket initialization parameters
    mock_bucket(mocker)
    if config_file_name == "scheduler_plugin.full.yaml":
        input_yaml, cluster = load_cluster_model_from_yaml(config_file_name)
    else:
        input_yaml, cluster = load_cluster_model_from_yaml(config_file_name, test_datadir)
    generated_template = CDKTemplateBuilder().build_cluster_template(
        cluster_config=cluster, bucket=dummy_cluster_bucket(), stack_name="clustername"
    )
    print(yaml.dump(generated_template))
    assert_that(generated_template["Resources"].get("SchedulerPluginStack", {})).is_equal_to(
        expected_scheduler_plugin_stack
    )
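
config_file_name and expected_scheduler_plugin_stack are presumably paired through pytest.mark.parametrize; a sketch with hypothetical values:

import pytest

# Hypothetical pairing of input config files with the substack expected in the
# generated template; an empty dict means no SchedulerPluginStack resource.
@pytest.mark.parametrize(
    "config_file_name, expected_scheduler_plugin_stack",
    [
        ("slurm.required.yaml", {}),
        ("scheduler_plugin.full.yaml", {"Type": "AWS::CloudFormation::Stack"}),
    ],
)
def test_scheduler_plugin_substack(mocker, config_file_name, expected_scheduler_plugin_stack, test_datadir):
    ...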
Example #7
def test_head_node_dna_json(mocker, test_datadir, config_file_name, expected_head_node_dna_json_file_name):
    mock_aws_api(mocker)

    input_yaml = load_yaml_dict(test_datadir / config_file_name)

    cluster_config = ClusterSchema(cluster_name="clustername").load(input_yaml)

    generated_template = CDKTemplateBuilder().build_cluster_template(
        cluster_config=cluster_config, bucket=dummy_cluster_bucket(), stack_name="clustername"
    )

    generated_head_node_dna_json = json.loads(
        _get_cfn_init_file_content(template=generated_template, resource="HeadNodeLaunchTemplate", file="/tmp/dna.json")
    )
    expected_head_node_dna_json = load_json_dict(test_datadir / expected_head_node_dna_json_file_name)

    assert_that(generated_head_node_dna_json).is_equal_to(expected_head_node_dna_json)
Example #8
    def update(
        self,
        target_source_config: str,
        validator_suppressors: Set[ValidatorSuppressor] = None,
        validation_failure_level: FailureLevel = FailureLevel.ERROR,
        force: bool = False,
    ):
        """
        Update cluster.

        raises ClusterActionError: in case of generic error
        raises ConfigValidationError: if configuration is invalid
        raises ClusterUpdateError: if update is not allowed
        """
        try:
            target_config, changes, ignored_validation_failures = self.validate_update_request(
                target_source_config, validator_suppressors, validation_failure_level, force
            )

            self.config = target_config
            self.__source_config_text = target_source_config

            self._add_version_tag()
            self._upload_config()

            # Create template if not provided by the user
            if not (self.config.dev_settings and self.config.dev_settings.cluster_template):
                self.template_body = CDKTemplateBuilder().build_cluster_template(
                    cluster_config=self.config,
                    bucket=self.bucket,
                    stack_name=self.stack_name,
                    log_group_name=self.stack.log_group_name,
                )

            # upload cluster artifacts and generated template
            self._upload_artifacts()

            LOGGER.info("Updating stack named: %s", self.stack_name)
            AWSApi.instance().cfn.update_stack_from_url(
                stack_name=self.stack_name,
                template_url=self.bucket.get_cfn_template_url(
                    template_name=PCLUSTER_S3_ARTIFACTS_DICT.get("template_name")
                ),
                tags=self._get_cfn_tags(),
            )

            self.__stack = ClusterStack(AWSApi.instance().cfn.describe_stack(self.stack_name))
            LOGGER.debug("StackId: %s", self.stack.id)
            LOGGER.info("Status: %s", self.stack.status)

            return changes, ignored_validation_failures

        except ClusterActionError as e:
            # It can be a ConfigValidationError or ClusterUpdateError
            raise e
        except Exception as e:
            LOGGER.critical(e)
            raise _cluster_error_mapper(e, f"Cluster update failed.\n{e}")
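
A hedged sketch of a caller of update(), reusing the cluster object from the create() example above; the file name and change reporting are assumptions:

with open("updated-cluster-config.yaml") as f:  # hypothetical updated config file
    target_source_config = f.read()

# force=False keeps the normal update checks; a ClusterUpdateError is raised if
# the requested change is not allowed.
changes, ignored_failures = cluster.update(target_source_config, force=False)
for change in changes:
    print(change)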