Code example #1: adding CloudFormation parameters with the CDK
    def _add_cfn_parameters(self):
        """Add the CloudFormation parameters consumed by the image build."""
        if (self.config.dev_settings and self.config.dev_settings.cookbook
                and self.config.dev_settings.cookbook.chef_cookbook):
            dev_settings_cookbook_value = self.config.dev_settings.cookbook.chef_cookbook
            # Presign s3:// cookbook URLs; anything else is passed through unchanged.
            custom_chef_cookbook = (
                create_s3_presigned_url(dev_settings_cookbook_value)
                if dev_settings_cookbook_value.startswith("s3://")
                else dev_settings_cookbook_value
            )
        else:
            custom_chef_cookbook = ""

        CfnParameter(
            self,
            "CfnParamCookbookVersion",
            type="String",
            default=utils.get_installed_version(),
            description="CookbookVersion",
        )
        CfnParameter(
            self,
            "CfnParamChefCookbook",
            type="String",
            default=custom_chef_cookbook,
            description="ChefCookbook",
        )
        CfnParameter(
            self,
            "CfnParamCincInstaller",
            type="String",
            default="",
            description="CincInstaller",
        )
        CfnParameter(
            self,
            "CfnParamChefDnaJson",
            type="String",
            default=ImageBuilderExtraChefAttributes(self.config.dev_settings).dump_json(),
            description="ChefAttributes",
        )
        update_os_and_reboot = (
            self.config.build
            and self.config.build.update_os_packages
            and self.config.build.update_os_packages.enabled
        )
        CfnParameter(
            self,
            "CfnParamUpdateOsAndReboot",
            type="String",
            default="true" if update_os_and_reboot else "false",
            description="UpdateOsAndReboot",
        )
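As a rough usage sketch, a parameter declared this way can be read back through its CDK handle. The stack class, app wiring, and literal default below are illustrative assumptions, not part of the original code (CDK v1-style imports are assumed, matching the unqualified CfnParameter calls above):

    # Minimal, self-contained sketch; ExampleStack and the "3.0.0" default are
    # hypothetical stand-ins for the project's own stack and version lookup.
    from aws_cdk.core import App, CfnOutput, CfnParameter, Stack

    class ExampleStack(Stack):
        def __init__(self, scope, construct_id, **kwargs):
            super().__init__(scope, construct_id, **kwargs)
            cookbook_version = CfnParameter(
                self,
                "CfnParamCookbookVersion",
                type="String",
                default="3.0.0",  # stand-in for utils.get_installed_version()
                description="CookbookVersion",
            )
            # At synth time the parameter resolves to a template Ref:
            CfnOutput(self, "CookbookVersionOut", value=cookbook_version.value_as_string)

    app = App()
    ExampleStack(app, "example")
    app.synth()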
Code example #2: exporting an image builder's logs
    def export_logs(
        self,
        bucket: str,
        bucket_prefix: str = None,
        keep_s3_objects: bool = False,
        start_time: datetime = None,
        end_time: datetime = None,
        output_file: str = None,
    ):
        """
        Export image builder's logs in the given output path, by using given bucket as a temporary folder.

        :param bucket: S3 bucket to be used to export cluster logs data
        :param bucket_prefix: Key path under which exported logs data will be stored in s3 bucket,
               also serves as top-level directory in resulting archive
        :param keep_s3_objects: Keep the exported objects exports to S3. The default behavior is to delete them
        :param start_time: Start time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param end_time: End time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        """
        # The image's CloudFormation stack may already have been deleted; in that
        # case only CloudWatch logs (if any) are exported.
        stack_exists = self._stack_exists()
        if not stack_exists:
            LOGGER.debug("CloudFormation Stack for Image %s does not exist.", self.image_id)

        try:
            with tempfile.TemporaryDirectory() as output_tempdir:
                # Create root folder for the archive
                archive_name = f"{self.image_id}-logs-{datetime.now().strftime('%Y%m%d%H%M')}"
                root_archive_dir = os.path.join(output_tempdir, archive_name)
                os.makedirs(root_archive_dir, exist_ok=True)

                if AWSApi.instance().logs.log_group_exists(self._log_group_name):
                    # Export logs from CloudWatch
                    export_logs_filters = self._init_export_logs_filters(start_time, end_time)
                    logs_exporter = CloudWatchLogsExporter(
                        resource_id=self.image_id,
                        log_group_name=self._log_group_name,
                        bucket=bucket,
                        output_dir=root_archive_dir,
                        bucket_prefix=bucket_prefix,
                        keep_s3_objects=keep_s3_objects,
                    )
                    logs_exporter.execute(
                        start_time=export_logs_filters.start_time, end_time=export_logs_filters.end_time
                    )
                else:
                    LOGGER.info(
                        "Log streams not yet available for %s, only CFN Stack events will be exported.", self.image_id
                    )

                if stack_exists:
                    # Get stack events and write them into a file
                    stack_events_file = os.path.join(root_archive_dir, self._stack_events_stream_name)
                    export_stack_events(self.stack.name, stack_events_file)

                archive_path = create_logs_archive(root_archive_dir, output_file)
                if output_file:
                    return output_file
                else:
                    s3_path = upload_archive(bucket, bucket_prefix, archive_path)
                    return create_s3_presigned_url(s3_path)
        except Exception as e:
            raise ImageBuilderActionError(f"Unexpected error when exporting image's logs: {e}") from e
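A minimal call sketch for the method above; the ImageBuilder constructor and the bucket name are assumptions for illustration, not taken from the project:

    from datetime import datetime

    image = ImageBuilder(image_id="my-image")  # assumed constructor, for illustration
    url = image.export_logs(
        bucket="my-staging-bucket",            # bucket used as a temporary store
        bucket_prefix="image-logs",
        start_time=datetime(2023, 1, 1),
        end_time=datetime(2023, 1, 2),
    )
    print(url)  # with no output_file given, a presigned S3 URL is returned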
Code example #3: exporting a cluster's logs with optional filters
    def export_logs(
        self,
        bucket: str,
        bucket_prefix: str = None,
        keep_s3_objects: bool = False,
        start_time: datetime = None,
        end_time: datetime = None,
        filters: str = None,
        output_file: str = None,
    ):
        """
        Export cluster's logs in the given output path, by using given bucket as a temporary folder.

        :param output: file path to save log file archive to
        :param bucket: Temporary S3 bucket to be used to export cluster logs data
        :param bucket_prefix: Key path under which exported logs data will be stored in s3 bucket,
               also serves as top-level directory in resulting archive
        :param keep_s3_objects: Keep the exported objects exports to S3. The default behavior is to delete them
        :param start_time: Start time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param end_time: End time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param filters: Filters in the format Name=name,Values=value1,value2
               Accepted filters are: private_dns_name, node_type==HeadNode
        """
        # check stack
        if not AWSApi.instance().cfn.stack_exists(self.stack_name):
            raise NotFoundClusterActionError(f"Cluster {self.name} does not exist.")

        try:
            with tempfile.TemporaryDirectory() as output_tempdir:
                # Create root folder for the archive
                archive_name = f"{self.name}-logs-{datetime.now().strftime('%Y%m%d%H%M')}"
                root_archive_dir = os.path.join(output_tempdir, archive_name)
                os.makedirs(root_archive_dir, exist_ok=True)

                if self.stack.log_group_name:
                    # Export logs from CloudWatch
                    export_logs_filters = self._init_export_logs_filters(start_time, end_time, filters)
                    logs_exporter = CloudWatchLogsExporter(
                        resource_id=self.name,
                        log_group_name=self.stack.log_group_name,
                        bucket=bucket,
                        output_dir=root_archive_dir,
                        bucket_prefix=bucket_prefix,
                        keep_s3_objects=keep_s3_objects,
                    )
                    logs_exporter.execute(
                        log_stream_prefix=export_logs_filters.log_stream_prefix,
                        start_time=export_logs_filters.start_time,
                        end_time=export_logs_filters.end_time,
                    )
                else:
                    LOGGER.debug(
                        "CloudWatch logging is not enabled for cluster %s, only CFN Stack events will be exported.",
                        self.name,
                    )

                # Get stack events and write them into a file
                stack_events_file = os.path.join(root_archive_dir, self._stack_events_stream_name)
                export_stack_events(self.stack_name, stack_events_file)

                archive_path = create_logs_archive(root_archive_dir, output_file)
                if output_file:
                    return output_file
                else:
                    s3_path = upload_archive(bucket, bucket_prefix, archive_path)
                    return create_s3_presigned_url(s3_path)
        except Exception as e:
            raise ClusterActionError(f"Unexpected error when exporting cluster's logs: {e}") from e