Code example #1
 def post_publish(self, bucket):
     logger.debug("Adding S3 redirect header from {} to in {} to {}".format(
         self.build_path, bucket.name, self.get_redirect_url()))
     s3_client, s3_resource = get_s3_client()
     s3_client.copy_object(ACL='public-read',
                           Bucket=bucket.name,
                           CopySource={
                               'Bucket': bucket.name,
                               'Key': self.build_path
                           },
                           Key=self.build_path,
                           WebsiteRedirectLocation=self.get_redirect_url())
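
This snippet (and the identical one below) leans on a get_s3_client helper that isn't shown. From the call site we only know it returns a (client, resource) pair; a minimal sketch under the assumption that it wraps boto3:

    import boto3

    def get_s3_client():
        # Assumed helper: return both the low-level client and the
        # high-level resource so callers can use whichever API fits.
        session = boto3.Session()
        return session.client("s3"), session.resource("s3")

Note that copy_object copies the object onto itself. S3 normally rejects a self-copy, but allows it here because the request changes the website redirect location, so the redirect header gets set without re-uploading the file body.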
Code example #2
File: base.py  Project: datadesk/django-bakery
 def post_publish(self, bucket):
     logger.debug("Adding S3 redirect header from {} to in {} to {}".format(
         self.build_path,
         bucket.name,
         self.get_redirect_url()
     ))
     s3_client, s3_resource = get_s3_client()
     s3_client.copy_object(
         ACL='public-read',
         Bucket=bucket.name,
         CopySource={
              'Bucket': bucket.name,
              'Key': self.build_path
         },
         Key=self.build_path,
         WebsiteRedirectLocation=self.get_redirect_url()
     )
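
To confirm the redirect took effect, one could read the header back with head_object. A hypothetical check, assuming boto3 and using placeholder bucket and key names:

    s3_client, _ = get_s3_client()
    head = s3_client.head_object(Bucket="my-bucket", Key="path/to/page/index.html")
    print(head.get("WebsiteRedirectLocation"))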
Code example #3
    def post_publish(self, bucket):
        """
        Set up an S3-side redirect for all Wagtail Redirects

        This is inspired by django-bakery's BuildableRedirectView.post_publish method
        and gets called as part of the `publish` management command
        """
        s3_client, s3_resource = get_s3_client()

        # For now, we're assuming we're only handling the default site
        site = Site.objects.filter(is_default_site=True).first()
        if not site:
            logger.warning("No default Site found - skipping generation of redirects")
            return

        no_site_q = Q(site__isnull=True)
        this_site_q = Q(site=site)
        redirects = Redirect.objects.filter(no_site_q | this_site_q)

        if not redirects:
            logger.info(
                "No Wagtail Redirects detected, so no S3 Website Redirects to create"
            )
            return

        for redirect in redirects:
            original_dest = self.tidy_path(redirect.old_path)
            new_dest = self.get_redirect_url(redirect)

            logger.info(
                (
                    "Adding S3 Website Redirect in {bucket_name} from {old} to {new}"
                ).format(old=original_dest, bucket_name=bucket.name, new=new_dest)
            )
            s3_client.put_object(
                ACL="public-read",
                Bucket=bucket.name,
                Key=original_dest,
                WebsiteRedirectLocation=new_dest,
            )
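
Unlike the copy_object approach above, this variant writes a fresh zero-byte object per redirect: put_object with no Body creates an empty object whose website redirect metadata does the redirecting. The tidy_path and get_redirect_url helpers aren't shown; a hypothetical sketch, inferred only from how they are called:

    def tidy_path(self, path):
        # Hypothetical: S3 keys carry no leading slash, so strip it
        # from the Wagtail old_path.
        return path.lstrip("/")

    def get_redirect_url(self, redirect):
        # Hypothetical: prefer the page the Redirect points at, falling
        # back to its raw external link.
        if redirect.redirect_page:
            return redirect.redirect_page.url
        return redirect.redirect_link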
Code example #4
File: publish.py  Project: tolymbekovna/django-bakery
    def handle(self, *args, **options):
        """
        Sync files in the build directory to a specified S3 bucket
        """
        # Counts and lists we use to keep tabs on things as they progress
        self.uploaded_files = 0
        self.uploaded_file_list = []
        self.deleted_files = 0
        self.deleted_file_list = []
        self.start_time = time.time()

        # Configure all the options we're going to use
        self.set_options(options)

        # Initialize the boto connection
        logger.debug("Connecting to s3")
        if self.verbosity > 2:
            self.stdout.write("Connecting to s3")
        self.s3_client, self.s3_resource = get_s3_client()

        # Grab our bucket
        logger.debug("Retriving bucket {}".format(self.aws_bucket_name))
        if self.verbosity > 2:
            self.stdout.write("Retriving bucket {}".format(
                self.aws_bucket_name))
        self.bucket = self.s3_resource.Bucket(self.aws_bucket_name)

        # Get a list of all keys in our s3 bucket ...
        # ...unless this is the case where we're blindly pushing
        if self.force_publish and self.no_delete:
            self.blind_upload = True
            logger.debug(
                "Skipping object retrieval. We won't need to because we're blindly uploading everything."
            )
            self.s3_obj_dict = {}
        else:
            self.blind_upload = False
            logger.debug("Retrieving objects now published in bucket")
            if self.verbosity > 2:
                self.stdout.write("Retrieving objects now published in bucket")
            self.s3_obj_dict = self.get_bucket_file_list()

        # Get a list of all the local files in our build directory
        logger.debug("Retrieving files built locally")
        if self.verbosity > 2:
            self.stdout.write("Retrieving files built locally")
        self.local_file_list = self.get_local_file_list()

        # Sync local files with s3 bucket
        logger.debug("Syncing local files with bucket")
        if self.verbosity > 2:
            self.stdout.write("Syncing local files with bucket")
        self.sync_with_s3()

        # Delete anything that's left in our keys dict
        if not self.dry_run and not self.no_delete:
            self.deleted_file_list = list(self.s3_obj_dict.keys())
            self.deleted_files = len(self.deleted_file_list)
            if self.deleted_files:
                logger.debug("Deleting %s keys" % self.deleted_files)
                if self.verbosity > 0:
                    self.stdout.write("Deleting %s keys" % self.deleted_files)
                self.batch_delete_s3_objects(self.deleted_file_list,
                                             self.aws_bucket_name)

        # Run any post publish hooks on the views
        if not hasattr(settings, 'BAKERY_VIEWS'):
            raise CommandError(self.views_unconfig_msg)
        for view_str in settings.BAKERY_VIEWS:
            view = get_callable(view_str)()
            if hasattr(view, 'post_publish'):
                getattr(view, 'post_publish')(self.bucket)

        # We're finished, print the final output
        elapsed_time = time.time() - self.start_time
        msg = "Publish completed, %d uploaded and %d deleted files in %.2f seconds" % (
            self.uploaded_files, self.deleted_files, elapsed_time)
        logger.info(msg)
        if self.verbosity > 0:
            self.stdout.write(msg)

        if self.dry_run:
            logger.info(
                "Publish executed with the --dry-run option. No content was changed on S3."
            )
            if self.verbosity > 0:
                self.stdout.write(
                    "Publish executed with the --dry-run option. No content was changed on S3."
                )
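
The batch_delete_s3_objects helper is referenced but not shown. S3's DeleteObjects API accepts at most 1,000 keys per request, so any implementation has to chunk the key list; a minimal sketch under that constraint, assuming boto3 and the get_s3_client helper from earlier:

    def batch_delete_s3_objects(keys, aws_bucket_name, chunk_size=1000):
        # Sketch only: split the keys into chunks of at most 1,000,
        # the per-request limit of S3's DeleteObjects API.
        s3_client, _ = get_s3_client()
        key_list = list(keys)
        for i in range(0, len(key_list), chunk_size):
            chunk = key_list[i:i + chunk_size]
            s3_client.delete_objects(
                Bucket=aws_bucket_name,
                Delete={"Objects": [{"Key": k} for k in chunk]},
            )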
Code example #5
File: publish.py  Project: datadesk/django-bakery
    def handle(self, *args, **options):
        """
        Sync files in the build directory to a specified S3 bucket
        """
        # Counts and lists we use to keep tabs on things as they progress
        self.uploaded_files = 0
        self.uploaded_file_list = []
        self.deleted_files = 0
        self.deleted_file_list = []
        self.start_time = time.time()

        # Configure all the options we're going to use
        self.set_options(options)

        # Initialize the boto connection
        logger.debug("Connecting to s3")
        if self.verbosity > 2:
            self.stdout.write("Connecting to s3")
        self.s3_client, self.s3_resource = get_s3_client()

        # Grab our bucket
        logger.debug("Retriving bucket {}".format(self.aws_bucket_name))
        if self.verbosity > 2:
            self.stdout.write("Retriving bucket {}".format(self.aws_bucket_name))
        self.bucket = self.s3_resource.Bucket(self.aws_bucket_name)

        # Get a list of all keys in our s3 bucket ...
        # ...unless this is the case where we're blindly pushing
        if self.force_publish and self.no_delete:
            self.blind_upload = True
            logger.debug("Skipping object retrieval. We won't need to because we're blinding uploading everything.")
            self.s3_obj_dict = {}
        else:
            self.blind_upload = False
            logger.debug("Retrieving objects now published in bucket")
            if self.verbosity > 2:
                self.stdout.write("Retrieving objects now published in bucket")
            self.s3_obj_dict = self.get_bucket_file_list()

        # Get a list of all the local files in our build directory
        logger.debug("Retrieving files built locally")
        if self.verbosity > 2:
            self.stdout.write("Retrieving files built locally")
        self.local_file_list = self.get_local_file_list()

        # Sync local files with s3 bucket
        logger.debug("Syncing local files with bucket")
        if self.verbosity > 2:
            self.stdout.write("Syncing local files with bucket")
        self.sync_with_s3()

        # Delete anything that's left in our keys dict
        if not self.dry_run and not self.no_delete:
            self.deleted_file_list = list(self.s3_obj_dict.keys())
            self.deleted_files = len(self.deleted_file_list)
            if self.deleted_files:
                logger.debug("Deleting %s keys" % self.deleted_files)
                if self.verbosity > 0:
                    self.stdout.write("Deleting %s keys" % self.deleted_files)
                self.batch_delete_s3_objects(
                    self.deleted_file_list,
                    self.aws_bucket_name
                )

        # Run any post publish hooks on the views
        if not hasattr(settings, 'BAKERY_VIEWS'):
            raise CommandError(self.views_unconfig_msg)
        for view_str in settings.BAKERY_VIEWS:
            view = get_callable(view_str)()
            if hasattr(view, 'post_publish'):
                getattr(view, 'post_publish')(self.bucket)

        # We're finished, print the final output
        elapsed_time = time.time() - self.start_time
        msg = "Publish completed, %d uploaded and %d deleted files in %.2f seconds" % (
            self.uploaded_files,
            self.deleted_files,
            elapsed_time
        )
        logger.info(msg)
        if self.verbosity > 0:
            self.stdout.write(msg)

        if self.dry_run:
            logger.info("Publish executed with the --dry-run option. No content was changed on S3.")
            if self.verbosity > 0:
                self.stdout.write("Publish executed with the --dry-run option. No content was changed on S3.")
Code example #6
    def handle(self, *args, **options):
        """
        Sync files in the build directory to a specified S3 bucket
        """
        # Counts and lists we use to keep tabs on things as they progress
        self.uploaded_files = 0
        self.uploaded_file_list = []
        self.deleted_files = 0
        self.deleted_file_list = []
        self.start_time = time.time()

        # Configure all the options we're going to use
        self.set_options(options)

        # Initialize the boto connection
        self.s3_client, self.s3_resource = get_s3_client()

        # Grab our bucket
        self.bucket = self.s3_resource.Bucket(self.aws_bucket_name)

        # Get a list of all keys in our s3 bucket
        self.s3_obj_dict = self.get_all_objects_in_bucket(
            self.aws_bucket_name, self.s3_client)

        # Get a list of all the local files in our build directory
        self.local_file_list = self.get_local_file_list()

        # Sync the two
        self.sync_with_s3()

        # Delete anything that's left in our keys dict
        if not self.dry_run and not self.no_delete:
            self.deleted_file_list = list(self.s3_obj_dict.keys())
            self.deleted_files = len(self.deleted_file_list)
            if self.deleted_files:
                if self.verbosity > 0:
                    logger.debug("deleting %s keys" % self.deleted_files)
                self.batch_delete_s3_objects(self.deleted_file_list,
                                             self.aws_bucket_name)

        # Run any post publish hooks on the views
        if not hasattr(settings, 'BAKERY_VIEWS'):
            raise CommandError(self.views_unconfig_msg)
        for view_str in settings.BAKERY_VIEWS:
            view = get_callable(view_str)()
            if hasattr(view, 'post_publish'):
                getattr(view, 'post_publish')(self.bucket)

        # We're finished, print the final output
        elapsed_time = time.time() - self.start_time
        if self.verbosity > 0:
            msg = "publish completed, %d uploaded and %d deleted files in %.2f seconds" % (
                self.uploaded_files, self.deleted_files, elapsed_time)
            self.stdout.write(msg)
            logger.info(msg)

        if self.verbosity > 2:
            for f in self.uploaded_file_list:
                logger.info("updated file: %s" % f)
            for f in self.deleted_file_list:
                logger.info("deleted file: %s" % f)

        if self.dry_run:
            logger.info(
                "publish executed with the --dry-run option. No content was changed on S3."
            )
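
The local half of the diff, get_local_file_list, is also unshown. A sketch that walks the build directory and returns paths relative to it, so they line up with the object keys in the bucket (self.build_dir is an assumed attribute, presumably set in set_options):

    import os

    def get_local_file_list(self):
        # Sketch: collect every file under the build directory,
        # keyed relative to it to match the bucket's object keys.
        file_list = []
        for dirpath, _, filenames in os.walk(self.build_dir):
            for name in filenames:
                full_path = os.path.join(dirpath, name)
                file_list.append(os.path.relpath(full_path, self.build_dir))
        return file_list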