Example #1
    def handle(self, *args, **options):
        weekly_index = Weekly.objects.get()
        site = Site.objects.get()

        # Move weekly articles into the index
        for article in WeeklyArticle.objects.all():
            print("moving article", article)
            Redirect.objects.create(
                old_path=Redirect.normalise_path(article.relative_url(site)),
                redirect_page=article,
            )

            # Find a slug that isn't taken under the index
            old_slug = article.slug
            article.slug = find_available_slug(weekly_index, article.slug)

            if article.slug != old_slug:
                print("changed article slug", old_slug, "=>", article.slug)
                article.save(update_fields=['slug'])

            article.move(weekly_index, pos='last-child')

        # Convert editions into redirects to weekly index
        for edition in WeeklyEdition.objects.all():
            print("deleting edition", edition)
            Redirect.objects.create(
                old_path=Redirect.normalise_path(edition.relative_url(site)),
                redirect_page=weekly_index,
            )

            edition.delete()
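
Example #1 relies on a find_available_slug helper that is not shown above. A minimal sketch of such a helper, assuming it only needs to avoid clashes with the slugs of the index page's existing children (recent Wagtail versions ship a comparable utility of their own):

def find_available_slug(parent_page, requested_slug):
    # Collect the slugs already used by the parent's children and append an
    # incrementing suffix until the requested slug no longer clashes.
    existing_slugs = set(parent_page.get_children().values_list('slug', flat=True))
    slug = requested_slug
    number = 1
    while slug in existing_slugs:
        slug = f"{requested_slug}-{number}"
        number += 1
    return slug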
Example #2
    def test_handling_of_existing_redirects(self):
        # the page we'll be triggering the change for here is...
        test_subject = self.event_index

        descendants = test_subject.get_descendants().live()

        # but before we do, let's add some redirects that we'll expect to conflict
        # with ones created by the signal handler
        redirect1 = Redirect.objects.create(
            old_path=Redirect.normalise_path(descendants.first().specific.url),
            site=self.site,
            redirect_link="/some-place",
            automatically_created=False,
        )
        redirect2 = Redirect.objects.create(
            old_path=Redirect.normalise_path(descendants.last().specific.url),
            site=self.site,
            redirect_link="/some-other-place",
            automatically_created=True,
        )

        self.trigger_page_slug_changed_signal(test_subject)

        # pre-existing manually-created redirects should be preserved
        from_db = Redirect.objects.get(id=redirect1.id)
        self.assertEqual(
            (
                redirect1.old_path,
                redirect1.site_id,
                redirect1.is_permanent,
                redirect1.redirect_link,
                redirect1.redirect_page,
            ),
            (
                from_db.old_path,
                from_db.site_id,
                from_db.is_permanent,
                from_db.redirect_link,
                from_db.redirect_page,
            ),
        )

        # pre-existing automatically-created redirects should be replaced completely
        self.assertFalse(Redirect.objects.filter(pk=redirect2.pk).exists())
        self.assertTrue(
            Redirect.objects.filter(
                old_path=redirect2.old_path,
                site_id=redirect2.site_id,
            ).exists())
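
The test above relies on a trigger_page_slug_changed_signal helper defined elsewhere on the test case. A minimal sketch, assuming it simply fires Wagtail's page_slug_changed signal for the given page the way the page editor would after a slug edit (the import path and the instance/instance_before keyword arguments are assumptions that vary by Wagtail version):

    def trigger_page_slug_changed_signal(self, page):
        # Fire the signal with the current page as `instance` and a fresh copy
        # from the database standing in for the pre-change `instance_before`.
        from wagtail.signals import page_slug_changed

        page_slug_changed.send(
            sender=page.specific_class,
            instance=page.specific,
            instance_before=Page.objects.get(id=page.id).specific,
        )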
Example #3
    def handle(self, path, **options):
        pages = list(Page.objects.filter(url_path__startswith=path))

        if not pages:
            self.stdout.write("No pages match the given URL path prefix")
            return

        self.stdout.write(f"This will create redirects for {len(pages)} pages. Continue? [y/n]")
        should_continue = input()

        if should_continue != 'y':
            self.stdout.write("Quitting.")
            return

        for page in pages:
            # get_url_parts() returns (site_id, site_root_url, page_path)
            site_id, site_root, page_path = page.get_url_parts()

            self.stdout.write(f"Saving redirect '{page_path}' to '{page.title}'")
            Redirect.objects.update_or_create(
                old_path=Redirect.normalise_path(page_path),
                site_id=site_id,
                defaults={
                    'redirect_page': page,
                }
            )
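
The command above takes a positional path argument, so the same class also needs an add_arguments method to register it; a minimal sketch:

    def add_arguments(self, parser):
        # The URL path prefix to match against Page.url_path.
        parser.add_argument('path', type=str)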
Example #4
    def read_and_save_data(self, file_contents, site):
        errors = ErrorList()
        try:
            book = xlrd.open_workbook(file_contents=file_contents.read())
        except IOError:
            errors.append(_("Something went wrong while reading the file."))
            return errors

        sheet = book.sheets()[0]
        with transaction.atomic():
            for row_id in range(0, sheet.nrows):
                data = sheet.row_values(row_id)
                old_path, redirect_link = data
                if old_path and redirect_link:
                    if old_path.startswith('/') and redirect_link.startswith('/'):
                        # Based on the wagtail.contrib.redirects form validation
                        # https://github.com/wagtail/wagtail/blob/master/wagtail/contrib/redirects/forms.py#L34
                        _old_path = Redirect.normalise_path(old_path)
                        duplicates = Redirect.objects.filter(old_path=_old_path, site=site)
                        if duplicates:
                            errors.append(
                                _(
                                    "Row: {} - Skipped import: the old path is "
                                    "a duplicate of an earlier record."
                                ).format(row_id + 1)
                            )
                        else:
                            # Store the normalised path so the redirect
                            # middleware's lookup matches it later.
                            Redirect.objects.create(old_path=_old_path, redirect_link=redirect_link, site=site)
                    else:
                        errors.append(
                            _("Row: {} - The old path and the new path must both start with /").format(row_id + 1)
                        )
                else:
                    errors.append(_("Row: {} - The old path and the new path must both be filled in.").format(row_id + 1))
        return errors
Example #5
def create_redirect_object_if_slug_changed(sender, **kwargs):
    instance = kwargs['instance']

    # The main task is getting the old URL that the redirect should start from.
    # Wagtail keeps a record of every page change as revisions, which makes it
    # possible to track every change made to a page, including its slug. The
    # next step is determining whether a revision belongs to a draft or to the
    # published page. For example, an admin user starts editing a page (with
    # slug /original), changes the URL (/original-changed) and saves it as a
    # draft. On the next edit, the user changes the URL again
    # (/original-desired) and then publishes the page. In that case the
    # redirect should be created from /original to /original-desired. A page
    # object whose has_unpublished_changes value is True is a draft revision.
    # Note that when an admin user edits a page, they are editing a page
    # object built from JSON, because each revision stores its content as JSON.
    page_revisions = instance.revisions.order_by('-created_at', '-id')
    for revision in page_revisions:
        page_obj = revision.page.specific_class.from_json(
            revision.content_json)

        # The first revision whose page object has has_unpublished_changes
        # set to False is the most recently published version of the page.
        if not page_obj.has_unpublished_changes:
            # Only create a redirect if the slug (and therefore the URL) changed
            if instance.url != page_obj.url:
                old_path = Redirect.normalise_path(page_obj.url)
                Redirect.objects.update_or_create(
                    old_path=old_path, defaults={'redirect_page': instance})
                # Also create redirect objects for children of this Page
                create_redirect_objects_for_children(old_path, page_obj)
            break
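
A handler like the one above only runs once it is connected to a signal. A minimal sketch of wiring it up so that it fires whenever a page is published, assuming the handler lives in a signal_handlers module within a hypothetical app (the app label and import paths are assumptions; the signal module moved from wagtail.core.signals to wagtail.signals in newer versions):

from django.apps import AppConfig


class RedirectsConfig(AppConfig):
    name = 'myproject.redirects'  # hypothetical app label

    def ready(self):
        from wagtail.core.signals import page_published

        from .signal_handlers import create_redirect_object_if_slug_changed

        # page_published is sent with sender=<specific page class>, instance and
        # revision keyword arguments, which the handler's (sender, **kwargs)
        # signature accepts.
        page_published.connect(create_redirect_object_if_slug_changed)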
Example #6
def create_redirect_objects_for_children(parent_old_slug, parent):
    if not parent.get_children():
        return
    else:
        for child_page in parent.get_children():
            old_path = Redirect.normalise_path(parent_old_slug + '/' +
                                               child_page.slug)
            Redirect.objects.update_or_create(
                old_path=old_path, defaults={'redirect_page': child_page})

            create_redirect_objects_for_children(old_path, child_page)
Example #7
def create_redirect_object_after_page_move(sender, **kwargs):
    if kwargs['url_path_before'] == kwargs['url_path_after']:
        return

    page_after = kwargs['instance']
    parent_page_before_url = kwargs['parent_page_before'].get_url()
    page_before_url = Redirect.normalise_path(parent_page_before_url +
                                              page_after.slug)
    Redirect.objects.update_or_create(old_path=page_before_url,
                                      defaults={'redirect_page': page_after})

    create_redirect_objects_for_children(page_before_url, page_after)
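
The keyword arguments read in Example #7 (instance, parent_page_before, url_path_before, url_path_after) match those provided by Wagtail's post_page_move signal, so a handler like this would be connected roughly as follows (module paths are assumptions and vary by Wagtail version):

from wagtail.core.signals import post_page_move

from .signal_handlers import create_redirect_object_after_page_move  # hypothetical module

post_page_move.connect(create_redirect_object_after_page_move)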
Example #8
def after_move_page_hook(request, instance):
    old_path = Redirect.normalise_path(instance.url)
    instance.refresh_from_db()

    if supports_automatic_redirect(instance) and instance.live:
        Redirect.objects.update_or_create(
            old_path=old_path,
            defaults={
                'redirect_page': instance,
            }
        )

    create_redirect_objects_for_children(old_path, instance)
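
Unlike the signal-based handlers, Example #8 is written as a Wagtail hook, so it has to be registered from a wagtail_hooks.py module. A minimal sketch, assuming the function is importable from a hypothetical signal_handlers module in the same app:

from wagtail.core import hooks

from .signal_handlers import after_move_page_hook  # hypothetical module

# 'after_move_page' passes the request and the page that was just moved.
hooks.register('after_move_page', after_move_page_hook)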
Example #9
    def get_object(self):
        path = self.request.GET.get("html_path", None)
        if path is None:
            raise ValidationError({"html_path": "Missing value"})

        if not path.startswith("/"):
            path = "/" + path

        path = Redirect.normalise_path(path)

        redirect = get_redirect(self.request, path)
        if not redirect:
            raise Http404
        return redirect
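
Example #9 calls a get_redirect helper that is not shown. A minimal sketch, assuming it should behave like Wagtail's redirect middleware and prefer a redirect scoped to the current site over an all-sites one (the helper name comes from the example, everything else is an assumption):

from wagtail.contrib.redirects.models import Redirect
from wagtail.core.models import Site


def get_redirect(request, path):
    # Prefer a redirect scoped to the request's site, then fall back to a
    # site-wide redirect with no site set; return None when neither exists.
    site = Site.find_for_request(request)
    return (
        Redirect.objects.filter(old_path=path, site=site).first()
        or Redirect.objects.filter(old_path=path, site__isnull=True).first()
    )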
Example #10
def create_redirect_objects_for_children(parent_old_slug, parent):
    if not parent.get_children():
        return
    else:
        for child_page in parent.get_children().specific():
            old_path = Redirect.normalise_path(
                parent_old_slug + '/' + child_page.slug)

            if supports_automatic_redirect(child_page) and child_page.live:
                Redirect.objects.update_or_create(
                    old_path=old_path,
                    defaults={
                        'redirect_page': child_page
                    }
                )

            create_redirect_objects_for_children(old_path, child_page)
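
Examples #8 and #10 both call a supports_automatic_redirect helper that is not shown. A minimal sketch, assuming it simply lets page types opt out of automatic redirects via a class attribute (the attribute name is an assumption):

def supports_automatic_redirect(page):
    # Page types can opt out by declaring `create_automatic_redirects = False`
    # on their specific class; anything else gets redirects by default.
    return getattr(page.specific_class, 'create_automatic_redirects', True)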
Example #11
    def clean(self):
        """
        The unique_together condition on the model is ignored if site is None, so we
        need to check for duplicates manually
        """
        cleaned_data = super().clean()

        if cleaned_data.get('site') is None:
            old_path = cleaned_data.get('old_path')
            if old_path is None:
                # cleaned_data['old_path'] is empty because it has already failed validation,
                # so don't bother with our duplicate test
                return

            old_path = Redirect.normalise_path(old_path)
            duplicates = Redirect.objects.filter(old_path=old_path, site__isnull=True)
            if self.instance.pk:
                duplicates = duplicates.exclude(id=self.instance.pk)

            if duplicates:
                raise forms.ValidationError(_("A redirect with this path already exists."))
Example #12
    def handle(self, *args, **kwargs):
        for src, dest in NON_WAGTAIL_REDIRECTS:
            # Build a throwaway instance so WebRedirect's own path normalisation
            # is applied before the lookup.
            r = WebRedirect(source_path=src, destination_path=dest, is_permanent=True)
            r.normalise_paths()
            WebRedirect.objects.get_or_create(
                source_path=r.source_path,
                destination_path=r.destination_path,
                is_permanent=True,
            )

        for old_path, dest_slug in WAGTAIL_REDIRECTS:
            try:
                page = Page.objects.get(slug=dest_slug)
            except Page.DoesNotExist:
                print("ERROR:", dest_slug)
                continue

            Redirect.objects.get_or_create(
                old_path=Redirect.normalise_path(old_path),
                defaults={"redirect_page": page, "is_permanent": True},
            )
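
The command above iterates over two constants that are not shown. A sketch of their expected shape, with purely illustrative values:

# (source_path, destination_path) pairs served outside the Wagtail page tree.
NON_WAGTAIL_REDIRECTS = [
    ("/old-blog/", "/news/"),
]

# (old_path, destination_page_slug) pairs resolved against the Page tree.
WAGTAIL_REDIRECTS = [
    ("/about-us/", "about"),
]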
Example #13
def run(*args):
    if not args:
        logger.error("Error: use --script-args [PATH] to specify the "
                     "location of the redirects CSV.")
    else:
        redirects_file = args[0]

        dupes = []
        successes = 0
        deletes = 0

        with open(redirects_file, "r") as csv_file:
            redirect_list = csv.reader(csv_file, delimiter=',')
            for [from_url, to_id] in redirect_list:
                with transaction.atomic():
                    # If conflicting redirects exist for this from_url,
                    # delete them
                    existing_redirects = Redirect.objects.filter(
                        old_path__iexact=Redirect.normalise_path(from_url))
                    if len(existing_redirects) > 0:
                        dupes.append(from_url)
                        num, _ = existing_redirects.delete()
                        deletes += num
                        logger.debug(f"Removed duplicate redirect: {from_url}")

                    # Add the desired redirect
                    page = Page.objects.get(id=to_id)
                    Redirect.add_redirect(from_url,
                                          redirect_to=page,
                                          is_permanent=True)
                    logger.info(f"Added redirect: {from_url} -> {page.title}")
                    successes += 1

        logger.info(f"Done! Added {successes} redirects")
        if len(dupes) > 0:
            logger.debug(f"Redirects already existed for these urls: {dupes}")
            logger.info(f"Replaced {deletes} redirects with updated ones")