Example #1
def test_get_sync_backend(settings, mocker):
    """ Verify that get_sync_backend() imports the backend based on settings.py """
    settings.CONTENT_SYNC_BACKEND = "custom.backend.Backend"
    import_string_mock = mocker.patch("content_sync.api.import_string")
    website = WebsiteFactory.create()
    api.get_sync_backend(website)
    import_string_mock.assert_any_call("custom.backend.Backend")
    import_string_mock.return_value.assert_any_call(website)
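
The test above only pins down how import_string is used; a minimal sketch of what get_sync_backend() presumably does, inferred from those assertions rather than taken from content_sync.api:

# Sketch only: inferred from the test's assertions, not the project's actual source.
from django.conf import settings
from django.utils.module_loading import import_string


def get_sync_backend(website):
    """Instantiate the backend class named by settings.CONTENT_SYNC_BACKEND."""
    return import_string(settings.CONTENT_SYNC_BACKEND)(website)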
Example #2
def sync_unsynced_websites(
    create_backends: bool = False,
    delete: Optional[bool] = False,
):
    """
    Sync all websites with unsynced content if they have existing repos.
    This should rarely be called, and only from a management command.
    """
    if not settings.CONTENT_SYNC_BACKEND:
        return
    for website_name in (  # pylint:disable=too-many-nested-blocks
        ContentSyncState.objects.exclude(
            Q(current_checksum=F("synced_checksum"), content__deleted__isnull=True)
            & Q(synced_checksum__isnull=False)
        )
        .values_list("content__website__name", flat=True)
        .distinct()
    ):
        if website_name:
            log.debug("Syncing website %s to backend", website_name)
            try:
                reset_publishing_fields(website_name)
                backend = api.get_sync_backend(
                    Website.objects.get(name=website_name))
                api.throttle_git_backend_calls(backend)
                if create_backends or backend.backend_exists():
                    backend.create_website_in_backend()
                    backend.sync_all_content_to_backend()
                    if delete:
                        backend.delete_orphaned_content_in_backend()
            except RateLimitExceededException:
                # Too late, can't even check rate limit reset time now so bail
                raise
            except:  # pylint:disable=bare-except
                log.exception("Error syncing website %s", website_name)
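
The docstring says this helper should only be run from a management command; a minimal, hypothetical sketch of such a command (the command name, import path, and option flags are assumptions, not part of the snippets above):

# Hypothetical wrapper command; the import path and flags are assumptions.
from django.core.management.base import BaseCommand

from content_sync.tasks import sync_unsynced_websites  # assumed module path


class Command(BaseCommand):
    """Sync all websites with unsynced content to the configured backend"""

    def add_arguments(self, parser):
        parser.add_argument("--create-backends", action="store_true")
        parser.add_argument("--delete", action="store_true")

    def handle(self, *args, **options):
        sync_unsynced_websites(
            create_backends=options["create_backends"],
            delete=options["delete"],
        )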
Example #3
def create_website_backend(website_name: str):
    """ Create a backend for a website """
    try:
        website = Website.objects.get(name=website_name)
    except Website.DoesNotExist:
        log.debug(
            "Attempted to create backend for Website that doesn't exist: name=%s",
            website_name,
        )
    else:
        backend = api.get_sync_backend(website)
        backend.create_website_in_backend()
Example #4
def sync_website_content(website_name: str):
    """ Commit any unsynced files to the backend for a website """
    try:
        website = Website.objects.get(name=website_name)
    except Website.DoesNotExist:
        log.debug(
            "Attempted to update backend for Website that doesn't exist: name=%s",
            website_name,
        )
    else:
        backend = api.get_sync_backend(website)
        backend.sync_all_content_to_backend()
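
Both helpers in Examples #3 and #4 take a website name rather than a model instance and log instead of raising, which is the usual shape of asynchronous task entry points. A minimal sketch of queueing one, assuming Celery is in use (an assumption; the snippets do not show how these functions are scheduled):

# Sketch only: assumes Celery; the scheduling is not shown in the snippets above.
from celery import shared_task


@shared_task
def sync_website_content_task(website_name: str):
    """Hypothetical queueable wrapper around sync_website_content"""
    sync_website_content(website_name)


# Hypothetical usage: enqueue a sync after saving content
# sync_website_content_task.delay(website.name)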
Example #5
    def handle(self, *args, **options):
        website = fetch_website(options["website"])
        commit = options["commit"] or NotSet
        path = options["path"]
        confirm = (
            "Y"
            if (path is not None or commit is NotSet)
            else input(
                "Are you sure you want to revert all files for this site to the specified commit? Y/N"
            ).upper()
        )
        if confirm != "Y":
            exit(0)
        backend = get_sync_backend(website)
        self.stdout.write(
            f"Syncing content from backend to database for '{website.title}'..."
        )
        backend.sync_all_content_to_db(ref=commit, path=path)
        if commit is not NotSet:
            # Sync back to git
            backend.sync_all_content_to_backend()
        reset_publishing_fields(website.name)
        self.stdout.write("Completed syncing from backend to database")
Example #6
    def handle(self, *args, **options):
        website = fetch_website(options["website"])
        backend = get_sync_backend(website)
        should_create = options["force_create"]
        should_delete = options["git_delete"]
        if not should_create:
            should_create = not ContentSyncState.objects.filter(
                content__website=website
            ).exists()
        if should_create:
            self.stdout.write(f"Creating website in backend for '{website.title}'...")
            backend.create_website_in_backend()
        self.stdout.write(
            f"Updating website content in backend for '{website.title}'..."
        )
        backend.sync_all_content_to_backend()
        if should_delete:
            backend.delete_orphaned_content_in_backend()
        reset_publishing_fields(website.name)
Example #7
    def handle(self, *args, **options):
        self.stdout.write("Fixing repos for imported OCW sites")
        start = now_in_utc()
        errors = 0
        websites = (
            Website.objects.exclude(short_id__endswith="-2")
            .filter(source="ocw-import", short_id__regex=r".+\-\d{1,2}$")
            .order_by("name")
        )
        self.stdout.write(f"Repairing repos for {websites.count()} sites")
        for website in websites:
            try:
                with transaction.atomic():
                    short_id_secs = website.short_id.split("-")
                    base_repo, idx = "-".join(short_id_secs[:-1]), short_id_secs[-1]
                    website.short_id = f"{base_repo}-2"
                    website.save()
                    ContentSyncState.objects.filter(content__website=website).update(
                        synced_checksum=None, data=None
                    )
                    backend = get_sync_backend(website)
                    backend.sync_all_content_to_backend()
                    get_sync_pipeline(website).upsert_pipeline()
                    for i in range(3, int(idx) + 1):
                        try:
                            backend.api.org.get_repo(f"{base_repo}-{i}").delete()
                        except GithubException as ge:
                            if ge.status != 404:
                                raise
            except Exception as exc:  # pylint:disable=broad-except
                self.stderr.write(
                    f"Error occurred repairing repo for {website.name}: {exc}"
                )
                errors += 1

        total_seconds = (now_in_utc() - start).total_seconds()
        if errors == 0:
            self.stdout.write(f"Repo repair finished, took {total_seconds} seconds")
        else:
            self.stderr.write(
                f"Repo repair finished with {errors} errors, took {total_seconds} seconds"
            )
Example #8
def upsert_website_pipeline_batch(
    website_names: List[str], create_backend=False, unpause=False
):
    """Create/update publishing pipelines for multiple websites"""
    api_instance = None
    for website_name in website_names:
        website = Website.objects.get(name=website_name)
        if create_backend:
            backend = api.get_sync_backend(website)
            api.throttle_git_backend_calls(backend)
            backend.create_website_in_backend()
            backend.sync_all_content_to_backend()
        pipeline = api.get_sync_pipeline(website, api=api_instance)
        if not api_instance:
            # Keep using the same api instance to minimize multiple authentication calls
            api_instance = pipeline.api
        pipeline.upsert_pipeline()
        if unpause:
            for version in [VERSION_LIVE, VERSION_DRAFT]:
                pipeline.unpause_pipeline(version)
    return True
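
A hypothetical caller for the function above, chunking all website names into batches; the batch size and the choice to unpause are illustrative assumptions, not taken from the project:

# Hypothetical caller: chunks website names into batches for upsert_website_pipeline_batch.
def upsert_pipelines_for_all_sites(batch_size: int = 100):
    names = list(Website.objects.values_list("name", flat=True))
    for start in range(0, len(names), batch_size):
        upsert_website_pipeline_batch(
            names[start : start + batch_size], create_backend=False, unpause=True
        )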