Example #1
def publish_content(db_object, versioned, is_major_update=True):
    """Publish a given content.

    Note: creates a manifest.json without the introduction and conclusion if they are not needed, and removes the
    "text" field of extracts.

    :param db_object: Database representation of the content
    :type db_object: PublishableContent
    :param versioned: version of the content to publish
    :type versioned: VersionedContent
    :param is_major_update: if set to `True`, will update the publication date
    :type is_major_update: bool
    :raise FailureDuringPublication: if something goes wrong
    :return: the published representation
    :rtype: zds.tutorialv2.models.models_database.PublishedContent
    """

    from zds.tutorialv2.models.models_database import PublishedContent

    if is_major_update:
        versioned.pubdate = datetime.now()

    # First write the files in a temporary directory: if anything goes wrong,
    # the last published version is not impacted !
    tmp_path = os.path.join(settings.ZDS_APP['content']['repo_public_path'],
                            versioned.slug + '__building')

    if os.path.exists(tmp_path):
        shutil.rmtree(tmp_path)  # erase previous attempt, if any

    # render HTML:
    altered_version = copy.deepcopy(versioned)
    publish_container(db_object, tmp_path, altered_version)
    altered_version.dump_json(os.path.join(tmp_path, 'manifest.json'))

    # make room for "extra contents"
    extra_contents_path = os.path.join(
        tmp_path, settings.ZDS_APP['content']['extra_contents_dirname'])
    os.makedirs(extra_contents_path)

    base_name = os.path.join(extra_contents_path, versioned.slug)

    # 1. markdown file (base for the others) :
    # If we come from the command line, we need to activate i18n so the date is rendered in French.
    cur_language = translation.get_language()
    versioned.pubdate = datetime.now()
    try:
        translation.activate(settings.LANGUAGE_CODE)
        parsed = render_to_string('tutorialv2/export/content.md',
                                  {'content': versioned})
    finally:
        translation.activate(cur_language)

    parsed_with_local_images = retrieve_and_update_images_links(
        parsed, directory=extra_contents_path)

    md_file_path = base_name + '.md'
    md_file = codecs.open(md_file_path, 'w', encoding='utf-8')
    try:
        md_file.write(parsed_with_local_images)
    except (UnicodeError, UnicodeEncodeError):
        raise FailureDuringPublication(
            _(u'Une erreur est survenue durant la génération du fichier markdown '
              u'à télécharger, vérifiez le code markdown'))
    finally:
        md_file.close()

    pandoc_debug_str = ""
    if settings.PANDOC_LOG_STATE:
        pandoc_debug_str = " 2>&1 | tee -a " + settings.PANDOC_LOG

    # 2. HTML
    subprocess.call(settings.PANDOC_LOC + "pandoc -s -S --toc " +
                    md_file_path + " -o " + base_name + ".html" +
                    pandoc_debug_str,
                    shell=True,
                    cwd=extra_contents_path)

    # 3. EPUB
    subprocess.call(settings.PANDOC_LOC + "pandoc -s -S --toc " +
                    md_file_path + " -o " + base_name + ".epub" +
                    pandoc_debug_str,
                    shell=True,
                    cwd=extra_contents_path)

    # 4. PDF
    if settings.ZDS_APP['content']['build_pdf_when_published']:
        subprocess.call(settings.PANDOC_LOC + "pandoc " +
                        settings.PANDOC_PDF_PARAM + " " + md_file_path +
                        " -o " + base_name + ".pdf" + pandoc_debug_str,
                        shell=True,
                        cwd=extra_contents_path)

    # ok, now we can really publish the thing !
    is_update = False

    if db_object.public_version:
        public_version = db_object.public_version
        is_update = True

        # the content has been published in the past, so clean up the old files!
        old_path = public_version.get_prod_path()
        shutil.rmtree(old_path)

        # if the slug changed, a new object will be created instead of reusing the same one
        if versioned.slug != public_version.content_public_slug:
            public_version.must_redirect = True  # set redirection
            publication_date = public_version.publication_date
            public_version.save()
            db_object.public_version = PublishedContent()
            public_version = db_object.public_version

            # the content has already been published, so keep its publication date!
            public_version.publication_date = publication_date

    else:
        public_version = PublishedContent()

    # make the new public version
    public_version.content_public_slug = versioned.slug
    public_version.content_type = versioned.type
    public_version.content_pk = db_object.pk
    public_version.content = db_object
    public_version.must_reindex = True
    public_version.save()

    # move the files into place
    shutil.move(tmp_path, public_version.get_prod_path())

    # save public version
    if is_major_update or not is_update:
        public_version.publication_date = datetime.now()
    elif is_update:
        public_version.update_date = datetime.now()

    public_version.sha_public = versioned.current_version
    public_version.save()
    try:
        make_zip_file(public_version)
    except IOError:
        pass

    return public_version
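
The docstring above spells out the call contract: pass the database object and a loaded versioned snapshot, persist the returned PublishedContent, and handle FailureDuringPublication. A minimal caller might look like the sketch below, assuming the model API shown in the other examples on this page; the accept_and_publish wrapper itself is hypothetical.

def accept_and_publish(db_object, sha, is_major_update=True):
    # Hypothetical wrapper; publish_content, load_version, current_version,
    # sha_public and public_version are used as in the examples on this page.
    versioned = db_object.load_version(sha)
    try:
        published = publish_content(db_object, versioned, is_major_update)
    except FailureDuringPublication:
        return None  # let the caller report the error to the user
    db_object.sha_public = versioned.current_version
    db_object.public_version = published
    db_object.save()
    return published
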
Example #2
def publish_content(db_object, versioned, is_major_update=True):
    """
    Publish a given content.

    .. note::
        creates a manifest.json without the introduction and conclusion if they are not needed, and removes the
        'text' field of extracts.

    :param db_object: Database representation of the content
    :type db_object: PublishableContent
    :param versioned: version of the content to publish
    :type versioned: VersionedContent
    :param is_major_update: if set to `True`, will update the publication date
    :type is_major_update: bool
    :raise FailureDuringPublication: if something goes wrong
    :return: the published representation
    :rtype: zds.tutorialv2.models.models_database.PublishedContent
    """

    from zds.tutorialv2.models.models_database import PublishedContent

    if is_major_update:
        versioned.pubdate = datetime.now()

    # First write the files in a temporary directory: if anything goes wrong,
    # the last published version is not impacted !
    tmp_path = os.path.join(settings.ZDS_APP['content']['repo_public_path'], versioned.slug + '__building')

    if os.path.exists(tmp_path):
        shutil.rmtree(tmp_path)  # erase previous attempt, if any

    # render HTML:
    altered_version = copy.deepcopy(versioned)
    publish_container(db_object, tmp_path, altered_version)
    altered_version.dump_json(os.path.join(tmp_path, 'manifest.json'))

    # make room for 'extra contents'
    extra_contents_path = os.path.join(tmp_path, settings.ZDS_APP['content']['extra_contents_dirname'])
    os.makedirs(extra_contents_path)

    base_name = os.path.join(extra_contents_path, versioned.slug)

    # 1. markdown file (base for the others) :
    # If we come from the command line, we need to activate i18n so the date is rendered in French.
    cur_language = translation.get_language()
    versioned.pubdate = datetime.now()
    try:
        translation.activate(settings.LANGUAGE_CODE)
        parsed = render_to_string('tutorialv2/export/content.md', {'content': versioned})
    finally:
        translation.activate(cur_language)

    parsed_with_local_images = retrieve_and_update_images_links(parsed, directory=extra_contents_path)

    md_file_path = base_name + '.md'
    md_file = codecs.open(md_file_path, 'w', encoding='utf-8')
    try:
        md_file.write(parsed_with_local_images)
    except (UnicodeError, UnicodeEncodeError):
        raise FailureDuringPublication(_('Une erreur est survenue durant la génération du fichier markdown '
                                         'à télécharger, vérifiez le code markdown'))
    finally:
        md_file.close()

    pandoc_debug_str = ''
    if settings.PANDOC_LOG_STATE:
        pandoc_debug_str = ' 2>&1 | tee -a ' + settings.PANDOC_LOG
    if settings.ZDS_APP['content']['extra_content_generation_policy'] == 'SYNC':
        # ok, now we can really publish the thing !
        generate_exernal_content(base_name, extra_contents_path, md_file_path, pandoc_debug_str)
    elif settings.ZDS_APP['content']['extra_content_generation_policy'] == 'WATCHDOG':
        PublicatorRegistery.get('watchdog').publish(md_file_path, base_name, silently_pass=False)

    is_update = False

    if db_object.public_version:
        public_version = db_object.public_version
        is_update = True

        # the content has been published in the past, so clean up the old files!
        old_path = public_version.get_prod_path()
        shutil.rmtree(old_path)

        # if the slug changed, a new object will be created instead of reusing the same one
        if versioned.slug != public_version.content_public_slug:
            public_version.must_redirect = True  # set redirection
            publication_date = public_version.publication_date
            public_version.save()
            db_object.public_version = PublishedContent()
            public_version = db_object.public_version

            # the content has already been published, so keep its publication date!
            public_version.publication_date = publication_date

    else:
        public_version = PublishedContent()

    # make the new public version
    public_version.content_public_slug = versioned.slug
    public_version.content_type = versioned.type
    public_version.content_pk = db_object.pk
    public_version.content = db_object
    public_version.must_reindex = True
    public_version.save()
    public_version.char_count = public_version.get_char_count(md_file_path)

    for author in db_object.authors.all():
        public_version.authors.add(author)
    public_version.save()
    # move the files into place
    if settings.ZDS_APP['content']['extra_content_generation_policy'] != 'WATCHDOG':
        shutil.move(tmp_path, public_version.get_prod_path())
    else:
        # with watchdog, copy the markdown and zip files to prod; everything else is handled by the watchdog
        shutil.copytree(tmp_path, public_version.get_prod_path())
    # save public version
    if is_major_update or not is_update:
        public_version.publication_date = datetime.now()
    elif is_update:
        public_version.update_date = datetime.now()

    public_version.sha_public = versioned.current_version
    public_version.save()
    try:
        make_zip_file(public_version)
    except OSError:
        pass

    return public_version
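
Compared with Example #1, this version delegates the HTML/EPUB/PDF exports to generate_exernal_content (the spelling is the project's) or to a watchdog-based publicator, chosen by ZDS_APP['content']['extra_content_generation_policy']. Below is a sketch of what the synchronous helper could look like, reconstructed from the pandoc commands of Example #1; the real signature and options in zds.tutorialv2 may differ.

def generate_exernal_content(base_name, extra_contents_path, md_file_path, pandoc_debug_str):
    # Reconstruction from Example #1: one pandoc call per output format.
    formats = [('html', '-s -S --toc'),
               ('epub', '-s -S --toc'),
               ('pdf', settings.PANDOC_PDF_PARAM)]
    for extension, pandoc_args in formats:
        if extension == 'pdf' and not settings.ZDS_APP['content']['build_pdf_when_published']:
            continue  # PDF generation can be disabled in the settings
        subprocess.call(settings.PANDOC_LOC + 'pandoc ' + pandoc_args + ' ' +
                        md_file_path + ' -o ' + base_name + '.' + extension +
                        pandoc_debug_str,
                        shell=True,
                        cwd=extra_contents_path)
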
Example #3
def migrate_articles():
    articles = Article.objects.all()

    if len(articles) == 0:
        return
    for i in progressbar(xrange(len(articles)), "Exporting articles", 100):
        current = articles[i]
        if not os.path.exists(current.get_path(False)):
            sys.stderr.write(
                'Invalid physical path to repository « {} », skipping\n'
                .format(current.get_path(False)))
            continue

        exported = PublishableContent()
        exported.slug = current.slug
        exported.type = "ARTICLE"
        exported.title = current.title
        exported.creation_date = current.create_at
        exported.description = current.description
        exported.sha_draft = current.sha_draft
        exported.sha_validation = current.sha_validation
        exported.licence = current.licence
        exported.js_support = current.js_support
        exported.pubdate = current.pubdate
        exported.save()  # we need to save before updating `ManyToMany` relations

        try:
            clean_commit = copy_and_clean_repo(current.get_path(False),
                                               exported.get_repo_path(False))
        except InvalidGitRepositoryError as e:
            exported.delete()
            sys.stderr.write(
                'Repository in « {} » is invalid, skipping\n'.format(e))
            continue

        if clean_commit:
            exported.sha_draft = clean_commit

            # save clean up in old module to avoid any trouble
            current.sha_draft = clean_commit
            current.save()

        for author in current.authors.all():
            exported.authors.add(author)
        for category in current.subcategory.all():
            exported.subcategory.add(category)
        new_gallery = create_gallery_for_article(exported)

        if current.image:
            # migrate image using `Image()`
            try:
                path_to_image = current.image['article_illu'].url
            except InvalidImageFormatError:
                pass
            else:
                img = Image()

                # Create a new name for our image
                filename = os.path.basename(current.image['article_illu'].url)

                # Find original name
                split = filename.split('.')
                original_filename = split[0] + '.' + split[1]

                if "None" in path_to_image:

                    # Move image in the gallery folder
                    shutil.copyfile(
                        os.path.join(MEDIA_ROOT, 'articles', 'None',
                                     original_filename),
                        os.path.join(new_gallery.get_gallery_path(),
                                     original_filename))

                    # Update image information
                    img.physical = os.path.join('galleries',
                                                str(new_gallery.pk),
                                                original_filename)
                else:
                    # Move image in the gallery folder
                    shutil.copyfile(
                        os.path.join(MEDIA_ROOT, 'articles', str(current.id),
                                     original_filename),
                        os.path.join(new_gallery.get_gallery_path(),
                                     original_filename))

                    # Update image information
                    img.physical = os.path.join('galleries',
                                                str(new_gallery.pk),
                                                original_filename)

                img.title = 'icone de l\'article'
                img.slug = slugify(filename)
                img.pubdate = datetime.now()
                img.gallery = new_gallery
                img.save()
                exported.image = img

        # now, re create the manifest.json
        versioned = exported.load_version()
        versioned.type = "ARTICLE"

        if exported.licence:
            versioned.licence = exported.licence

        split_article_in_extracts(versioned)  # create extracts from text
        exported.sha_draft = versioned.commit_changes(u'Migration version 2')
        exported.old_pk = current.pk
        exported.save()

        reacts = Reaction.objects.filter(article__pk=current.pk)\
                                 .select_related("author")\
                                 .order_by("pubdate")\
                                 .all()
        if current.last_reaction:
            export_comments(reacts, exported, ArticleRead,
                            current.last_reaction.pk)
        migrate_validation(
            exported, ArticleValidation.objects.filter(article__pk=current.pk))

        if current.sha_public is not None and current.sha_public != "":
            # set mapping
            map_previous = PublishedContent()
            map_previous.content_public_slug = current.slug
            map_previous.content_pk = current.pk
            map_previous.content_type = 'ARTICLE'
            map_previous.must_redirect = True  # will send HTTP 301 if visited !
            map_previous.content = exported
            map_previous.save()

            # publish the article !
            published = publish_content(
                exported, exported.load_version(exported.sha_draft), False)
            exported.pubdate = current.pubdate
            exported.update_date = current.update
            exported.sha_public = exported.sha_draft
            exported.public_version = published
            exported.save()
            published.content_public_slug = exported.slug
            published.publication_date = exported.pubdate
            published.save()
            # as we changed the structure we have to update the validation history. Yes, it's ugly.
            last_validation = Validation.objects.filter(
                content__pk=exported.pk).last()
            structure_validation = Validation(
                content=exported,
                version=exported.sha_public,
                comment_authors="Migration v2",
                comment_validator="yeah",
                status="ACCEPT",
                validator=last_validation.validator,
                date_proposition=datetime.now(),
                date_validation=datetime.now(),
                date_reserve=datetime.now())
            structure_validation.save()
        # fix strange notification bug
        authors = list(exported.authors.all())
        reads_to_delete = ContentRead.objects\
                                     .filter(content=exported)\
                                     .exclude(user__pk__in=ContentReaction.objects
                                                                          .filter(related_content=exported)
                                                                          .exclude(author__in=authors)
                                                                          .values_list("author__pk", flat=True))
        for read in reads_to_delete.all():
            read.delete()
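
The map_previous object created just before publication keeps the old v1 slug alive: a PublishedContent row with must_redirect=True points visitors of the old URL to the new one with an HTTP 301, as the comment above notes. Below is a hypothetical sketch of how such a mapping could be resolved; only PublishedContent, content_public_slug, must_redirect and public_version come from this page, the helper itself and its error message are assumptions.

from django.http import Http404

def current_published_for_slug(slug):
    # Hypothetical lookup helper, not the actual zds view code.
    published = PublishedContent.objects.filter(content_public_slug=slug).first()
    if published is None:
        raise Http404('no published content under this slug')
    if published.must_redirect:
        # old slug: follow the mapping to the current public version
        # (the view would answer with HTTP 301 to its URL)
        return published.content.public_version
    return published
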
Example #4
def migrate_tuto(tutos, title="Exporting mini tuto"):

    if len(tutos) == 0:
        return

    for i in progressbar(xrange(len(tutos)), title, 100):
        current = tutos[i]
        if not os.path.exists(current.get_path(False)):
            sys.stderr.write(
                'Invalid physical path to repository « {} », skipping\n'
                .format(current.get_path(False)))
            continue
        exported = PublishableContent()
        exported.slug = current.slug
        exported.type = "TUTORIAL"
        exported.title = current.title
        exported.sha_draft = current.sha_draft
        exported.sha_beta = current.sha_beta
        exported.sha_validation = current.sha_validation
        exported.licence = current.licence
        exported.update_date = current.update
        exported.creation_date = current.create_at
        exported.description = current.description
        exported.js_support = current.js_support
        exported.source = current.source
        exported.pubdate = current.pubdate
        exported.save()

        try:
            clean_commit = copy_and_clean_repo(current.get_path(False),
                                               exported.get_repo_path(False))
        except InvalidGitRepositoryError as e:
            exported.delete()
            sys.stderr.write(
                'Repository in « {} » is invalid, skipping\n'.format(e))
            continue

        if clean_commit:
            exported.sha_draft = clean_commit

            # save clean up in old module to avoid any trouble
            current.sha_draft = clean_commit
            current.save()

        exported.gallery = current.gallery
        exported.image = current.image
        for category in current.subcategory.all():
            exported.subcategory.add(category)
        for help in current.helps.all():
            exported.helps.add(help)
        for author in current.authors.all():
            exported.authors.add(author)
        exported.save()

        # now, re create the manifest.json
        versioned = exported.load_version()

        # this loop exists because old .tuto imports failed on their chapter intros
        for container in versioned.traverse(True):
            if container.parent is None:
                continue
            # in old .tuto file chapter intro are represented as chapter_slug/introduction.md
            # instead of part_slug/chapter_slug/introduction.md
            corrected_intro_path = file_join(
                container.get_path(relative=False), "introduction.md")
            corrected_ccl_path = file_join(container.get_path(relative=False),
                                           "conclusion.md")
            if container.get_path(True) not in container.introduction:
                if file_exists(corrected_intro_path):
                    container.introduction = file_join(
                        container.get_path(relative=True), "introduction.md")
                else:
                    container.introduction = None
            if container.get_path(True) not in container.conclusion:
                if file_exists(corrected_ccl_path):
                    container.conclusion = file_join(
                        container.get_path(relative=True), "conclusion.md")
                else:
                    container.conclusion = None

        versioned.licence = exported.licence
        versioned.type = "TUTORIAL"
        versioned.dump_json()
        versioned.repository.index.add(['manifest.json'])  # index new manifest before commit
        exported.sha_draft = versioned.commit_changes(u"Migration version 2")

        exported.old_pk = current.pk
        exported.save()
        # export beta forum post
        former_topic = Topic.objects.filter(key=current.pk).first()
        if former_topic is not None:
            former_topic.related_publishable_content = exported

            former_topic.save()
            former_first_post = former_topic.first_post()
            text = former_first_post.text
            text = text.replace(current.get_absolute_url_beta(),
                                exported.get_absolute_url_beta())
            former_first_post.update_content(text)
            former_first_post.save()
            exported.beta_topic = former_topic
            exported.save()
        # extract notes
        reacts = Note.objects.filter(tutorial__pk=current.pk)\
                             .select_related("author")\
                             .order_by("pubdate")\
                             .all()
        migrate_validation(
            exported,
            TutorialValidation.objects.filter(tutorial__pk=current.pk))
        if current.last_note:
            export_comments(reacts, exported, TutorialRead,
                            current.last_note.pk)
        if current.sha_public is not None and current.sha_public != "":
            published = publish_content(
                exported, exported.load_version(current.sha_public), False)
            exported.pubdate = current.pubdate
            exported.sha_public = current.sha_public
            exported.public_version = published
            exported.save()
            published.content_public_slug = exported.slug
            published.publication_date = current.pubdate

            published.save()
            # set mapping
            map_previous = PublishedContent()
            map_previous.content_public_slug = current.slug
            map_previous.content_pk = current.pk
            map_previous.content_type = 'TUTORIAL'
            map_previous.must_redirect = True  # will send HTTP 301 if visited !
            map_previous.content = exported
            map_previous.save()
        # fix strange notification bug
        authors = list(exported.authors.all())
        reads_to_delete = ContentRead.objects\
                                     .filter(content=exported)\
                                     .exclude(user__pk__in=ContentReaction.objects
                                                                          .filter(related_content=exported)
                                                                          .exclude(author__in=authors)
                                                                          .values_list("author__pk", flat=True))
        for read in reads_to_delete.all():
            read.delete()
Example #5
    def test_special_case_of_contents(self):
        """test that the old publishedcontent does not stay when a new one is created"""

        if not self.manager.connected_to_es:
            return

        # 1. Create a middle-tutorial, publish it, then index it
        tuto = PublishableContentFactory(type='TUTORIAL')
        tuto.authors.add(self.user)
        tuto.save()

        tuto_draft = tuto.load_version()
        chapter1 = ContainerFactory(parent=tuto_draft, db_object=tuto)
        ExtractFactory(container=chapter1, db_object=tuto)
        published = publish_content(tuto, tuto_draft, is_major_update=True)

        tuto.sha_public = tuto_draft.current_version
        tuto.sha_draft = tuto_draft.current_version
        tuto.public_version = published
        tuto.save()

        self.manager.es_bulk_indexing_of_model(PublishedContent,
                                               force_reindexing=True)  # index
        self.manager.refresh_index()

        first_publication = PublishedContent.objects.get(content_pk=tuto.pk)
        self.assertTrue(first_publication.es_already_indexed)
        self.assertFalse(first_publication.es_flagged)

        s = Search()
        s.query(MatchAll())
        results = self.manager.setup_search(s).execute()
        self.assertEqual(len(results), 2)  # get 2 results, one for the content and one for the chapter

        self.assertEqual(PublishedContent.objects.count(), 1)

        # 2. Change the title, which will trigger a change in the slug
        tuto = PublishableContent.objects.get(pk=tuto.pk)
        versioned = tuto.load_version(sha=tuto.sha_draft)

        tuto.title = u'un titre complètement différent!'
        tuto.save()

        versioned.repo_update_top_container(tuto.title, tuto.slug, u'osef',
                                            u'osef')
        second_publication = publish_content(tuto, versioned, True)

        tuto.sha_public = versioned.current_version
        tuto.sha_draft = versioned.current_version
        tuto.public_version = second_publication
        tuto.save()

        self.assertEqual(PublishedContent.objects.count(), 2)  # now there are two objects ...
        first_publication = PublishedContent.objects.get(
            pk=first_publication.pk)
        self.assertTrue(first_publication.must_redirect)  # ... including the first one, for redirection

        self.manager.refresh_index()

        s = Search()
        s.query(MatchAll())
        results = self.manager.setup_search(s).execute()
        self.assertEqual(len(results), 0)  # the old one is gone (and we need to reindex to get the new one)

        # 3. Check if indexation brings the new one, and not the old one
        self.manager.es_bulk_indexing_of_model(PublishedContent,
                                               force_reindexing=True)  # index
        self.manager.refresh_index()

        first_publication = PublishedContent.objects.get(
            pk=first_publication.pk)
        second_publication = PublishedContent.objects.get(
            pk=second_publication.pk)

        s = Search()
        s.query(MatchAll())
        results = self.manager.setup_search(s).execute()
        self.assertEqual(len(results), 2)  # Still 2, not 4 !

        found_old = False
        found_new = False

        for hit in results:
            if hit.meta.doc_type == PublishedContent.get_es_document_type():
                if hit.meta.id == first_publication.es_id:
                    found_old = True
                if hit.meta.id == second_publication.es_id:
                    found_new = True

        self.assertTrue(found_new)
        self.assertFalse(found_old)
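
The test performs the same publish-then-index cycle twice; condensed into a helper it would read roughly like the sketch below. The helper name is hypothetical, while publish_content, es_bulk_indexing_of_model, refresh_index and setup_search are used exactly as in the test above.

def publish_and_reindex(manager, db_object, versioned):
    # Hypothetical test helper condensing the steps used twice in the test above.
    published = publish_content(db_object, versioned, is_major_update=True)
    db_object.sha_public = versioned.current_version
    db_object.sha_draft = versioned.current_version
    db_object.public_version = published
    db_object.save()

    manager.es_bulk_indexing_of_model(PublishedContent, force_reindexing=True)
    manager.refresh_index()

    results = manager.setup_search(Search().query(MatchAll())).execute()
    return published, results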