Example 1: enqueue_item
def enqueue_item(published_item):
    """
    Creates the corresponding entries in the publish queue for the given item
    """
    published_item_id = ObjectId(published_item[config.ID_FIELD])
    published_service = get_resource_service(PUBLISHED)
    archive_service = get_resource_service(ARCHIVE)
    published_update = {QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS, 'last_queue_event': utcnow()}
    try:
        logger.info('Queueing item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))
        if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            # if scheduled then change the state to published
            logger.info('Publishing scheduled item_id: {}'.format(published_item_id))
            published_update[ITEM_STATE] = CONTENT_STATE.PUBLISHED
            archive_service.patch(published_item['item_id'], {ITEM_STATE: CONTENT_STATE.PUBLISHED})
            import_into_legal_archive.apply_async(countdown=3, kwargs={'item_id': published_item['item_id']})

        published_service.patch(published_item_id, published_update)
        get_enqueue_service(published_item[ITEM_OPERATION]).enqueue_item(published_item)
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.QUEUED})
        logger.info('Queued item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))
    except KeyError:
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.PENDING})
        logger.exception('No enqueue service found for operation %s', published_item[ITEM_OPERATION])
    except:
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.PENDING})
        raise
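
Note: these snippets assume enqueue_item() is driven by a periodic Celery task that scans the published collection for items still pending. A minimal sketch of such a driver follows; the task name, schedule, time limit, and lookup are assumptions, not taken from the source.

from superdesk.celery_app import celery


@celery.task(soft_time_limit=300)
def enqueue_published():
    # Hypothetical driver: pick up every published item whose queue state is
    # still pending and enqueue it. Only the enqueue_item() call mirrors the
    # example above; the lookup and scheduling are assumptions.
    published_service = get_resource_service(PUBLISHED)
    lookup = {QUEUE_STATE: PUBLISH_STATE.PENDING}
    for item in published_service.get(req=None, lookup=lookup):
        enqueue_item(item)
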
Example 2: delete
    def delete(self, lookup):
        """
        Overriding to handle the Kill workflow in the Archived repo:
            1. Check if the article has an associated Digital Story and if the Digital Story has more Takes.
               If both the Digital Story and more Takes exist, all of them are killed along with the one requested.
            2. For each article being killed do the following:
                i.   Apply the Kill Template and create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param lookup: query to find the article in archived repo
        :type lookup: dict
        """

        if app.testing and len(lookup) == 0:
            super().delete(lookup)
            return

        # Step 1
        articles_to_kill = self._find_articles_to_kill(lookup)
        articles_to_kill.sort(key=itemgetter(ITEM_TYPE), reverse=True)  # Needed because package has to be inserted last
        kill_service = KillPublishService()

        for article in articles_to_kill:
            # Step 2(i)
            to_apply_template = {'template_name': 'kill', 'item': article}
            get_resource_service('content_templates_apply').post([to_apply_template])
            article = to_apply_template['item']
            self._remove_and_set_kill_properties(article, articles_to_kill)

            # Step 2(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(req=None,
                                                                   lookup={'item_id': article[config.ID_FIELD]}))

            if transmission_details:
                subscriber_ids = [t['_subscriber_id'] for t in transmission_details]
                query = {'$and': [{config.ID_FIELD: {'$in': subscriber_ids}}]}
                subscribers = list(get_resource_service('subscribers').get(req=None, lookup=query))

                kill_service.queue_transmission(article, subscribers)

            # Step 2(iii)
            import_into_legal_archive.apply_async(kwargs={'doc': article})

            # Step 2(iv)
            super().delete({'item_id': article[config.ID_FIELD]})

            # Step 2(i) - Creating entries in published collection
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            get_resource_service('published').post(docs)

            # Step 2(v)
            kill_service.broadcast_kill_email(article)
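
The delete() override above is reached through the regular resource layer, so killing archived content is a resource delete with a Mongo-style lookup. A hedged usage sketch; the 'archived' resource name and the id literal are assumptions:

# Hypothetical usage: kill one archived item by id.
archived_service = get_resource_service('archived')
archived_service.delete(lookup={'_id': '588f2750069b7f6c4e9f1234'})
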
Example 3: enqueue_item
def enqueue_item(published_item):
    """
    Creates the corresponding entries in the publish queue for the given item
    """
    published_item_id = ObjectId(published_item[config.ID_FIELD])
    published_service = get_resource_service(PUBLISHED)
    archive_service = get_resource_service(ARCHIVE)
    published_update = {QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS, 'last_queue_event': utcnow()}
    try:
        logger.info('Queueing item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))

        published_item = published_service.find_one(req=None, _id=published_item_id)
        if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
            logger.info('Queue State is not pending for published item {}. It is in {}'.
                        format(published_item_id, published_item.get(QUEUE_STATE)))
            return

        if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            # if scheduled then change the state to published
            # change the `version` and `versioncreated` for the item
            # in archive collection and published collection.
            versioncreated = utcnow()
            item_updates = {'versioncreated': versioncreated, ITEM_STATE: CONTENT_STATE.PUBLISHED}
            resolve_document_version(document=item_updates, resource=ARCHIVE,
                                     method='PATCH',
                                     latest_doc={config.VERSION: published_item[config.VERSION]})

            # update the archive collection
            archive_item = archive_service.find_one(req=None, _id=published_item['item_id'])
            archive_service.system_update(published_item['item_id'], item_updates, archive_item)
            # insert into version.
            insert_into_versions(published_item['item_id'], doc=None)
            # import to legal archive
            import_into_legal_archive.apply_async(countdown=3, kwargs={'item_id': published_item['item_id']})
            logger.info('Modified the version of scheduled item: {}'.format(published_item_id))

            logger.info('Publishing scheduled item_id: {}'.format(published_item_id))
            # update the published collection
            published_update.update(item_updates)
            published_item.update({'versioncreated': versioncreated,
                                   ITEM_STATE: CONTENT_STATE.PUBLISHED,
                                   config.VERSION: item_updates[config.VERSION]})

        published_service.patch(published_item_id, published_update)
        queued = get_enqueue_service(published_item[ITEM_OPERATION]).enqueue_item(published_item)
        # if the item is queued in the publish_queue then the state is "queued"
        # else the queue state is "queued_not_transmitted"
        queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
        published_service.patch(published_item_id, {QUEUE_STATE: queue_state})
        logger.info('Queued item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))
    except KeyError:
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.PENDING})
        logger.exception('No enqueue service found for operation %s', published_item[ITEM_OPERATION])
    except:
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.PENDING})
        raise
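
Examples 1 and 3 (and the variants below) move the item through a small queue-state machine: pending -> in_progress -> queued or queued_not_transmitted, falling back to pending on failure. The snippets never show the constants themselves; the following reconstruction is a sketch, and the string values in particular are assumptions:

QUEUE_STATE = 'queue_state'      # field on the published item (assumed name)
ERROR_MESSAGE = 'error_message'  # used by Example 9 below (assumed name)


class PUBLISH_STATE:
    # Hypothetical values; only the attribute names appear in the snippets.
    PENDING = 'pending'
    IN_PROGRESS = 'in_progress'
    QUEUED = 'queued'
    QUEUED_NOT_TRANSMITTED = 'queued_not_transmitted'
    ERROR = 'error'
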
Example 4: _import_into_legal_archive
    def _import_into_legal_archive(self, doc):
        """Import into legal archive async

        :param {dict} doc: document to be imported
        """

        if doc.get(ITEM_STATE) != CONTENT_STATE.SCHEDULED:
            kwargs = {"item_id": doc.get(config.ID_FIELD)}
            # countdown=3 is for elasticsearch to be refreshed with archive and published changes
            import_into_legal_archive.apply_async(countdown=3, kwargs=kwargs)  # @UndefinedVariable
Example 5: _import_into_legal_archive
    def _import_into_legal_archive(self, doc):
        """
        Import the document into the legal archive asynchronously.
        :param dict doc: document to be imported
        """

        if doc.get(ITEM_STATE) != CONTENT_STATE.SCHEDULED:
            kwargs = {
                'doc': doc
            }
            import_into_legal_archive.apply_async(kwargs=kwargs)
Example 6: _import_into_legal_archive
    def _import_into_legal_archive(self, doc):
        """
        Import the document into the legal archive asynchronously.
        :param dict doc: document to be imported
        """

        if doc.get(ITEM_STATE) != CONTENT_STATE.SCHEDULED:
            kwargs = {
                'item_id': doc.get(config.ID_FIELD)
            }

            # countdown=3 is for elasticsearch to be refreshed with archive and published changes
            import_into_legal_archive.apply_async(countdown=3, kwargs=kwargs)  # @UndefinedVariable
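
Examples 4-6 differ only in the payload they hand to Celery: an item_id plus a countdown of 3 seconds (giving Elasticsearch time to refresh with the archive and published changes) or the full doc. A sketch of a task that would accept either calling convention; only the keyword names are inferred from the apply_async() calls above, and the body is illustrative:

from superdesk.celery_app import celery


@celery.task(bind=True, max_retries=3)
def import_into_legal_archive(self, item_id=None, doc=None):
    # Hypothetical body: resolve the document if only an id was sent, then
    # hand it to the legal archive resource. The 'legal_archive' resource
    # name and the retry policy are assumptions.
    if doc is None:
        doc = get_resource_service(ARCHIVE).find_one(req=None, _id=item_id)
    if doc:
        get_resource_service('legal_archive').post([doc])
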
Example 7: enqueue_item
def enqueue_item(published_item):
    """
    Creates the corresponding entries in the publish queue for the given item
    """
    published_item_id = ObjectId(published_item[config.ID_FIELD])
    published_service = get_resource_service(PUBLISHED)
    archive_service = get_resource_service(ARCHIVE)
    published_update = {
        QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS,
        'last_queue_event': utcnow()
    }
    try:
        logger.info('Queueing item with id: {} and item_id: {}'.format(
            published_item_id, published_item['item_id']))
        if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            # if scheduled then change the state to published
            logger.info(
                'Publishing scheduled item_id: {}'.format(published_item_id))
            published_update[ITEM_STATE] = CONTENT_STATE.PUBLISHED
            archive_service.patch(published_item['item_id'],
                                  {ITEM_STATE: CONTENT_STATE.PUBLISHED})
            import_into_legal_archive.apply_async(
                countdown=3, kwargs={'item_id': published_item['item_id']})

        published_service.patch(published_item_id, published_update)
        get_enqueue_service(
            published_item[ITEM_OPERATION]).enqueue_item(published_item)
        published_service.patch(published_item_id,
                                {QUEUE_STATE: PUBLISH_STATE.QUEUED})
        logger.info('Queued item with id: {} and item_id: {}'.format(
            published_item_id, published_item['item_id']))
    except KeyError:
        published_service.patch(published_item_id,
                                {QUEUE_STATE: PUBLISH_STATE.PENDING})
        logger.exception('No enqueue service found for operation %s',
                         published_item[ITEM_OPERATION])
    except:
        published_service.patch(published_item_id,
                                {QUEUE_STATE: PUBLISH_STATE.PENDING})
        raise
Example 8: update
    def update(self, id, updates, original):
        """Runs on update of archive item.

        Overriding to handle the Kill/Takedown workflow in the Archived repo:
            1. Check if the article has an associated Digital Story and if the Digital Story has more Takes.
               If both the Digital Story and more Takes exist, all of them are killed along with the one requested.
            2. If the item is flagged as archived only, it was never created by or published from the system, so all
               that needs to be done is to delete it and send an email to all subscribers.
            3. For each article being killed do the following:
                i.   Create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param id: primary key of the item to be killed
        :type id: str
        :param updates: updates to be applied on the article before saving
        :type updates: dict
        :param original: article as currently stored in the archived repo
        :type original: dict
        """

        # Step 1
        articles_to_kill = self.find_articles_to_kill({"_id": id})
        logger.info("Fetched articles to kill for id: {}".format(id))
        articles_to_kill.sort(
            key=itemgetter(ITEM_TYPE),
            reverse=True)  # Needed because package has to be inserted last
        is_kill = updates.get(ITEM_OPERATION) == ITEM_KILL
        kill_service = KillPublishService() if is_kill else TakeDownPublishService()

        updated = original.copy()

        for article in articles_to_kill:
            updates_copy = deepcopy(updates)
            kill_service.apply_kill_override(article, updates_copy)
            updated.update(updates_copy)
            # Step 2, If it is flagged as archived only it has no related items in the system so can be deleted.
            # An email is sent to all subscribers
            if original.get("flags", {}).get("marked_archived_only", False):
                super().delete({"item_id": article["item_id"]})
                logger.info("Delete for article: {}".format(
                    article[config.ID_FIELD]))
                kill_service.broadcast_kill_email(article, updates_copy)
                logger.info("Broadcast kill email for article: {}".format(
                    article[config.ID_FIELD]))
                continue

            # Step 3(i)
            self._remove_and_set_kill_properties(article, articles_to_kill,
                                                 updated)
            logger.info(
                "Removing and setting properties for article: {}".format(
                    article[config.ID_FIELD]))

            # Step 3(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(
                    req=None, lookup={"item_id": article["item_id"]}))

            if transmission_details:
                operation = updates.get(ITEM_OPERATION, ITEM_KILL)
                get_enqueue_service(operation).enqueue_archived_kill_item(
                    article, transmission_details)

            article[config.ID_FIELD] = article.pop("item_id",
                                                   article["item_id"])

            # Step 3(iv)
            super().delete({"item_id": article[config.ID_FIELD]})
            logger.info("Delete for article: {}".format(
                article[config.ID_FIELD]))

            # Step 3(i) - Creating entries in published collection
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            published_doc = deepcopy(article)
            published_doc[QUEUE_STATE] = PUBLISH_STATE.QUEUED
            get_resource_service("published").post([published_doc])
            logger.info(
                "Insert into archive and published for article: {}".format(
                    article[config.ID_FIELD]))

            # Step 3(iii)
            import_into_legal_archive.apply_async(
                countdown=3, kwargs={"item_id": article[config.ID_FIELD]})
            logger.info("Legal Archive import for article: {}".format(
                article[config.ID_FIELD]))

            # Step 3(v)
            kill_service.broadcast_kill_email(article, updates_copy)
            logger.info("Broadcast kill email for article: {}".format(
                article[config.ID_FIELD]))
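
In Example 8 the caller chooses between the kill and takedown flows through the updates payload, since the service is selected from updates[ITEM_OPERATION]. A hedged sketch of such a payload; the ITEM_KILL constant appears in the snippet, while the resource name, override field, and id value are assumptions:

# Hypothetical caller-side payload for the update() override above.
archived_service = get_resource_service('archived')
original = archived_service.find_one(req=None, _id='588f2750069b7f6c4e9f1234')
updates = {
    ITEM_OPERATION: ITEM_KILL,  # any other value routes to TakeDownPublishService
    'body_html': 'This article has been killed.',  # assumed override field
}
archived_service.update(id='588f2750069b7f6c4e9f1234', updates=updates, original=original)
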
Example 9: enqueue_item
    def enqueue_item(self, published_item):
        """
        Creates the corresponding entries in the publish queue for the given item
        """
        published_item_id = ObjectId(published_item[config.ID_FIELD])
        published_service = get_resource_service(PUBLISHED)
        archive_service = get_resource_service(ARCHIVE)
        published_update = {
            QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS,
            'last_queue_event': utcnow()
        }
        try:
            logger.info('Queueing item with id: {} and item_id: {}'.format(
                published_item_id, published_item['item_id']))

            published_item = published_service.find_one(req=None,
                                                        _id=published_item_id)
            if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
                logger.info(
                    'Queue State is not pending for published item {}. It is in {}'
                    .format(published_item_id,
                            published_item.get(QUEUE_STATE)))
                return

            if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
                # if scheduled then change the state to published
                # change the `version` and `versioncreated` for the item
                # in archive collection and published collection.
                versioncreated = utcnow()
                item_updates = {
                    'versioncreated': versioncreated,
                    ITEM_STATE: CONTENT_STATE.PUBLISHED
                }
                resolve_document_version(
                    document=item_updates, resource=ARCHIVE, method='PATCH',
                    latest_doc={config.VERSION: published_item[config.VERSION]})

                # update the archive collection
                archive_item = archive_service.find_one(
                    req=None, _id=published_item['item_id'])
                archive_service.system_update(published_item['item_id'],
                                              item_updates, archive_item)
                # insert into version.
                insert_into_versions(published_item['item_id'], doc=None)
                # update archive history
                app.on_archive_item_updated(item_updates, archive_item,
                                            ITEM_PUBLISH)
                # import to legal archive
                import_into_legal_archive.apply_async(
                    countdown=3, kwargs={'item_id': published_item['item_id']})
                logger.info(
                    'Modified the version of scheduled item: {}'.format(
                        published_item_id))

                logger.info('Publishing scheduled item_id: {}'.format(
                    published_item_id))
                # update the published collection
                published_update.update(item_updates)
                published_item.update({
                    'versioncreated': versioncreated,
                    ITEM_STATE: CONTENT_STATE.PUBLISHED,
                    config.VERSION: item_updates[config.VERSION],
                })
                # send a notification to the clients
                push_content_notification([{
                    '_id': str(published_item['item_id']),
                    'task': published_item.get('task', None),
                }])
                #  apply internal destinations
                signals.item_published.send(self,
                                            item=archive_service.find_one(
                                                req=None,
                                                _id=published_item['item_id']))

            published_service.patch(published_item_id, published_update)
            # queue the item for publishing
            try:
                queued = get_enqueue_service(
                    published_item[ITEM_OPERATION]).enqueue_item(
                        published_item, None)
            except KeyError as key_error:
                error_updates = {
                    QUEUE_STATE: PUBLISH_STATE.ERROR,
                    ERROR_MESSAGE: str(key_error)
                }
                published_service.patch(published_item_id, error_updates)
                logger.exception('No enqueue service found for operation %s',
                                 published_item[ITEM_OPERATION])
                raise

            # if the item is queued in the publish_queue then the state is "queued"
            # else the queue state is "queued_not_transmitted"
            queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
            published_service.patch(published_item_id,
                                    {QUEUE_STATE: queue_state})
            logger.info('Queued item with id: {} and item_id: {}'.format(
                published_item_id, published_item['item_id']))
        except ConnectionTimeout as error:  # recoverable, set state to pending and retry next time
            error_updates = {
                QUEUE_STATE: PUBLISH_STATE.PENDING,
                ERROR_MESSAGE: str(error)
            }
            published_service.patch(published_item_id, error_updates)
            raise
        except SoftTimeLimitExceeded as error:
            error_updates = {
                QUEUE_STATE: PUBLISH_STATE.PENDING,
                ERROR_MESSAGE: str(error)
            }
            published_service.patch(published_item_id, error_updates)
            raise
        except Exception as error:
            error_updates = {
                QUEUE_STATE: PUBLISH_STATE.ERROR,
                ERROR_MESSAGE: str(error)
            }
            published_service.patch(published_item_id, error_updates)
            raise
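
Example 9 classifies failures: ConnectionTimeout and SoftTimeLimitExceeded are recoverable, so the item is reset to pending and retried on the next run, while any other exception parks it in the error state. The three except blocks share one patch-and-reraise shape; a small helper could collapse them. A refactoring sketch, not from the source:

def _record_queue_failure(published_service, item_id, state, error):
    # Persist the failure state and message; the caller re-raises.
    published_service.patch(item_id, {
        QUEUE_STATE: state,
        ERROR_MESSAGE: str(error),
    })

Each handler then reduces to _record_queue_failure(published_service, published_item_id, PUBLISH_STATE.PENDING, error) followed by a bare raise.
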
Example 10: update
    def update(self, id, updates, original):
        """
        Overriding to handle the Kill workflow in the Archived repo:
            1. Check if the article has an associated Digital Story and if the Digital Story has more Takes.
               If both the Digital Story and more Takes exist, all of them are killed along with the one requested.
            2. If the item is flagged as archived only, it was never created by or published from the system, so all
               that needs to be done is to delete it and send an email to all subscribers.
            3. For each article being killed do the following:
                i.   Create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param id: primary key of the item to be killed
        :type id: str
        :param updates: updates to be applied on the article before saving
        :type updates: dict
        :param original: article as currently stored in the archived repo
        :type original: dict
        """

        # Step 1
        articles_to_kill = self._find_articles_to_kill({'_id': id})
        logger.info('Fetched articles to kill for id: {}'.format(id))
        articles_to_kill.sort(
            key=itemgetter(ITEM_TYPE),
            reverse=True)  # Needed because package has to be inserted last

        updated = original.copy()
        updated.update(updates)

        for article in articles_to_kill:

            # Step 2, If it is flagged as archived only it has no related items in the system so can be deleted.
            # An email is sent to all subscribers
            if original.get('flags', {}).get('marked_archived_only', False):
                super().delete({'item_id': article['item_id']})
                logger.info('Delete for article: {}'.format(
                    article[config.ID_FIELD]))

                KillPublishService().broadcast_kill_email(article)
                logger.info('Broadcast kill email for article: {}'.format(
                    article[config.ID_FIELD]))
                continue

            # Step 3(i)
            self._remove_and_set_kill_properties(article, articles_to_kill,
                                                 updated)
            logger.info(
                'Removing and setting properties for article: {}'.format(
                    article[config.ID_FIELD]))

            # Step 3(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(
                    req=None, lookup={'item_id': article['item_id']}))

            if transmission_details:
                subscriber_ids = [
                    t['_subscriber_id'] for t in transmission_details
                ]
                query = {'$and': [{config.ID_FIELD: {'$in': subscriber_ids}}]}
                subscribers = list(
                    get_resource_service('subscribers').get(req=None,
                                                            lookup=query))

                EnqueueKilledService().queue_transmission(article, subscribers)
                logger.info('Queued Transmission for article: {}'.format(
                    article[config.ID_FIELD]))

            article[config.ID_FIELD] = article.pop('item_id',
                                                   article['item_id'])

            # Step 3(iv)
            super().delete({'item_id': article[config.ID_FIELD]})
            logger.info('Delete for article: {}'.format(
                article[config.ID_FIELD]))

            # Step 3(i) - Creating entries in published collection
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            published_doc = deepcopy(article)
            published_doc[QUEUE_STATE] = PUBLISH_STATE.QUEUED
            get_resource_service('published').post([published_doc])
            logger.info(
                'Insert into archive and published for article: {}'.format(
                    article[config.ID_FIELD]))

            # Step 3(iii)
            import_into_legal_archive.apply_async(
                countdown=3, kwargs={'item_id': article[config.ID_FIELD]})
            logger.info('Legal Archive import for article: {}'.format(
                article[config.ID_FIELD]))

            # Step 3(v)
            KillPublishService().broadcast_kill_email(article)
            logger.info('Broadcast kill email for article: {}'.format(
                article[config.ID_FIELD]))
Example 11: update
    def update(self, id, updates, original):
        """
        Overriding to handle the Kill workflow in the Archived repo:
            1. Check if the article has an associated Digital Story and if the Digital Story has more Takes.
               If both the Digital Story and more Takes exist, all of them are killed along with the one requested.
            2. If the item is flagged as archived only, it was never created by or published from the system, so all
               that needs to be done is to delete it and send an email to all subscribers.
            3. For each article being killed do the following:
                i.   Create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param id: primary key of the item to be killed
        :type id: str
        :param updates: updates to be applied on the article before saving
        :type updates: dict
        :param original: article as currently stored in the archived repo
        :type original: dict
        """

        # Step 1
        articles_to_kill = self._find_articles_to_kill({'_id': id})
        logger.info('Fetched articles to kill for id: {}'.format(id))
        articles_to_kill.sort(key=itemgetter(ITEM_TYPE), reverse=True)  # Needed because package has to be inserted last
        kill_service = KillPublishService()

        updated = original.copy()
        updated.update(updates)

        for article in articles_to_kill:

            # Step 2, If it is flagged as archived only it has no related items in the system so can be deleted.
            # An email is sent to all subscribers
            if original.get('flags', {}).get('marked_archived_only', False):
                super().delete({'item_id': article['item_id']})
                logger.info('Delete for article: {}'.format(article[config.ID_FIELD]))

                kill_service.broadcast_kill_email(article)
                logger.info('Broadcast kill email for article: {}'.format(article[config.ID_FIELD]))
                continue

            # Step 3(i)
            self._remove_and_set_kill_properties(article, articles_to_kill, updated)
            kill_service.apply_kill_override(article, article)
            logger.info('Removing and setting properties for article: {}'.format(article[config.ID_FIELD]))

            # Step 3(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(req=None,
                                                                   lookup={'item_id': article['item_id']}))

            if transmission_details:
                subscriber_ids = [t['_subscriber_id'] for t in transmission_details]
                query = {'$and': [{config.ID_FIELD: {'$in': subscriber_ids}}]}
                subscribers = list(get_resource_service('subscribers').get(req=None, lookup=query))

                EnqueueKilledService().queue_transmission(article, subscribers)
                logger.info('Queued Transmission for article: {}'.format(article[config.ID_FIELD]))

            article[config.ID_FIELD] = article.pop('item_id', article['item_id'])

            # Step 3(iv)
            super().delete({'item_id': article[config.ID_FIELD]})
            logger.info('Delete for article: {}'.format(article[config.ID_FIELD]))

            # Step 3(i) - Creating entries in published collection
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            published_doc = deepcopy(article)
            published_doc[QUEUE_STATE] = PUBLISH_STATE.QUEUED
            get_resource_service('published').post([published_doc])
            logger.info('Insert into archive and published for article: {}'.format(article[config.ID_FIELD]))

            # Step 3(iii)
            import_into_legal_archive.apply_async(countdown=3, kwargs={'item_id': article[config.ID_FIELD]})
            logger.info('Legal Archive import for article: {}'.format(article[config.ID_FIELD]))

            # Step 3(v)
            kill_service.broadcast_kill_email(article)
            logger.info('Broadcast kill email for article: {}'.format(article[config.ID_FIELD]))
Example 12: delete
    def delete(self, lookup):
        """
        Overriding to handle the Kill workflow in the Archived repo:
            1. Check if the article has an associated Digital Story and if the Digital Story has more Takes.
               If both the Digital Story and more Takes exist, all of them are killed along with the one requested.
            2. For each article being killed do the following:
                i.   Apply the Kill Template and create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param lookup: query to find the article in archived repo
        :type lookup: dict
        """

        if app.testing and len(lookup) == 0:
            super().delete(lookup)
            return

        # Step 1
        articles_to_kill = self._find_articles_to_kill(lookup)
        articles_to_kill.sort(
            key=itemgetter(ITEM_TYPE),
            reverse=True)  # Needed because package has to be inserted last
        kill_service = KillPublishService()

        for article in articles_to_kill:
            # Step 2(i)
            to_apply_template = {'template_name': 'kill', 'item': article}
            get_resource_service('content_templates_apply').post(
                [to_apply_template])
            article = to_apply_template['item']
            self._remove_and_set_kill_properties(article, articles_to_kill)

            # Step 2(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(
                    req=None, lookup={'item_id': article[config.ID_FIELD]}))

            if transmission_details:
                subscriber_ids = [
                    t['_subscriber_id'] for t in transmission_details
                ]
                query = {'$and': [{config.ID_FIELD: {'$in': subscriber_ids}}]}
                subscribers = list(
                    get_resource_service('subscribers').get(req=None,
                                                            lookup=query))

                kill_service.queue_transmission(article, subscribers)

            # Step 2(iii)
            import_into_legal_archive.apply_async(kwargs={'doc': article})

            # Step 2(iv)
            super().delete({'item_id': article[config.ID_FIELD]})

            # Step 2(i) - Creating entries in published collection
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            get_resource_service('published').post(docs)

            # Step 2(v)
            kill_service.broadcast_kill_email(article)