Example #1
UPDATE_SCHEDULE_DEFAULT = {'seconds': 10}

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'
ITEM_TAKEDOWN = 'takedown'
ITEM_UNPUBLISH = 'unpublish'

enqueue_services = {
    ITEM_PUBLISH: EnqueuePublishedService(),
    ITEM_CORRECT: EnqueueCorrectedService(),
    ITEM_KILL: EnqueueKilledService(),
    ITEM_TAKEDOWN: EnqueueKilledService(published_state=CONTENT_STATE.RECALLED),
    ITEM_UNPUBLISH: EnqueueKilledService(published_state=CONTENT_STATE.UNPUBLISHED),
}


def get_enqueue_service(operation):
    try:
        enqueue_services[operation].get_filters()
    except KeyError:
        # Hot fix for https://dev.sourcefabric.org/browse/SDESK-3555
        # FIXME: this issue needs investigation and a proper fix.
        logger.error("unexpected operation: {operation}".format(operation=operation))
        operation = "correct"
        enqueue_services[operation].get_filters()
    return enqueue_services[operation]
Example #2
    def update(self, id, updates, original):
        """
        Overriding to handle with Kill workflow in the Archived repo:
            1. Check if Article has an associated Digital Story and if Digital Story has more Takes.
               If both Digital Story and more Takes exists then all of them would be killed along with the one requested
            2. If the item is flagged as archived only then it was never created by or published from the system so all
                that needs to be done is to delete it and send an email to all subscribers
            3. For each article being killed do the following:
                i.   Create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param id: primary key of the item to be killed
        :type id: str
        :param updates: updates to be applied on the article before saving
        :type updates: dict
        :param original:
        :type original: dict
        """

        # Step 1
        articles_to_kill = self._find_articles_to_kill({'_id': id})
        logger.info('Fetched articles to kill for id: {}'.format(id))
        articles_to_kill.sort(
            key=itemgetter(ITEM_TYPE),
            reverse=True)  # Needed because package has to be inserted last
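        # (descending lexicographic sort on the type field: packages have type
        # 'composite', which sorts before 'picture' and 'text', so the reverse
        # sort places packages last)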

        updated = original.copy()
        updated.update(updates)

        for article in articles_to_kill:

            # Step 2: if the article is flagged as archived only it has no related items
            # in the system, so it can simply be deleted. An email is then sent to all subscribers.
            if original.get('flags', {}).get('marked_archived_only', False):
                super().delete({'item_id': article['item_id']})
                logger.info('Delete for article: {}'.format(
                    article[config.ID_FIELD]))

                KillPublishService().broadcast_kill_email(article)
                logger.info('Broadcast kill email for article: {}'.format(
                    article[config.ID_FIELD]))
                continue

            # Prepare the article for kill: strip system properties and apply the kill updates
            self._remove_and_set_kill_properties(article, articles_to_kill,
                                                 updated)
            logger.info(
                'Removing and setting properties for article: {}'.format(
                    article[config.ID_FIELD]))

            # Step 3(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(
                    req=None, lookup={'item_id': article['item_id']}))

            if transmission_details:
                subscriber_ids = [
                    t['_subscriber_id'] for t in transmission_details
                ]
                query = {'$and': [{config.ID_FIELD: {'$in': subscriber_ids}}]}
                subscribers = list(
                    get_resource_service('subscribers').get(req=None,
                                                            lookup=query))

                EnqueueKilledService().queue_transmission(article, subscribers)
                logger.info('Queued Transmission for article: {}'.format(
                    article[config.ID_FIELD]))

            # restore the original _id: Archived stores the item's id in 'item_id',
            # so move that value back into the _id field before re-inserting the
            # article into archive (the pop default in the original was redundant)
            article[config.ID_FIELD] = article.pop('item_id')

            # Step 3(iv)
            super().delete({'item_id': article[config.ID_FIELD]})
            logger.info('Delete for article: {}'.format(
                article[config.ID_FIELD]))

            # Step 3(i) - create entries in the archive, archive_versions and published collections
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            published_doc = deepcopy(article)
            published_doc[QUEUE_STATE] = PUBLISH_STATE.QUEUED
            get_resource_service('published').post([published_doc])
            logger.info(
                'Insert into archive and published for article: {}'.format(
                    article[config.ID_FIELD]))

            # Step 3(iii)
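            # (import_into_legal_archive is a Celery task; countdown=3 defers
            # its execution by three seconds)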
            import_into_legal_archive.apply_async(
                countdown=3, kwargs={'item_id': article[config.ID_FIELD]})
            logger.info('Legal Archive import for article: {}'.format(
                article[config.ID_FIELD]))

            # Step 3(v)
            KillPublishService().broadcast_kill_email(article)
            logger.info('Broadcast kill email for article: {}'.format(
                article[config.ID_FIELD]))
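
A minimal sketch of the Step 3(ii) lookup above, with illustrative in-memory data in place of the real Legal Archive publish queue, and assuming config.ID_FIELD is the Eve default '_id':

transmission_details = [
    {'item_id': 'urn:item:1', '_subscriber_id': 'sub-a'},
    {'item_id': 'urn:item:1', '_subscriber_id': 'sub-b'},
]

# collect the subscribers that previously received the article and build the
# Mongo-style '$in' lookup that is run against the 'subscribers' resource
subscriber_ids = [t['_subscriber_id'] for t in transmission_details]
query = {'$and': [{'_id': {'$in': subscriber_ids}}]}

assert query == {'$and': [{'_id': {'$in': ['sub-a', 'sub-b']}}]}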
Example #3
from superdesk.errors import ConnectionTimeout

logger = logging.getLogger(__name__)

profile = cProfile.Profile()

UPDATE_SCHEDULE_DEFAULT = {'seconds': 10}

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'

enqueue_services = {
    ITEM_PUBLISH: EnqueuePublishedService(),
    ITEM_CORRECT: EnqueueCorrectedService(),
    ITEM_KILL: EnqueueKilledService()
}


def get_enqueue_service(operation):
    return enqueue_services[operation]


class EnqueueContent(superdesk.Command):
    """Runs deliveries"""
    def run(self):
        """Fetches items from publish queue as per the configuration, calls the transmit function.
        """
        lock_name = get_lock_id('publish', 'enqueue_published')
        if not lock(lock_name, expire=310):
            # illustrative completion: another run holds the lock, so skip this cycle
            logger.info('Enqueue content task is already running.')
            return
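
A self-contained sketch of the lock guard pattern above, with a hypothetical in-memory lock in place of superdesk's get_lock_id/lock helpers (the real ones coordinate across processes):

_held_locks = set()


def lock(name, expire=None):
    """Acquire the named lock; return False if it is already held."""
    if name in _held_locks:
        return False
    _held_locks.add(name)
    return True


def unlock(name):
    _held_locks.discard(name)


lock_name = 'publish:enqueue_published'
assert lock(lock_name, expire=310) is True   # first runner acquires the lock
assert lock(lock_name, expire=310) is False  # a concurrent runner skips
unlock(lock_name)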
Example #4
logger = logging.getLogger(__name__)

profile = cProfile.Profile()

UPDATE_SCHEDULE_DEFAULT = {'seconds': 10}

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'
ITEM_TAKEDOWN = 'takedown'
ITEM_UNPUBLISH = 'unpublish'

enqueue_services = {
    ITEM_PUBLISH: EnqueuePublishedService(),
    ITEM_CORRECT: EnqueueCorrectedService(),
    ITEM_KILL: EnqueueKilledService(),
    ITEM_TAKEDOWN: EnqueueKilledService(published_state=CONTENT_STATE.RECALLED),
    ITEM_UNPUBLISH: EnqueueKilledService(published_state=CONTENT_STATE.UNPUBLISHED),
}


def get_enqueue_service(operation):
    try:
        enqueue_services[operation].get_filters()
    except KeyError:
        # Hot fix for https://dev.sourcefabric.org/browse/SDESK-3555
        # FIXME: this issue needs investigation and a proper fix.
        logger.error("unexpected operation: {operation}".format(operation=operation))
        operation = "correct"
        enqueue_services[operation].get_filters()
    return enqueue_services[operation]
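
A trace of the hot fix's dispatch behavior, given the registry above (illustrative; the real services need a running Superdesk app):

# Dispatch behavior of the hot fix:
#   get_enqueue_service(ITEM_UNPUBLISH) -> EnqueueKilledService(published_state=UNPUBLISHED)
#   get_enqueue_service('bogus')        -> logs "unexpected operation: bogus",
#                                          then returns the ITEM_CORRECT service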
Example #5
logger = logging.getLogger(__name__)

profile = cProfile.Profile()

UPDATE_SCHEDULE_DEFAULT = {'seconds': 10}

ITEM_PUBLISH = 'publish'
ITEM_CORRECT = 'correct'
ITEM_KILL = 'kill'
ITEM_TAKEDOWN = 'takedown'

enqueue_services = {
    ITEM_PUBLISH: EnqueuePublishedService(),
    ITEM_CORRECT: EnqueueCorrectedService(),
    ITEM_KILL: EnqueueKilledService(),
    ITEM_TAKEDOWN: EnqueueKilledService(),
}


def get_enqueue_service(operation):
    enqueue_services[operation].get_filters()
    return enqueue_services[operation]
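# Note: unlike the hot-fixed variant in Example #4, this version has no
# KeyError guard, so an unknown operation propagates a plain KeyError.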


class EnqueueContent(superdesk.Command):
    """Runs deliveries"""
    def run(self):
        """Fetches items from publish queue as per the configuration, calls the transmit function.
        """
        lock_name = get_lock_id('publish', 'enqueue_published')
        if not lock(lock_name, expire=310):
            # illustrative completion, following Example #3: skip when the lock is held
            logger.info('Enqueue content task is already running.')
            return