Example #1
    def process_takes(self, updates_of_take_to_be_published, package_id, original_of_take_to_be_published=None):
        """
        Primary rule for publishing a Take in Takes Package is: all previous takes must be published before a take
        can be published.

        This method validates if the take(s) previous to this article are published. If not published then raises error.
        Also, generates body_html of the takes package and make sure the metadata for the package is the same as the
        metadata of the take to be published.

        :param updates_of_take_to_be_published: The take to be published
        :return: Takes Package document and body_html of the Takes Package
        :raises:
            1. Article Not Found Error: If take identified by GUID in the Takes Package is not found in archive.
            2. Previous Take Not Published Error
        """

        package = super().find_one(req=None, _id=package_id)
        body_html = updates_of_take_to_be_published.get('body_html', original_of_take_to_be_published['body_html'])
        package_updates = {'body_html': body_html + '<br>'}

        groups = package.get('groups', [])
        if groups:
            take_refs = [ref for group in groups if group['id'] == 'main' for ref in group.get('refs', [])]
            sequence_num_of_take_to_be_published = 0

            take_article_id = updates_of_take_to_be_published.get(
                config.ID_FIELD, original_of_take_to_be_published[config.ID_FIELD])

            for r in take_refs:
                if r[GUID_FIELD] == take_article_id:
                    sequence_num_of_take_to_be_published = r[SEQUENCE]
                    break

            if sequence_num_of_take_to_be_published:
                if self.published_state != "killed":
                    for sequence in range(sequence_num_of_take_to_be_published, 0, -1):
                        previous_take_ref = next(ref for ref in take_refs if ref.get(SEQUENCE) == sequence)
                        if previous_take_ref[GUID_FIELD] != take_article_id:
                            previous_take = super().find_one(req=None, _id=previous_take_ref[GUID_FIELD])

                            if not previous_take:
                                raise PublishQueueError.article_not_found_error(
                                    Exception("Take with id %s not found" % previous_take_ref[GUID_FIELD]))

                            if previous_take and previous_take[config.CONTENT_STATE] not in ['published', 'corrected']:
                                raise PublishQueueError.previous_take_not_published_error(
                                    Exception("Take with id {} is not published in Takes Package with id {}"
                                              .format(previous_take_ref[GUID_FIELD], package[config.ID_FIELD])))

                            package_updates['body_html'] = \
                                previous_take['body_html'] + '<br>' + package_updates['body_html']

                metadata_tobe_copied = ['headline', 'abstract', 'anpa_category', 'pubstatus', 'slugline', 'urgency',
                                        'subject', 'byline', 'dateline']

                for metadata in metadata_tobe_copied:
                    package_updates[metadata] = \
                        updates_of_take_to_be_published.get(metadata, original_of_take_to_be_published.get(metadata))

        return package, package_updates
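The rule described in the docstring above (a take can only go out once every take with a lower sequence number is already published or corrected) can be shown with a small self-contained sketch. The dict shapes and field names below are simplified stand-ins for the real package refs and archive documents, not the actual Superdesk schema.

# Minimal sketch of the "all previous takes must be published" rule,
# using plain dicts in place of the real package refs and archive lookups.
PUBLISHED_STATES = {'published', 'corrected'}

def can_publish_take(take_refs, sequence_to_publish, state_by_guid):
    """take_refs: [{'guid': ..., 'sequence': ...}]; returns (ok, blocking_guid)."""
    for ref in take_refs:
        if ref['sequence'] < sequence_to_publish:
            state = state_by_guid.get(ref['guid'])
            if state not in PUBLISHED_STATES:
                return False, ref['guid']  # an earlier take is still unpublished
    return True, None

refs = [{'guid': 'take-1', 'sequence': 1}, {'guid': 'take-2', 'sequence': 2}]
print(can_publish_take(refs, 2, {'take-1': 'published'}))    # (True, None)
print(can_publish_take(refs, 2, {'take-1': 'in_progress'}))  # (False, 'take-1')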
def transmit_items(queue_items, subscriber, destination, output_channels):
    failed_items = []

    for queue_item in queue_items:
        # Check if output channel is active
        output_channel = output_channels.get(str(queue_item['output_channel_id']))

        if not output_channel:
            raise PublishQueueError.output_channel_not_found_error(
                Exception('Output Channel: {}'.format(queue_item['output_channel_id'])))

        if not output_channel.get('is_active', False):
            continue

        try:
            if not is_on_time(queue_item, destination):
                continue

            # update the status of the item to in-progress
            queue_update = {'state': 'in-progress', 'transmit_started_at': utcnow()}
            superdesk.get_resource_service('publish_queue').patch(queue_item.get('_id'), queue_update)

            # get the formatted item
            formatted_item = superdesk.get_resource_service('formatted_item').\
                find_one(req=None, _id=queue_item['formatted_item_id'])

            transmitter = superdesk.publish.transmitters[destination.get('delivery_type')]
            transmitter.transmit(queue_item, formatted_item, subscriber, destination, output_channel)
            update_content_state(queue_item)
        except Exception:
            failed_items.append(queue_item)

    if len(failed_items) > 0:
        logger.error('Failed to publish the following items: %s', str(failed_items))
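transmit_items picks a transmitter out of superdesk.publish.transmitters keyed by the destination's delivery_type and hands it the queue item plus the pre-formatted document. A rough, self-contained sketch of that registry-and-dispatch pattern follows; the registry contents, transmitter class and field names here are invented for illustration and are not the real Superdesk transmitters.

# Sketch of the dispatch used above: transmitters are registered per delivery
# type and expose a transmit() hook that receives the queued, formatted item.
class EmailTransmitter:
    def transmit(self, queue_item, formatted_item, subscriber, destination):
        print('emailing item', queue_item['_id'], 'to', destination['config']['recipients'])

transmitters = {'email': EmailTransmitter()}  # stand-in for superdesk.publish.transmitters

def transmit_one(queue_item, formatted_item, subscriber, destination):
    transmitter = transmitters[destination.get('delivery_type')]
    transmitter.transmit(queue_item, formatted_item, subscriber, destination)

transmit_one({'_id': 1}, {'formatted_item': '<nitf/>'}, {'name': 'demo subscriber'},
             {'delivery_type': 'email', 'config': {'recipients': 'desk@example.com'}})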
    def publish(self, doc, updates, target_output_channels=None):
        any_channel_closed, wrong_formatted_channels, queued = \
            self.queue_transmission(doc=doc, target_output_channels=target_output_channels)

        if updates:
            desk = None

            if doc.get('task', {}).get('desk'):
                desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])

            if not doc.get('ingest_provider'):
                updates['source'] = desk['source'] if desk and desk.get('source', '') \
                    else DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

        user = get_user()

        if wrong_formatted_channels and len(wrong_formatted_channels) > 0:
            push_notification('item:publish:wrong:format',
                              item=str(doc['_id']), unique_name=doc['unique_name'],
                              desk=str(doc['task']['desk']),
                              user=str(user.get('_id', '')),
                              output_channels=[c['name'] for c in wrong_formatted_channels])

        if not target_output_channels and not queued:
            raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

        return any_channel_closed, queued
    def test_close_subscriber_does_close(self):
        with self.app.app_context():
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertTrue(subscriber.get("is_active"))

            PublishService().close_transmitter(subscriber, PublishQueueError.bad_schedule_error())
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertFalse(subscriber.get("is_active"))
    def test_close_subscriber_doesnt_close(self):
        with self.app.app_context():
            subscriber = self.app.data.find('subscribers', None, None)[0]
            self.assertTrue(subscriber.get('is_active'))

            PublishService().close_transmitter(subscriber, PublishQueueError.unknown_format_error())
            subscriber = self.app.data.find('subscribers', None, None)[0]
            self.assertTrue(subscriber.get('is_active'))
    def test_close_subscriber_does_close(self):
        with self.app.app_context():
            subscriber = self.app.data.find("subscribers", None, None)[0]
            self.assertTrue(subscriber.get("is_active"))

            PublishService().close_transmitter(subscriber, PublishQueueError.bad_schedule_error())
            subscriber = self.app.data.find("subscribers", None, None)[0]
            self.assertFalse(subscriber.get("is_active"))
Example #7
    def test_close_subscriber_doesnt_close(self):
        with self.app.app_context():
            subscriber = self.app.data.find_one('subscribers', None)
            self.assertTrue(subscriber.get('is_active'))

            PublishService().close_transmitter(subscriber, PublishQueueError.unknown_format_error())
            subscriber = self.app.data.find_one('subscribers', None)
            self.assertTrue(subscriber.get('is_active'))
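The two tests above capture the intent of close_transmitter: a schedule error is treated as serious enough to deactivate the subscriber, while an unknown-format error leaves it active. A minimal sketch of that kind of classification, assuming a hypothetical set of critical error codes; the real decision is made inside close_transmitter and PublishQueueError.

# Illustrative only: assume each queue error maps to a code, and a configurable
# set of codes is considered critical enough to close (deactivate) a subscriber.
CRITICAL_ERROR_CODES = {'bad_schedule_error'}  # hypothetical configuration

def should_deactivate(subscriber, error_code):
    """Return True when the error should flip the subscriber's is_active to False."""
    return subscriber.get('is_active', False) and error_code in CRITICAL_ERROR_CODES

subscriber = {'name': 'Wire subscriber', 'is_active': True}
print(should_deactivate(subscriber, 'bad_schedule_error'))    # True  -> close it
print(should_deactivate(subscriber, 'unknown_format_error'))  # False -> keep it active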
    def test_close_output_channel_does_close(self):
        with self.app.app_context():
            output_channel = self.app.data.find('output_channels', None, None)[1]
            self.assertTrue(output_channel.get('is_active'))

            PublishService().close_transmitter(output_channel, 'output_channels',
                                               PublishQueueError.bad_schedule_error())
            output_channel = self.app.data.find('output_channels', None, None)[1]
            self.assertFalse(output_channel.get('is_active'))
Example #9
    def queue_transmission(self, doc):
        try:
            if doc.get('destination_groups'):
                destination_groups = self.resolve_destination_groups(
                    doc.get('destination_groups'))
                output_channels, selector_codes, format_types = \
                    self.resolve_output_channels(destination_groups.values())

                for output_channel in output_channels.values():
                    subscribers = self.get_subscribers(output_channel)
                    if subscribers and subscribers.count() > 0:
                        formatter = get_formatter(output_channel['format'])

                        pub_seq_num, formatted_doc = formatter.format(
                            doc, output_channel)

                        formatted_item = {
                            'formatted_item': formatted_doc,
                            'format': output_channel['format'],
                            'item_id': doc['_id'],
                            'item_version': doc.get('last_version', 0),
                            'published_seq_num': pub_seq_num
                        }

                        formatted_item_id = get_resource_service(
                            'formatted_item').post([formatted_item])[0]

                        publish_queue_items = []

                        for subscriber in subscribers:
                            for destination in subscriber.get(
                                    'destinations', []):
                                publish_queue_item = dict()
                                publish_queue_item['item_id'] = doc['_id']
                                publish_queue_item[
                                    'formatted_item_id'] = formatted_item_id
                                publish_queue_item[
                                    'subscriber_id'] = subscriber['_id']
                                publish_queue_item['destination'] = destination
                                publish_queue_item[
                                    'output_channel_id'] = output_channel[
                                        '_id']
                                publish_queue_item[
                                    'selector_codes'] = selector_codes.get(
                                        output_channel['_id'], [])
                                publish_queue_item[
                                    'published_seq_num'] = pub_seq_num

                                publish_queue_items.append(publish_queue_item)

                        get_resource_service('publish_queue').post(
                            publish_queue_items)
            else:
                raise PublishQueueError.destination_group_not_found_error(
                    KeyError('Destination groups empty for article: {}'.format(
                        doc['_id'])), None)
        except:
            raise
def get_file_extension(queue_item):
    try:
        format = queue_item['destination']['format'].upper()
        if format == 'NITF':
            return 'ntf'
        if format == 'XML':
            return 'xml'
    except Exception as ex:
        raise PublishQueueError.item_update_error(ex)
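A quick usage sketch of the mapping above, reusing the get_file_extension just defined; the queue_item shape mirrors the destination documents used elsewhere in these examples.

# NITF-formatted items are written as .ntf files, XML as .xml; any other
# format falls through and the function returns None.
item = {'destination': {'format': 'nitf'}}
print(get_file_extension(item))  # 'ntf'
item['destination']['format'] = 'XML'
print(get_file_extension(item))  # 'xml'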
def is_on_time(queue_item, destination):
    """
    Checks if the item is ready to be processed
    :param queue_item: item to be checked
    :return: True if the item is ready
    """
    try:
        if queue_item.get('publish_schedule'):
            publish_schedule = queue_item['publish_schedule']
            if type(publish_schedule) is not datetime:
                raise PublishQueueError.bad_schedule_error(Exception("Schedule is not datetime"),
                                                           destination)
            return utcnow() >= publish_schedule
        return True
    except PublishQueueError:
        raise
    except Exception as ex:
        raise PublishQueueError.bad_schedule_error(ex, destination)
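A short usage sketch of the schedule check above, assuming (as in Superdesk's utilities) that utcnow() returns the current UTC datetime; the items and times below are invented.

from datetime import datetime, timedelta, timezone

def utcnow():
    return datetime.now(timezone.utc)

# An item with no publish_schedule is always ready; a scheduled item is ready
# only once its schedule time has passed.
due_item = {'publish_schedule': utcnow() - timedelta(minutes=5)}
future_item = {'publish_schedule': utcnow() + timedelta(hours=1)}

print(utcnow() >= due_item['publish_schedule'])     # True  -> transmit now
print(utcnow() >= future_item['publish_schedule'])  # False -> leave it on the queue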
    def test_close_subscriber_doesnt_close(self):
        with self.app.app_context():
            subscriber = self.app.data.find('subscribers', None, None)[0]
            self.assertTrue(subscriber.get('is_active'))

            PublishService().close_transmitter(subscriber, 'subscribers',
                                               PublishQueueError.destination_group_not_found_error())
            subscriber = self.app.data.find('subscribers', None, None)[0]
            self.assertTrue(subscriber.get('is_active'))
            self.assertIsNone(subscriber.get('last_closed'))
def get_file_extension(queue_item):
    try:
        format = queue_item["destination"]["format"].upper()
        if format == "NITF":
            return "ntf"
        if format == "XML":
            return "xml"
    except Exception as ex:
        raise PublishQueueError.item_update_error(ex)
Example #15
    def update(self, id, updates, original):
        """
        Handles the workflow for each of Publish, Correct and Kill.
        """
        try:
            user = get_user()
            last_updated = updates.get(config.LAST_UPDATED, utcnow())

            if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                self._publish_package_items(original, last_updated)

            set_sign_off(updates, original)
            queued_digital = False
            package_id = None

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
                # if targeted_for is set then we don't send the item to digital subscribers.
                if not updates.get('targeted_for', original.get('targeted_for')):
                    # check if item is in a digital package
                    package_id = TakesPackageService().get_take_package_id(original)

                    if package_id:
                        queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                    original, last_updated)
                    else:
                        # if item is going to be sent to digital subscribers, package it as a take
                        if self.sending_to_digital_subscribers(updates):
                            updated = copy(original)
                            updated.update(updates)
                            # create a takes package
                            package_id = TakesPackageService().package_story_as_a_take(updated, {}, None)
                            original = get_resource_service('archive').find_one(req=None, _id=original['_id'])
                            queued_digital, takes_package = self._publish_takes_package(package_id, updates,
                                                                                        original, last_updated)

                # queue only text items
                queued_wire = \
                    self.publish(doc=original, updates=updates, target_media_type=WIRE if package_id else None)

                queued = queued_digital or queued_wire
                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self._set_version_last_modified_and_state(original, updates, last_updated)
            self._update_archive(original=original, updates=updates, should_insert_into_versions=False)
            push_notification('item:publish', item=str(id), unique_name=original['unique_name'],
                              desk=str(original.get('task', {}).get('desk', '')), user=str(user.get('_id', '')))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}".format(str(e)))
        except Exception as e:
            logger.exception("Something bad happened while publishing %s".format(id))
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
Example #16
    def on_update(self, updates, original):
        self.raise_if_not_marked_for_publication(original)
        self.raise_if_invalid_state_transition(original)

        updated = original.copy()
        updated.update(updates)

        takes_package = self.takes_package_service.get_take_package(original)

        if self.publish_type == 'publish':
            # validate if take can be published
            if takes_package and not self.takes_package_service.can_publish_take(
                    takes_package,
                    updates.get(SEQUENCE, original.get(SEQUENCE, 1))):
                raise PublishQueueError.previous_take_not_published_error(
                    Exception("Previous takes are not published."))

            validate_schedule(
                updated.get('publish_schedule'),
                takes_package.get(SEQUENCE, 1) if takes_package else 1)

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(
                    EMBARGO):
                get_resource_service(ARCHIVE).validate_embargo(updated)

        if self.publish_type in ['correct', 'kill']:
            if updates.get(EMBARGO):
                raise SuperdeskApiError.badRequestError(
                    "Embargo can't be set after publishing")

            if updates.get('dateline'):
                raise SuperdeskApiError.badRequestError(
                    "Dateline can't be modified after publishing")

        validate_item = {
            'act': self.publish_type,
            'type': original['type'],
            'validate': updated
        }
        validation_errors = get_resource_service('validate').post(
            [validate_item])
        if validation_errors[0]:
            raise ValidationError(validation_errors)

        # validate the package if it is one
        package_validation_errors = []
        self._validate_package_contents(original, takes_package,
                                        package_validation_errors)
        if len(package_validation_errors) > 0:
            raise ValidationError(package_validation_errors)

        self._set_updates(original, updates,
                          updates.get(config.LAST_UPDATED, utcnow()))
def update_content_state(queue_item):
    """
    Updates the state of the content item to published, in archive and published collections.
    """

    if queue_item.get("publish_schedule"):
        try:
            item_update = {"state": "published"}
            get_resource_service("archive").patch(queue_item["item_id"], item_update)
            get_resource_service("published").update_published_items(queue_item["item_id"], "state", "published")
        except Exception as ex:
            raise PublishQueueError.content_update_error(ex)
Example #18
    def queue_transmission(self, doc):
        try:
            if doc.get('destination_groups'):
                any_channel_closed = False

                destination_groups = self.resolve_destination_groups(doc.get('destination_groups'))
                output_channels, selector_codes, format_types = \
                    self.resolve_output_channels(destination_groups.values())

                for output_channel in output_channels.values():
                    if output_channel.get('is_active', True) is False:
                        any_channel_closed = True

                    subscribers = self.get_subscribers(output_channel)
                    if subscribers and subscribers.count() > 0:
                        formatter = get_formatter(output_channel['format'])

                        pub_seq_num, formatted_doc = formatter.format(doc, output_channel)

                        formatted_item = {'formatted_item': formatted_doc, 'format': output_channel['format'],
                                          'item_id': doc['_id'], 'item_version': doc.get('last_version', 0),
                                          'published_seq_num': pub_seq_num}

                        formatted_item_id = get_resource_service('formatted_item').post([formatted_item])[0]

                        publish_queue_items = []

                        for subscriber in subscribers:
                            for destination in subscriber.get('destinations', []):
                                publish_queue_item = dict()
                                publish_queue_item['item_id'] = doc['_id']
                                publish_queue_item['formatted_item_id'] = formatted_item_id
                                publish_queue_item['subscriber_id'] = subscriber['_id']
                                publish_queue_item['destination'] = destination
                                publish_queue_item['output_channel_id'] = output_channel['_id']
                                publish_queue_item['selector_codes'] = selector_codes.get(output_channel['_id'], [])
                                publish_queue_item['published_seq_num'] = pub_seq_num
                                publish_queue_item['publish_schedule'] = doc.get('publish_schedule', None)
                                publish_queue_item['publishing_action'] = doc.get(config.CONTENT_STATE, None)
                                publish_queue_item['unique_name'] = doc.get('unique_name', None)
                                publish_queue_item['content_type'] = doc.get('type', None)
                                publish_queue_item['headline'] = doc.get('headline', None)

                                publish_queue_items.append(publish_queue_item)

                        get_resource_service('publish_queue').post(publish_queue_items)

                return any_channel_closed
            else:
                raise PublishQueueError.destination_group_not_found_error(
                    KeyError('Destination groups empty for article: {}'.format(doc['_id'])), None)
        except:
            raise
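queue_transmission formats the article once per output channel, then fans the stored formatted item out to every destination of every subscriber on that channel. A stripped-down sketch of that fan-out, using plain dicts and an invented formatted_item id in place of the real resource services:

# Illustration of the channel -> subscriber -> destination fan-out: one
# formatted item per channel, one publish queue entry per (subscriber, destination).
def fan_out(doc, output_channels, subscribers_by_channel):
    queue_items = []
    for channel in output_channels:
        formatted_item_id = 'formatted-{}-{}'.format(doc['_id'], channel['format'])
        for subscriber in subscribers_by_channel.get(channel['_id'], []):
            for destination in subscriber.get('destinations', []):
                queue_items.append({
                    'item_id': doc['_id'],
                    'formatted_item_id': formatted_item_id,
                    'subscriber_id': subscriber['_id'],
                    'output_channel_id': channel['_id'],
                    'destination': destination,
                })
    return queue_items

doc = {'_id': 'article-1'}
channels = [{'_id': 'ch1', 'format': 'nitf'}]
subs = {'ch1': [{'_id': 's1', 'destinations': [{'name': 'ftp'}, {'name': 'email'}]}]}
print(len(fan_out(doc, channels, subs)))  # 2 queue entries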
def update_content_state(queue_item):
    """
    Updates the state of the content item to published, in archive and published collections.
    """

    if queue_item.get('publish_schedule'):
        try:
            item_update = {'state': 'published'}
            get_resource_service('archive').patch(queue_item['item_id'], item_update)
            get_resource_service('published').update_published_items(queue_item['item_id'], 'state', 'published')
        except Exception as ex:
            raise PublishQueueError.content_update_error(ex)
Example #20
    def _validate(self, original, updates):
        self.raise_if_invalid_state_transition(original)

        updated = original.copy()
        updated.update(updates)

        self.raise_if_not_marked_for_publication(updated)

        takes_package = self.takes_package_service.get_take_package(original)

        if self.publish_type == 'publish':
            # validate if take can be published
            if takes_package and not self.takes_package_service.can_publish_take(
                    takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))):
                raise PublishQueueError.previous_take_not_published_error(
                    Exception("Previous takes are not published."))

            update_schedule_settings(updated, PUBLISH_SCHEDULE, updated.get(PUBLISH_SCHEDULE))
            validate_schedule(updated.get(SCHEDULE_SETTINGS, {}).get('utc_{}'.format(PUBLISH_SCHEDULE)),
                              takes_package.get(SEQUENCE, 1) if takes_package else 1)

        if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
            update_schedule_settings(updated, EMBARGO, updated.get(EMBARGO))
            get_resource_service(ARCHIVE).validate_embargo(updated)

        if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
            if updates.get(EMBARGO) and not original.get(EMBARGO):
                raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")

        if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
            if updates.get('dateline'):
                raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")

        if self.publish_type == ITEM_PUBLISH and updated.get('rewritten_by'):
            # if update is published then user cannot publish the takes
            rewritten_by = get_resource_service(ARCHIVE).find_one(req=None, _id=updated.get('rewritten_by'))
            if rewritten_by and rewritten_by.get(ITEM_STATE) in PUBLISH_STATES:
                raise SuperdeskApiError.badRequestError("Cannot publish the story after Update is published.!")

        publish_type = 'auto_publish' if updates.get('auto_publish') else self.publish_type
        validate_item = {'act': publish_type, 'type': original['type'], 'validate': updated}
        validation_errors = get_resource_service('validate').post([validate_item])
        if validation_errors[0]:
            raise ValidationError(validation_errors)

        validation_errors = []
        self._validate_associated_items(original, takes_package, validation_errors)

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self._validate_package(original, updates, validation_errors)

        if len(validation_errors) > 0:
            raise ValidationError(validation_errors)
Example #21
    def _validate(self, original, updates):
        self.raise_if_not_marked_for_publication(original)
        self.raise_if_invalid_state_transition(original)

        updated = original.copy()
        updated.update(updates)

        takes_package = self.takes_package_service.get_take_package(original)

        if self.publish_type == 'publish':
            # validate if take can be published
            if takes_package and not self.takes_package_service.can_publish_take(
                    takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))):
                raise PublishQueueError.previous_take_not_published_error(
                    Exception("Previous takes are not published."))

            update_schedule_settings(updated, PUBLISH_SCHEDULE, updated.get(PUBLISH_SCHEDULE))
            validate_schedule(updated.get(SCHEDULE_SETTINGS, {}).get('utc_{}'.format(PUBLISH_SCHEDULE)),
                              takes_package.get(SEQUENCE, 1) if takes_package else 1)

        if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
            update_schedule_settings(updated, EMBARGO, updated.get(EMBARGO))
            get_resource_service(ARCHIVE).validate_embargo(updated)

        if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
            if updates.get(EMBARGO) and not original.get(EMBARGO):
                raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")

        if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
            if updates.get('dateline'):
                raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")

        if self.publish_type == ITEM_PUBLISH and updated.get('rewritten_by'):
            # if update is published then user cannot publish the takes
            rewritten_by = get_resource_service(ARCHIVE).find_one(req=None, _id=updated.get('rewritten_by'))
            if rewritten_by and rewritten_by.get(ITEM_STATE) in PUBLISH_STATES:
                raise SuperdeskApiError.badRequestError("Cannot publish the story after Update is published.!")

        publish_type = 'auto_publish' if updates.get('auto_publish') else self.publish_type
        validate_item = {'act': publish_type, 'type': original['type'], 'validate': updated}
        validation_errors = get_resource_service('validate').post([validate_item])
        if validation_errors[0]:
            raise ValidationError(validation_errors)

        validation_errors = []
        self._validate_associated_items(original, takes_package, validation_errors)

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self._validate_package(original, updates, validation_errors)

        if len(validation_errors) > 0:
            raise ValidationError(validation_errors)
Example #22
def update_item_status(queue_item, status, error=None):
    try:
        item_update = {'state': status}
        if status == 'in-progress':
            item_update['transmit_started_at'] = utcnow()
        elif status == 'success':
            item_update['completed_at'] = utcnow()
        elif status == 'error' and error:
            item_update['error_message'] = '{}:{}'.format(error, str(error.system_exception))

        superdesk.get_resource_service('publish_queue').patch(queue_item.get('_id'), item_update)
    except Exception as ex:
        raise PublishQueueError.item_update_error(ex)
def update_item_status(queue_item, status, error=None):
    try:
        item_update = {"state": status}
        if status == "in-progress":
            item_update["transmit_started_at"] = utcnow()
        elif status == "success":
            item_update["completed_at"] = utcnow()
        elif status == "error" and error:
            item_update["error_message"] = "{}:{}".format(error, str(error.system_exception))

        superdesk.get_resource_service("publish_queue").patch(queue_item.get("_id"), item_update)
    except Exception as ex:
        raise PublishQueueError.item_update_error(ex)
def is_on_time(queue_item):
    """
    Checks if the item is ready to be processed

    :param queue_item: item to be checked
    :return: True if the item is ready
    """

    try:
        if queue_item.get('publish_schedule'):
            publish_schedule = queue_item['publish_schedule']
            if type(publish_schedule) is not datetime:
                raise PublishQueueError.bad_schedule_error(
                    Exception("Schedule is not datetime"), queue_item['_id'])
            return utcnow() >= publish_schedule

        return True
    except PublishQueueError:
        raise
    except Exception as ex:
        raise PublishQueueError.bad_schedule_error(ex,
                                                   queue_item['destination'])
    def on_update(self, updates, original):
        if original.get('marked_for_not_publication', False):
            raise SuperdeskApiError.badRequestError(
                message='Cannot publish an item which is marked as Not for Publication')

        if not is_workflow_state_transition_valid(self.publish_type, original[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        if original.get('item_id') and get_resource_service('published').is_published_before(original['item_id']):
            raise PublishQueueError.post_publish_exists_error(Exception('Story with id:{}'.format(original['_id'])))

        validate_item = {'act': self.publish_type, 'validate': updates}
        validation_errors = get_resource_service('validate').post([validate_item])
        if validation_errors[0]:
            raise ValidationError(validation_errors)
def update_content_state(queue_item):
    """
    Updates the state of the content item to published, in archive and published collections.
    """

    if queue_item.get('publish_schedule'):
        try:
            item_update = {'state': 'published'}
            get_resource_service('archive').patch(queue_item['item_id'],
                                                  item_update)
            get_resource_service('published').update_published_items(
                queue_item['item_id'], 'state', 'published')
        except Exception as ex:
            raise PublishQueueError.content_update_error(ex)
def update_content_state(queue_item):
    """
    Updates the state of the content item to published
    In archive and published repos
    :param queue_item:
    :return:
    """
    if queue_item.get('publish_schedule'):
        try:
            item_update = {'state': 'published'}
            superdesk.get_resource_service('archive').patch(queue_item['item_id'], item_update)
            superdesk.get_resource_service('published').\
                update_published_items(queue_item['item_id'], 'state', 'published')
        except Exception as ex:
            raise PublishQueueError.content_update_error(ex)
    def update_item_status(self, queue_item, status, error=None):
        try:
            item_update = {"state": status}
            if status == "in-progress":
                item_update["transmit_started_at"] = utcnow()
            elif status == "success":
                item_update["completed_at"] = utcnow()
            elif status == "error" and error:
                item_update["error_message"] = "{}:{}".format(
                    error, str(error.system_exception))

            publish_queue_service = superdesk.get_resource_service(
                "publish_queue")
            queue_id = (ObjectId(queue_item.get("_id")) if isinstance(
                queue_item.get("_id"), str) else queue_item.get("_id"))
            publish_queue_service.patch(queue_id, item_update)
        except Exception as ex:
            raise PublishQueueError.item_update_error(ex)
Example #30
    def on_update(self, updates, original):
        self.raise_if_not_marked_for_publication(original)
        self.raise_if_invalid_state_transition(original)

        updated = original.copy()
        updated.update(updates)

        takes_package = self.takes_package_service.get_take_package(original)

        if self.publish_type == "publish":
            # validate if take can be published
            if takes_package and not self.takes_package_service.can_publish_take(
                takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))
            ):
                raise PublishQueueError.previous_take_not_published_error(
                    Exception("Previous takes are not published.")
                )

            validate_schedule(updated.get("publish_schedule"), takes_package.get(SEQUENCE, 1) if takes_package else 1)

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
                get_resource_service(ARCHIVE).validate_embargo(updated)

        if self.publish_type in ["correct", "kill"]:
            if updates.get(EMBARGO):
                raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")

            if updates.get("dateline"):
                raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")

        validate_item = {"act": self.publish_type, "type": original["type"], "validate": updated}
        validation_errors = get_resource_service("validate").post([validate_item])
        if validation_errors[0]:
            raise ValidationError(validation_errors)

        # validate the package if it is one
        package_validation_errors = []
        self._validate_package_contents(original, takes_package, package_validation_errors)
        if len(package_validation_errors) > 0:
            raise ValidationError(package_validation_errors)

        self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
        updates[ITEM_OPERATION] = ITEM_PUBLISH
        convert_task_attributes_to_objectId(updates)
Example #31
    def queue_transmission(self, doc):
        try:
            if doc.get('destination_groups'):
                destination_groups = self.resolve_destination_groups(doc.get('destination_groups'))
                output_channels, selector_codes, format_types = \
                    self.resolve_output_channels(destination_groups.values())

                for output_channel in output_channels.values():
                    subscribers = self.get_subscribers(output_channel)
                    if subscribers and subscribers.count() > 0:
                        formatter = get_formatter(output_channel['format'])

                        pub_seq_num, formatted_doc = formatter.format(doc, output_channel)

                        formatted_item = {'formatted_item': formatted_doc, 'format': output_channel['format'],
                                          'item_id': doc['_id'], 'item_version': doc.get('last_version', 0),
                                          'published_seq_num': pub_seq_num}

                        formatted_item_id = get_resource_service('formatted_item').post([formatted_item])[0]

                        publish_queue_items = []

                        for subscriber in subscribers:
                            for destination in subscriber.get('destinations', []):
                                publish_queue_item = dict()
                                publish_queue_item['item_id'] = doc['_id']
                                publish_queue_item['formatted_item_id'] = formatted_item_id
                                publish_queue_item['subscriber_id'] = subscriber['_id']
                                publish_queue_item['destination'] = destination
                                publish_queue_item['output_channel_id'] = output_channel['_id']
                                publish_queue_item['selector_codes'] = selector_codes.get(output_channel['_id'], [])
                                publish_queue_item['published_seq_num'] = pub_seq_num

                                publish_queue_items.append(publish_queue_item)

                        get_resource_service('publish_queue').post(publish_queue_items)
            else:
                raise PublishQueueError.destination_group_not_found_error(
                    KeyError('Destination groups empty for article: {}'.format(doc['_id'])), None)
        except:
            raise
Example #32
    def on_update(self, updates, original):
        self.raise_if_not_marked_for_publication(original)
        self.raise_if_invalid_state_transition(original)

        updated = original.copy()
        updated.update(updates)

        takes_package = self.takes_package_service.get_take_package(original)

        if self.publish_type == 'publish':
            # validate if take can be published
            if takes_package and not self.takes_package_service.can_publish_take(
                    takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))):
                raise PublishQueueError.previous_take_not_published_error(
                    Exception("Previous takes are not published."))

            validate_schedule(updated.get('publish_schedule'), takes_package.get(SEQUENCE, 1) if takes_package else 1)

            if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
                get_resource_service(ARCHIVE).validate_embargo(updated)

        if self.publish_type in ['correct', 'kill']:
            if updates.get(EMBARGO):
                raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")

            if updates.get('dateline'):
                raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")

        validate_item = {'act': self.publish_type, 'type': original['type'], 'validate': updated}
        validation_errors = get_resource_service('validate').post([validate_item])
        if validation_errors[0]:
            raise ValidationError(validation_errors)

        # validate the package if it is one
        package_validation_errors = []
        self._validate_package_contents(original, takes_package, package_validation_errors)
        if len(package_validation_errors) > 0:
            raise ValidationError(package_validation_errors)

        self._set_updates(original, updates, updates.get(config.LAST_UPDATED, utcnow()))
Example #33
    def publish(self, doc, updates, target_media_type=None):
        """
        Queue the content for publishing.
        1. Sets the Metadata Properties - source and pubstatus
        2. Get the subscribers.
        3. Queue the content for subscribers
        4. Queue the content for previously published subscribers if any.
        5. Sends notification if no formatter has found for any of the formats configured in Subscriber.
        6. If not queued and not formatters then raise exception.
        :param dict doc: document to publish
        :param dict updates: updates for the document
        :param str target_media_type: dictate if the doc being queued is a Takes Package or an Individual Article.
                Valid values are - Wire, Digital. If Digital then the doc being queued is a Takes Package and if Wire
                then the doc being queues is an Individual Article.
        :return bool: if content is queued then True else False
        :raises PublishQueueError.item_not_queued_error:
                If the nothing is queued.
        """

        queued = True
        no_formatters = []
        updated = doc.copy()

        # Step 1
        if updates:
            desk = None

            if doc.get('task', {}).get('desk'):
                desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])

            if not doc.get('ingest_provider'):
                updates['source'] = desk['source'] if desk and desk.get('source', '') \
                    else DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

            updates['pubstatus'] = PUB_STATUS.CANCELED if self.publish_type == 'killed' else PUB_STATUS.USABLE
            updated.update(updates)

        # Step 2
        subscribers, subscribers_yet_to_receive = self.get_subscribers(doc, target_media_type)

        # Step 3
        no_formatters, queued = self.queue_transmission(updated, subscribers)

        # Step 4
        if subscribers_yet_to_receive:
            formatters_not_found, queued_new_subscribers = self.queue_transmission(updated, subscribers_yet_to_receive)
            no_formatters.extend(formatters_not_found)
            queued = queued or queued_new_subscribers

        # Step 5
        user = get_user()
        if len(no_formatters) > 0:
            push_notification('item:publish:wrong:format',
                              item=str(doc[config.ID_FIELD]), unique_name=doc['unique_name'],
                              desk=str(doc.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')),
                              formats=no_formatters)

        # Step 6
        if not target_media_type and not queued:
            raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

        return queued
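Step 1 of the docstring above only touches two metadata fields before anything is queued. A compact sketch of just that rule; the default source constant and the pubstatus values are placeholders standing in for DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES and PUB_STATUS from the real configuration.

# Sketch of the source/pubstatus rule: manually created stories inherit the
# desk source (or a configured default), and a kill cancels the pubstatus.
DEFAULT_SOURCE = 'default-source'  # stand-in for DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

def metadata_for_publish(doc, desk, publish_type):
    updates = {}
    if not doc.get('ingest_provider'):
        updates['source'] = desk['source'] if desk and desk.get('source') else DEFAULT_SOURCE
    updates['pubstatus'] = 'canceled' if publish_type == 'killed' else 'usable'
    return updates

print(metadata_for_publish({'task': {}}, {'source': 'Newsroom'}, 'published'))
# {'source': 'Newsroom', 'pubstatus': 'usable'}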
    def update(self, id, updates, original):
        archived_item = super().find_one(req=None, _id=id)

        try:
            any_channel_closed = False

            if archived_item['type'] == 'composite':
                self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

            # document is saved to keep the initial changes
            set_sign_off(updates, original)
            self.backend.update(self.datasource, id, updates, original)

            # document is saved to change the status
            if (original.get('publish_schedule') or updates.get('publish_schedule')) \
                    and original[config.CONTENT_STATE] not in PUBLISH_STATES:
                updates[config.CONTENT_STATE] = 'scheduled'
            else:
                updates['publish_schedule'] = None
                updates[config.CONTENT_STATE] = self.published_state

            original.update(updates)
            get_component(ItemAutosave).clear(original['_id'])

            if archived_item['type'] != 'composite':
                # check if item is in a digital package
                package_id = TakesPackageService().get_take_package_id(original)
                if package_id:
                    # process the takes to form digital master file content
                    package, package_updates = self.process_takes(take=original, package_id=package_id)
                    package_updates[config.CONTENT_STATE] = self.published_state
                    resolve_document_version(document=package_updates,
                                             resource=ARCHIVE, method='PATCH',
                                             latest_doc=package)
                    self.backend.update(self.datasource, package['_id'], package_updates, package)
                    package.update(package_updates)
                    insert_into_versions(doc=package)

                    # send it to the digital channels
                    any_channel_closed_digital, queued_digital = \
                        self.publish(doc=package, updates=None, target_output_channels=DIGITAL)

                    self.update_published_collection(published_item=package)
                else:
                    any_channel_closed_digital = False
                    queued_digital = False

                # queue only text items
                any_channel_closed_wire, queued_wire = \
                    self.publish(doc=original, updates=updates, target_output_channels=WIRE if package_id else None)

                any_channel_closed = any_channel_closed_digital or any_channel_closed_wire
                queued = queued_digital or queued_wire

                if not queued:
                    raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

            self.backend.update(self.datasource, id, updates, original)
            user = get_user()
            push_notification('item:publish:closed:channels' if any_channel_closed else 'item:publish',
                              item=str(id), unique_name=archived_item['unique_name'],
                              desk=str(archived_item.get('task', {}).get('desk', '')),
                              user=str(user.get('_id', '')))
            original.update(super().find_one(req=None, _id=id))
        except SuperdeskApiError as e:
            raise e
        except KeyError as e:
            raise SuperdeskApiError.badRequestError(
                message="Key is missing on article to be published: {}"
                .format(str(e)))
        except Exception as e:
            logger.error("Something bad happened while publishing %s".format(id), e)
            raise SuperdeskApiError.internalError(message="Failed to publish the item: {}"
                                                  .format(str(e)))
Example #35
    def publish(self, doc, updates, target_media_type=None):
        """
        1. Sets the Metadata Properties - source and pubstatus
        2. Formats and queues the article to subscribers based on the state of the article:
            a. If the state of the article is killed then:
                i.  An email should be sent to all Subscribers irrespective of their status.
                ii. The article should be formatted as per the type of the format and then queue article to the
                    Subscribers who received the article previously.
            b. If the state of the article is corrected then:
                i.      The article should be formatted as per the type of the format and then queue article to the
                        Subscribers who received the article previously.
                ii.     Fetch Active Subscribers and exclude those who received the article previously.
                iii.    If article has 'targeted_for' property then exclude subscribers of type Internet from
                        Subscribers list.
                iv.     For each subscriber in the list, check if the article matches against publish filters and
                        global filters if configured for the subscriber. If matches then the article should be formatted
                        as per the type of the format and then queue article to the subscribers.
            c. If the state of the article is published then:
                i.     Fetch Active Subscribers.
                ii.    If article has 'targeted_for' property then exclude subscribers of type Internet from
                       Subscribers list.
                iii.    For each subscriber in the list, check if the article matches against publish filters and global
                        filters if configured for the subscriber. If matches then the article should be formatted
                        as per the type of the format and then queue article.
        3. Sends a notification if no formatter is found for any of the formats configured in Subscriber.
        """

        queued = True
        no_formatters = []
        updated = doc.copy()

        # Step 1
        if updates:
            desk = None

            if doc.get('task', {}).get('desk'):
                desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])

            if not doc.get('ingest_provider'):
                updates['source'] = desk['source'] if desk and desk.get('source', '') \
                    else DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES

            updates['pubstatus'] = PUB_STATUS.CANCELED if self.publish_type == 'killed' else PUB_STATUS.USABLE
            updated.update(updates)

        # Step 2(a)
        if self.published_state == 'killed':
            req = ParsedRequest()
            req.sort = '[("completed_at", 1)]'
            queued_items = get_resource_service('publish_queue').get(
                req=req, lookup={'item_id': updated[config.ID_FIELD]})

            if queued_items.count():
                queued_items = list(queued_items)

                # Step 2(a)(i)
                subscribers = list(get_resource_service('subscribers').get(req=None, lookup=None))
                recipients = [s.get('email') for s in subscribers if s.get('email')]
                send_article_killed_email(doc, recipients, queued_items[0].get('completed_at'))

                # Step 2(a)(ii)
                no_formatters, queued = self.queue_transmission(updated, subscribers, None)
        elif self.published_state == 'corrected':  # Step 2(b)
            subscribers, subscribers_yet_to_receive = self.get_subscribers(updated)
            if subscribers:
                no_formatters, queued = self.queue_transmission(updated, subscribers)

                if subscribers_yet_to_receive:
                    # Step 2(b)(iv)
                    formatters_not_found, queued_new_subscribers = \
                        self.queue_transmission(updated, subscribers_yet_to_receive, target_media_type)
                    no_formatters.extend(formatters_not_found)
        elif self.published_state == 'published':  # Step 2(c)
            subscribers, subscribers_yet_to_receive = self.get_subscribers(updated)

            # Step 2(c)(iii)
            no_formatters, queued = self.queue_transmission(updated, subscribers, target_media_type)

        # Step 3
        user = get_user()
        if len(no_formatters) > 0:
            push_notification('item:publish:wrong:format',
                              item=str(doc[config.ID_FIELD]), unique_name=doc['unique_name'],
                              desk=str(doc.get('task', {}).get('desk', '')),
                              user=str(user.get(config.ID_FIELD, '')),
                              formats=no_formatters)

        if not target_media_type and not queued:
            raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)

        return queued
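The long docstring above boils down to a per-state choice of recipients before formatting and queueing. A condensed sketch of that selection, with plain lists standing in for the real subscriber queries (it ignores targeted_for and publish/global filters):

# Who receives the item, per article state: killed -> previous recipients only,
# corrected -> previous recipients plus active subscribers not yet reached,
# published -> all active subscribers.
def pick_recipients(state, previously_received, active_subscribers):
    if state == 'killed':
        return list(previously_received)
    if state == 'corrected':
        new_subscribers = [s for s in active_subscribers if s not in previously_received]
        return list(previously_received) + new_subscribers
    if state == 'published':
        return list(active_subscribers)
    return []

print(pick_recipients('corrected', ['sub-a'], ['sub-a', 'sub-b']))  # ['sub-a', 'sub-b']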
    def mock_transmit(*args):
        raise PublishQueueError.bad_schedule_error()
Example #37
    def mock_transmit(*args):
        raise PublishQueueError.bad_schedule_error()