def create_item_from_template(doc, extra_fields_to_override=None):
    fields_to_override = deepcopy(TEMPLATE_FIELDS_TO_OVERRIDE)
    if extra_fields_to_override is not None:
        fields_to_override.extend(extra_fields_to_override)

    archive_service = get_resource_service('archive')

    # First post the item in its entirety
    item_id = archive_service.post([deepcopy(doc)])[0]

    # Then calculate the fields to override
    # and apply them if any found
    updates = {
        key: val
        for key, val in doc.items() if key in fields_to_override
    }

    if updates:
        archive_service.patch(item_id, updates)

    # Finally retrieve the full item from the database
    # and insert it into versions
    item = archive_service.find_one(req=None, _id=item_id)
    insert_into_versions(doc=item)
    return item
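Every example on this page revolves around insert_into_versions, which snapshots an archive item into its versions collection; call sites pass either a full document (doc=) or just an identifier (id_=, or guid= in older revisions). A minimal sketch of the behaviour those call sites assume, not the actual Superdesk implementation:

from superdesk import get_resource_service

def insert_into_versions(id_=None, doc=None):
    # Load the archive document when only an identifier was supplied.
    if doc is None:
        doc = get_resource_service('archive').find_one(req=None, _id=id_)
    # Snapshot the current state of the item into the versions resource
    # (the 'archive_versions' resource name is an assumption).
    get_resource_service('archive_versions').post([doc])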
Example #2
    def on_updated(self, updates, original):
        updated = copy(original)
        updated.update(updates)
        if self._stage_changed(updates, original):
            insert_into_versions(doc=updated)
        new_task = updates.get('task', {})
        old_task = original.get('task', {})
        if new_task.get('stage') != old_task.get('stage'):
            push_notification('task:stage',
                              new_stage=str(new_task.get('stage', '')),
                              old_stage=str(old_task.get('stage', '')),
                              new_desk=str(new_task.get('desk', '')),
                              old_desk=str(old_task.get('desk', '')))
        else:
            push_notification(self.datasource, updated=1)

        if is_assigned_to_a_desk(updated):
            if self.__is_content_assigned_to_new_desk(original, updates) and \
                    not self._stage_changed(updates, original):
                insert_into_versions(doc=updated)
            add_activity(ACTIVITY_UPDATE,
                         'updated task {{ subject }} for item {{ type }}',
                         self.datasource,
                         item=updated,
                         subject=get_subject(updated),
                         type=updated['type'])
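_stage_changed is referenced here but not shown; a plausible implementation inferred from the call sites, an assumption rather than code from the project:

    def _stage_changed(self, updates, original):
        # True when the update moves the item to a different stage.
        new_stage = (updates.get('task') or {}).get('stage')
        old_stage = (original.get('task') or {}).get('stage')
        return new_stage is not None and str(new_stage) != str(old_stage)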
Example #3
    def _move(self, archived_doc, doc):
        archive_service = get_resource_service(ARCHIVE)
        original = deepcopy(archived_doc)
        user = get_user()
        send_to(doc=archived_doc,
                desk_id=doc.get('task', {}).get('desk'),
                stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))
        if archived_doc[ITEM_STATE] not in {
                CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED
        }:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE
        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)
        insert_into_versions(id_=original[config.ID_FIELD])
        push_item_move_notification(original, archived_doc)
        app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
Example #4
    def on_updated(self, updates, original):
        updated = copy(original)
        updated.update(updates)
        if self._stage_changed(updates, original):
            insert_into_versions(doc=updated)
        new_task = updates.get("task", {})
        old_task = original.get("task", {})
        if new_task.get("stage") != old_task.get("stage"):
            push_notification(
                "task:stage",
                new_stage=str(new_task.get("stage", "")),
                old_stage=str(old_task.get("stage", "")),
                new_desk=str(new_task.get("desk", "")),
                old_desk=str(old_task.get("desk", "")),
            )
        else:
            push_notification(self.datasource, updated=1)

        if is_assigned_to_a_desk(updated):
            if self.__is_content_assigned_to_new_desk(original, updates) and not self._stage_changed(updates, original):
                insert_into_versions(doc=updated)
            add_activity(
                ACTIVITY_UPDATE,
                "updated task {{ subject }} for item {{ type }}",
                self.datasource,
                item=updated,
                subject=get_subject(updated),
                type=updated["type"],
            )
Example #5
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)

        send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

        if archived_doc[config.CONTENT_STATE] != 'published':
            archived_doc[config.CONTENT_STATE] = 'submitted'

        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc['_id']
        archive_service.update(original['_id'], archived_doc, original)

        insert_into_versions(guid=original['_id'])

        return archived_doc
Example #6
    def on_updated(self, updates, original):
        """
        Locates the published or corrected non-take packages containing the corrected item
        and corrects them
        :param updates: correction
        :param original: original story
        """
        original_updates = dict()
        original_updates['operation'] = updates['operation']
        original_updates[ITEM_STATE] = updates[ITEM_STATE]
        super().on_updated(updates, original)
        ArchiveCropService().delete_replaced_crop_files(updates, original)
        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service('archive_correct')
            processed_packages = []
            for package in packages:
                if package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] and \
                        package.get(PACKAGE_TYPE, '') == '' and \
                        str(package[config.ID_FIELD]) not in processed_packages:
                    original_updates['groups'] = package['groups']

                    if updates.get('headline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'headline', updates.get('headline'))

                    if updates.get('slugline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'slugline', updates.get('slugline'))

                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #7
    def __publish_package_items(self, package, last_updated):
        """
        Publishes items of a package recursively
        """

        items = [ref.get('residRef') for group in package.get('groups', [])
                 for ref in group.get('refs', []) if 'residRef' in ref]

        if items:
            for guid in items:
                doc = super().find_one(req=None, _id=guid)
                original = copy(doc)
                try:
                    if doc['type'] == 'composite':
                        self.__publish_package_items(doc, last_updated)

                    resolve_document_version(document=doc, resource=ARCHIVE, method='PATCH', latest_doc=doc)
                    doc[config.CONTENT_STATE] = self.published_state
                    doc[config.LAST_UPDATED] = last_updated
                    doc[config.ETAG] = document_etag(doc)
                    self.backend.update(self.datasource, guid, {config.CONTENT_STATE: doc[config.CONTENT_STATE],
                                                                config.ETAG: doc[config.ETAG],
                                                                config.VERSION: doc[config.VERSION],
                                                                config.LAST_UPDATED: doc[config.LAST_UPDATED]},
                                        original)
                    insert_into_versions(doc=doc)
                except KeyError:
                    raise SuperdeskApiError.badRequestError("A non-existent content id is requested to publish")
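The nested comprehension at the top of __publish_package_items flattens a package's groups into the guids of the contained items, skipping idRef entries. A worked micro-example on a hypothetical package:

package = {'groups': [{'refs': [{'idRef': 'root'}, {'residRef': 'item-1'}]},
                      {'refs': [{'residRef': 'item-2'}]}]}
items = [ref.get('residRef') for group in package.get('groups', [])
         for ref in group.get('refs', []) if 'residRef' in ref]
assert items == ['item-1', 'item-2']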
Example #8
    def package_story_as_a_take(self, target, takes_package, link):
        """
        This function creates the takes package from the target item's metadata and links
        the target and link together inside it, the target as take1 and the link as take2.
        If no link is provided, only the target is added to the takes package.
        :param dict target: Target item to be added to the takes package.
        :param dict takes_package: takes package.
        :param dict link: item to be linked.
        :return: Takes Package Id
        """
        takes_package[ITEM_TYPE] = CONTENT_TYPE.COMPOSITE
        takes_package[PACKAGE_TYPE] = TAKES_PACKAGE
        fields_for_creating_takes_package = self.fields_for_creating_take.copy()
        fields_for_creating_takes_package.extend(['publish_schedule', 'event_id', 'rewrite_of', 'task',
                                                  EMBARGO])

        for field in fields_for_creating_takes_package:
            if field in target:
                takes_package[field] = target.get(field)

        takes_package.setdefault(config.VERSION, 1)

        create_root_group([takes_package])
        self.__link_items__(takes_package, target, link)

        ids = get_resource_service(ARCHIVE).post([takes_package])
        insert_into_versions(id_=ids[0])
        original_target = get_resource_service(ARCHIVE).find_one(req=None, _id=target[config.ID_FIELD])
        target[LINKED_IN_PACKAGES] = original_target[LINKED_IN_PACKAGES]
        get_resource_service('archive_broadcast').on_broadcast_master_updated(ITEM_CREATE, target,
                                                                              takes_package_id=ids[0])
        return ids[0]
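Note the 'if field in target' guard in the metadata copy: fields absent from the target are skipped rather than written as None (Example #28 further down omits the guard). A tiny illustration with a hypothetical item:

fields = ['publish_schedule', 'task']
target = {'task': {'desk': 'sports'}}   # no publish_schedule on this item
takes_package = {}
for field in fields:
    if field in target:                 # guard: skip absent fields
        takes_package[field] = target.get(field)
assert takes_package == {'task': {'desk': 'sports'}}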
Example #9
    def create(self, docs, **kwargs):
        target_id = request.view_args['target_id']
        doc = docs[0]
        link_id = doc.get('link_id')
        desk_id = doc.get('desk')
        service = get_resource_service(ARCHIVE)
        target = service.find_one(req=None, _id=target_id)
        self._validate_link(target, target_id)
        link = {}

        if desk_id:
            link = {'task': {'desk': desk_id}}
            user = get_user()
            lookup = {'_id': desk_id, 'members.user': user['_id']}
            desk = get_resource_service('desks').find_one(req=None, **lookup)
            if not desk:
                raise SuperdeskApiError.forbiddenError(
                    "No privileges to create new take on requested desk.")

            link['task']['stage'] = desk['working_stage']

        if link_id:
            link = service.find_one(req=None, _id=link_id)

        linked_item = self.packageService.link_as_next_take(target, link)
        insert_into_versions(id_=linked_item[config.ID_FIELD])
        doc.update(linked_item)
        build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return [linked_item['_id']]
Example #10
def create_scheduled_content(now=None):
    lock_name = get_lock_id("Template", "Schedule")
    if not lock(lock_name, expire=130):
        logger.info("Task: {} is already running.".format(lock_name))
        return

    try:
        if now is None:
            now = utcnow()
        templates = get_scheduled_templates(now)
        production = superdesk.get_resource_service(ARCHIVE)
        items = []
        for template in templates:
            set_template_timestamps(template, now)
            item = get_item_from_template(template)
            item[config.VERSION] = 1
            production.post([item])
            insert_into_versions(doc=item)
            try:
                apply_onstage_rule(item, item.get(config.ID_FIELD))
            except Exception as ex:  # noqa
                logger.exception(
                    "Failed to apply on stage rule while scheduling template.")
            items.append(item)
        return items
    except Exception as e:
        logger.exception("Task: {} failed with error {}.".format(
            lock_name, str(e)))
    finally:
        unlock(lock_name)
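The lock / try / finally shape above is the standard guard that keeps a periodic task from running concurrently on several workers; its skeleton, using the same helpers (assumed importable from superdesk.lock):

from superdesk.lock import lock, unlock, get_lock_id

def run_once():
    pass  # hypothetical task body, executed by at most one worker at a time

lock_name = get_lock_id("Template", "Schedule")
if lock(lock_name, expire=130):     # acquired: no other worker holds it
    try:
        run_once()
    finally:
        unlock(lock_name)           # always release, even on failure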
Example #11
    def create(self, docs):
        ids = []
        production = get_resource_service('archive')
        for doc in docs:
            item_type = doc.pop('type')
            item_list = get_items(doc.pop('items', []), item_type)
            desk = get_resource_service('desks').find_one(req=None, _id=doc.pop('desk')) or {}
            article_template = doc.pop('article_template', None)
            if article_template:
                content_template = superdesk.get_resource_service('content_templates').find_one(
                    req=None, _id=article_template) or {}
            else:
                content_template = get_desk_template(desk)

            item = get_item_from_template(content_template)
            item[current_app.config['VERSION']] = 1
            item.setdefault('type', 'text')
            item.setdefault('slugline', 'Planning' if item_type == 'planning' else 'Event')
            item['task'] = {
                'desk': desk.get('_id'),
                'user': get_user_id(),
                'stage': desk.get('working_stage'),
            }
            item_from_template = generate_text_item(item_list, doc.pop('template', None), item_type)
            for key, val in item_from_template.items():
                placeholder = PLACEHOLDER_HTML if '_html' in key else PLACEHOLDER_TEXT
                if item.get(key) and placeholder in item[key]:
                    item[key] = item[key].replace(placeholder, val)
                else:
                    item[key] = val
            production.post([item])
            insert_into_versions(doc=item)
            doc.update(item)
            ids.append(doc['_id'])
        return ids
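The placeholder swap at the end of the loop substitutes generated text into a template field only when that field carries the marker; otherwise the generated value replaces the field outright. A worked sketch, with the marker value assumed for illustration:

PLACEHOLDER_TEXT = '{{content}}'   # assumed marker value
item = {'body_text': 'Coverage plan: {{content}}'}
val = 'Two events scheduled for Monday.'
if item.get('body_text') and PLACEHOLDER_TEXT in item['body_text']:
    item['body_text'] = item['body_text'].replace(PLACEHOLDER_TEXT, val)
else:
    item['body_text'] = val
assert item['body_text'] == 'Coverage plan: Two events scheduled for Monday.'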
Example #12
    def on_updated(self, updates, original):
        """Runs on update

        Locates the published or corrected packages containing the corrected item
        and corrects them

        :param updates: correction
        :param original: original story
        """
        original_updates = dict()
        original_updates['operation'] = updates['operation']
        original_updates[ITEM_STATE] = updates[ITEM_STATE]
        super().on_updated(updates, original)
        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service('archive_correct')
            processed_packages = []
            for package in packages:
                if package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] and \
                        str(package[config.ID_FIELD]) not in processed_packages:
                    original_updates['groups'] = package['groups']

                    if updates.get('headline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'headline', updates.get('headline'))

                    if updates.get('slugline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'slugline', updates.get('slugline'))

                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #13
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service("ingest_providers").find_one(source="aapmm", req=None)
        if provider and "config" in provider and "username" in provider["config"]:
            self.backend.set_credentials(provider["config"]["username"], provider["config"]["password"])
        for doc in docs:
            if not doc.get("desk"):
                # if no desk is selected then it is a bad request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
            try:
                archived_doc = self.backend.find_one_raw(doc["guid"], doc["guid"])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc["_id"] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc["ingest_provider"] = str(provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get("desk"), stage_id=doc.get("stage"))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc["_id"]
            dest_doc[FAMILY_ID] = archived_doc["_id"]
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get("_id"))

        return new_guids
Example #14
    def create(self, docs, **kwargs):
        new_guids = []
        provider = self.get_provider()
        for doc in docs:
            if not doc.get('desk'):
                # if no desk is selected then it is a bad request
                raise SuperdeskApiError.badRequestError(
                    _("Destination desk cannot be empty."))
            try:
                archived_doc = self.fetch(doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = fetch_item(archived_doc,
                                  doc.get('desk'),
                                  doc.get('stage'),
                                  state=doc.get('state'))
            new_guids.append(dest_doc['guid'])

            if provider:
                dest_doc['ingest_provider'] = str(
                    provider[superdesk.config.ID_FIELD])

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        if new_guids:
            get_resource_service('search_providers').system_update(
                provider.get(config.ID_FIELD), {'last_item_update': utcnow()},
                provider)

        return new_guids
Example #15
def create_scheduled_content(now=None):
    lock_name = get_lock_id("Template", "Schedule")
    if not lock(lock_name, expire=130):
        logger.info('Task: {} is already running.'.format(lock_name))
        return

    try:
        if now is None:
            now = utcnow()
        templates = get_scheduled_templates(now)
        production = superdesk.get_resource_service(ARCHIVE)
        items = []
        for template in templates:
            set_template_timestamps(template, now)
            item = get_item_from_template(template)
            item[config.VERSION] = 1
            production.post([item])
            insert_into_versions(doc=item)
            try:
                apply_onstage_rule(item, item.get(config.ID_FIELD))
            except Exception as ex:  # noqa
                logger.exception('Failed to apply on stage rule while scheduling template.')
            items.append(item)
        return items
    except Exception as e:
        logger.exception('Task: {} failed with error {}.'.format(lock_name, str(e)))
    finally:
        unlock(lock_name)
Example #16
    def __publish_package_items(self, package, last_updated):
        """
        Publishes items of a package recursively

        :return: True if all the items of a package have been published successfully. False otherwise.
        """

        items = [ref.get('residRef') for group in package.get('groups', [])
                 for ref in group.get('refs', []) if 'residRef' in ref]

        if items:
            for guid in items:
                doc = super().find_one(req=None, _id=guid)
                original = copy(doc)
                try:
                    if doc['type'] == 'composite':
                        self.__publish_package_items(doc, last_updated)

                    resolve_document_version(document=doc, resource=ARCHIVE, method='PATCH', latest_doc=doc)
                    doc[config.CONTENT_STATE] = 'published'
                    doc[config.LAST_UPDATED] = last_updated
                    doc[config.ETAG] = document_etag(doc)
                    self.backend.update(self.datasource, guid, {config.CONTENT_STATE: doc[config.CONTENT_STATE],
                                                                config.ETAG: doc[config.ETAG],
                                                                config.VERSION: doc[config.VERSION],
                                                                config.LAST_UPDATED: doc[config.LAST_UPDATED]},
                                        original)
                    insert_into_versions(doc=doc)
                except KeyError:
                    raise SuperdeskApiError.badRequestError("A non-existent content id is requested to publish")
Example #17
    def remove_refs_in_package(self,
                               package,
                               ref_id_to_remove,
                               processed_packages=None):
        """Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.

        Before removing checks if the package has been processed. If processed the package is skipped.
        In case of takes package, sequence is decremented and last_take field is updated.
        If sequence is zero then the takes package is deleted.

        :return: sub_package_ids
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [
            ref['guid'] for group in groups for ref in group[REFS]
            if ref.get('type') == CONTENT_TYPE.COMPOSITE
        ]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package,
                                                   ref_id_to_remove)

        new_groups = self.remove_group_ref(package, ref_id_to_remove)

        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        # if takes package then adjust the reference.
        # safe to do this as take can only be in one takes package.
        delete_package = False
        if package.get(PACKAGE_TYPE) == TAKES_PACKAGE:
            new_sequence = package[SEQUENCE] - 1
            if new_sequence == 0:
                # remove the takes package.
                get_resource_service(ARCHIVE).delete_action(
                    {config.ID_FIELD: package[config.ID_FIELD]})
                delete_package = True
            else:
                updates[SEQUENCE] = new_sequence
                last_take_group = next(
                    reference for reference in next(
                        new_group.get(REFS) for new_group in new_groups
                        if new_group[GROUP_ID] == MAIN_GROUP)
                    if reference.get(SEQUENCE) == new_sequence)

                if last_take_group:
                    updates[LAST_TAKE] = last_take_group.get(RESIDREF)

        if not delete_package:
            resolve_document_version(updates, ARCHIVE, 'PATCH', package)
            get_resource_service(ARCHIVE).patch(package[config.ID_FIELD],
                                                updates)
            insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
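The nested next(...) expression that resolves the new last take is dense; unpacked on a hypothetical takes package where take 3 was just removed (constant values assumed: GROUP_ID='id', REFS='refs', SEQUENCE='sequence', RESIDREF='residRef', MAIN_GROUP='main'):

new_groups = [{'id': 'main', 'refs': [{'residRef': 't1', 'sequence': 1},
                                      {'residRef': 't2', 'sequence': 2}]}]
new_sequence = 2
main_refs = next(group.get('refs') for group in new_groups if group['id'] == 'main')
last_take = next(ref for ref in main_refs if ref.get('sequence') == new_sequence)
assert last_take['residRef'] == 't2'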
Example #18
    def on_updated(self, updates, original):
        original = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] != ITEM_KILL and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service('archive_broadcast').on_broadcast_master_updated(updates[ITEM_OPERATION], original)

        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
        CropService().update_media_references(updates, original, True)
        superdesk.item_published.send(self, item=original)
        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service('archive_correct')
            processed_packages = []
            for package in packages:
                original_updates = {'operation': updates['operation'], ITEM_STATE: updates[ITEM_STATE]}
                if package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] and \
                        package.get(PACKAGE_TYPE, '') == '' and \
                        str(package[config.ID_FIELD]) not in processed_packages:
                    original_updates['groups'] = package['groups']

                    if updates.get('headline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'headline', updates.get('headline'))

                    if updates.get('slugline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'slugline', updates.get('slugline'))

                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #19
    def link_as_next_take(self, target, link):
        """
        # check if target has an associated takes package
        # if not, create it and add target as a take
        # check if the target is the last take, if not, resolve the last take
        # copy metadata from the target and add it as the next take
        # return the updated link item
        """
        takes_package_id = self.get_take_package_id(target)
        archive_service = get_resource_service(ARCHIVE)
        takes_package = archive_service.find_one(req=None, _id=takes_package_id) if takes_package_id else {}

        if not link.get('_id'):
            self.__copy_metadata__(target, link, takes_package)
            archive_service.post([link])

        if not takes_package_id:
            takes_package_id = self.package_story_as_a_take(target, takes_package, link)
        else:
            self.__link_items__(takes_package, target, link)
            del takes_package['_id']
            resolve_document_version(takes_package, ARCHIVE, 'PATCH', takes_package)
            archive_service.patch(takes_package_id, takes_package)

        insert_into_versions(id_=takes_package_id)

        return link
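Examples #9 (above) and #32 (below) show the caller side; schematically, with hypothetical ids and the service class name assumed:

archive_service = get_resource_service(ARCHIVE)
package_service = TakesPackageService()                 # class name assumed
target = archive_service.find_one(req=None, _id='story-1')
take2 = package_service.link_as_next_take(target, {})   # empty dict: create a new take
insert_into_versions(id_=take2[config.ID_FIELD])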
Example #20
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)
        user = get_user()

        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        set_sign_off(archived_doc, original=original)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        return archived_doc
Example #21
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)

        send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

        if archived_doc[config.CONTENT_STATE] not in ['published', 'scheduled', 'killed']:
            archived_doc[config.CONTENT_STATE] = 'submitted'

        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc['_id']
        archive_service.update(original['_id'], archived_doc, original)

        insert_into_versions(id_=original['_id'])

        return archived_doc
Example #22
    def on_updated(self, updates, original):
        original = super().find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] not in {ITEM_KILL, ITEM_TAKEDOWN} and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service('archive_broadcast').on_broadcast_master_updated(updates[ITEM_OPERATION], original)

        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
        CropService().update_media_references(updates, original, True)
        superdesk.item_published.send(self, item=original)
        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service('archive_correct')
            processed_packages = []
            for package in packages:
                original_updates = {'operation': updates['operation'], ITEM_STATE: updates[ITEM_STATE]}
                if package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] and \
                        package.get(PACKAGE_TYPE, '') == '' and \
                        str(package[config.ID_FIELD]) not in processed_packages:
                    original_updates['groups'] = package['groups']

                    if updates.get('headline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'headline', updates.get('headline'))

                    if updates.get('slugline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'slugline', updates.get('slugline'))

                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #23
    def spike_item(self, original):
        """If Original item is re-write then it will remove the reference from the broadcast item.

        :param: dict original: original document
        """
        broadcast_items = [
            item
            for item in self.get_broadcast_items_from_master_story(original)
            if item.get(ITEM_STATE) not in PUBLISH_STATES
        ]
        spike_service = get_resource_service('archive_spike')

        for item in broadcast_items:
            id_ = item.get(config.ID_FIELD)
            try:
                self.packageService.remove_spiked_refs_from_package(id_)
                updates = {ITEM_STATE: CONTENT_STATE.SPIKED}
                resolve_document_version(updates, SOURCE, 'PATCH', item)
                spike_service.patch(id_, updates)
                insert_into_versions(id_=id_)
            except Exception:
                logger.exception(
                    "Failed to spike the related broadcast item {}.".format(id_))

        if original.get('rewrite_of') and original.get(
                ITEM_STATE) not in PUBLISH_STATES:
            self.remove_rewrite_refs(original)
Example #24
    def create(self, docs, **kwargs):
        new_guids = []
        provider = self.get_provider()
        for doc in docs:
            if not doc.get('desk'):
                # if no desk is selected then it is a bad request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
            try:
                archived_doc = self.fetch(doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #25
    def create(self, docs, **kwargs):
        guid_of_item_to_be_moved = request.view_args['guid']

        guid_of_moved_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_moved)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' %
                                                      guid_of_item_to_be_moved)

            current_stage_of_item = archived_doc.get('task', {}).get('stage')
            if current_stage_of_item and str(current_stage_of_item) == str(doc.get('stage')):
                raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

            if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            original = dict(archived_doc)

            send_to(archived_doc, doc.get('desk'), doc.get('stage'))
            archived_doc[config.CONTENT_STATE] = 'submitted'
            resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

            del archived_doc['_id']
            archive_service.update(original['_id'], archived_doc, original)

            insert_into_versions(guid=original['_id'])

            guid_of_moved_items.append(archived_doc['guid'])

        return guid_of_moved_items
Example #26
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service('ingest_providers').find_one(
            source='aapmm', req=None)
        for doc in docs:
            if not doc.get('desk'):
                # if no desk is selected then it is a bad request
                raise SuperdeskApiError.badRequestError(
                    "Destination desk cannot be empty.")

            archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(
                    provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get('desk'), doc.get('stage'))
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #27
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service('ingest_providers').find_one(source='aapmm', req=None)
        for doc in docs:
            if not doc.get('desk'):
                # if no desk is selected then it is a bad request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")

            archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #28
    def package_story_as_a_take(self, target, takes_package, link):
        """
        This function creates the takes package from the target item's metadata and links
        the target and link together inside it, the target as take1 and the link as take2.
        If no link is provided, only the target is added to the takes package.
        :param dict target: Target item to be added to the takes package.
        :param dict takes_package: takes package.
        :param dict link: item to be linked.
        :return: Takes Package Id
        """
        takes_package[ITEM_TYPE] = CONTENT_TYPE.COMPOSITE
        takes_package[PACKAGE_TYPE] = TAKES_PACKAGE
        fields_for_creating_takes_package = self.fields_for_creating_take.copy()
        fields_for_creating_takes_package.extend(['publish_schedule', 'event_id', 'rewrite_of', 'task',
                                                  EMBARGO])

        for field in fields_for_creating_takes_package:
            takes_package[field] = target.get(field)
        takes_package.setdefault(config.VERSION, 1)

        create_root_group([takes_package])
        self.__link_items__(takes_package, target, link)

        ids = get_resource_service(ARCHIVE).post([takes_package])
        insert_into_versions(id_=ids[0])
        original_target = get_resource_service(ARCHIVE).find_one(req=None, _id=target['_id'])
        target[LINKED_IN_PACKAGES] = original_target[LINKED_IN_PACKAGES]

        return ids[0]
Example #29
    def on_created(self, docs):
        push_notification(self.datasource, created=1)
        for doc in docs:
            insert_into_versions(doc['_id'])
            if self.__is_assigned_to_a_desk(doc):
                add_activity(ACTIVITY_CREATE, 'added new task {{ subject }} of type {{ type }}', item=doc,
                             subject=get_subject(doc), type=doc['type'])
Example #30
    def remove_refs_in_package(self, package, ref_id_to_remove, processed_packages=None):
        """Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.

        Before removing checks if the package has been processed. If processed the package is skipped.

        :return: sub_package_ids
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [ref['guid'] for group in groups
                           for ref in group[REFS] if ref.get('type') == CONTENT_TYPE.COMPOSITE]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package, ref_id_to_remove)

        new_groups = self.remove_group_ref(package, ref_id_to_remove)
        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        resolve_document_version(updates, ARCHIVE, 'PATCH', package)
        get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
        app.on_archive_item_updated(updates, package, ITEM_UNLINK)
        insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
Example #31
    def remove_refs_in_package(self,
                               package,
                               ref_id_to_remove,
                               processed_packages=None):
        """Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.

        Before removing checks if the package has been processed. If processed the package is skipped.

        :return: sub_package_ids
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [
            ref['guid'] for group in groups for ref in group[REFS]
            if ref.get('type') == CONTENT_TYPE.COMPOSITE
        ]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package,
                                                   ref_id_to_remove)

        new_groups = self.remove_group_ref(package, ref_id_to_remove)
        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        resolve_document_version(updates, ARCHIVE, 'PATCH', package)
        get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
        app.on_archive_item_updated(updates, package, ITEM_UNLINK)
        insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
Example #32
    def create(self, docs, **kwargs):
        target_id = request.view_args['target_id']
        doc = docs[0]
        link_id = doc.get('link_id')
        desk_id = doc.get('desk')
        service = get_resource_service(ARCHIVE)
        target = service.find_one(req=None, _id=target_id)
        self._validate_link(target, target_id)
        link = {}

        if desk_id:
            link = {'task': {'desk': desk_id}}
            user = get_user()
            lookup = {'_id': desk_id, 'members.user': user['_id']}
            desk = get_resource_service('desks').find_one(req=None, **lookup)
            if not desk:
                raise SuperdeskApiError.forbiddenError("No privileges to create new take on requested desk.")

            link['task']['stage'] = desk['working_stage']

        if link_id:
            link = service.find_one(req=None, _id=link_id)

        linked_item = self.packageService.link_as_next_take(target, link)
        insert_into_versions(id_=linked_item[config.ID_FIELD])
        doc.update(linked_item)
        build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return [linked_item['_id']]
Example #33
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get(config.ID_FIELD) if id is None else id

            desk_id = doc.get('desk')
            stage_id = doc.get('stage')

            ingest_service = get_resource_service('ingest')
            ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError(
                    _('Failed to find ingest item with _id: {id}').format(id=id_of_item_to_be_fetched))

            if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            if doc.get('macro'):  # there is a macro so transform it
                ingest_doc = get_resource_service('macros').execute_macro(
                    ingest_doc,
                    doc.get('macro'),
                    dest_desk_id=desk_id,
                    dest_stage_id=stage_id,
                )

            dest_doc = fetch_item(ingest_doc, desk_id, stage_id, state=doc.get(ITEM_STATE), target=doc.get('target'))

            id_of_fetched_items.append(dest_doc[config.ID_FIELD])
            ingest_service.patch(id_of_item_to_be_fetched, {'archived': dest_doc['versioncreated']})

            dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
            dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get('version'))
            dest_doc[INGEST_VERSION] = ingest_doc.get('version')

            self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                          doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

            self.__fetch_associated_items(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

            desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
            if desk and desk.get('default_content_profile'):
                dest_doc.setdefault('profile', desk['default_content_profile'])

            if dest_doc.get('type', 'text') in MEDIA_TYPES:
                dest_doc['profile'] = None

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            superdesk.item_fetched.send(self, item=dest_doc, ingest_item=ingest_doc)
            doc.update(dest_doc)

        if kwargs.get('notify', True):
            ingest_doc.update({'task': dest_doc.get('task')})
            push_item_move_notification(ingest_doc, doc, 'item:fetch')

        return id_of_fetched_items
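__strip_version_from_guid is private and not shown; a plausible sketch given how it is called with the ingest guid and version, purely an assumption:

    def __strip_version_from_guid(self, guid, version):
        # Hypothetical: drop a trailing ':<version>' suffix so INGEST_ID stays
        # stable when the same item is re-ingested with a newer version.
        if version and guid.endswith(':{}'.format(version)):
            return guid[:-(len(str(version)) + 1)]
        return guid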
Example #34
    def _publish_package_items(self, package, updates):
        """
        Publishes all items of a package recursively then publishes the package itself
        :param package: package to publish
        :param updates: payload
        """
        items = self.package_service.get_residrefs(package)

        if len(items) == 0 and self.publish_type == ITEM_PUBLISH:
            raise SuperdeskApiError.badRequestError("Empty package cannot be published!")

        removed_items = []
        if self.publish_type == ITEM_CORRECT:
            removed_items, added_items = self._get_changed_items(items, updates)
            if len(removed_items) == len(items) and len(added_items) == 0:
                raise SuperdeskApiError.badRequestError("Corrected package cannot be empty!")
            items.extend(added_items)

        subscriber_items = {}
        if items:
            archive_publish = get_resource_service('archive_publish')
            for guid in items:
                package_item = super().find_one(req=None, _id=guid)

                if not package_item:
                    raise SuperdeskApiError.badRequestError(
                        "Package item with id: {} does not exist.".format(guid))

                if package_item[ITEM_STATE] not in PUBLISH_STATES:
                    # if the item is not published then publish it

                    if package_item[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                        # if the item is a package do recursion to publish
                        sub_updates = {i: updates[i] for i in ['state', 'operation'] if i in updates}
                        sub_updates['groups'] = list(package_item['groups'])
                        self._publish_package_items(package_item, sub_updates)
                        self._update_archive(original=package_item, updates=sub_updates,
                                             should_insert_into_versions=False)
                        self.update_published_collection(published_item_id=package_item[config.ID_FIELD])
                    else:
                        # publish the item
                        archive_publish.patch(id=package_item.pop(config.ID_FIELD), updates=package_item)

                    insert_into_versions(id_=guid)
                    package_item = super().find_one(req=None, _id=guid)

                subscribers = self._get_subscribers_for_package_item(package_item)
                self.package_service.update_field_in_package(updates, package_item[config.ID_FIELD],
                                                             config.VERSION, package_item[config.VERSION])

                if package_item[config.ID_FIELD] in removed_items:
                    digital_item_id = None
                else:
                    digital_item_id = self._get_digital_id_for_package_item(package_item)

                self._extend_subscriber_items(subscriber_items, subscribers, package_item, digital_item_id)

            self.publish_package(package, updates, target_subscribers=subscriber_items)
            return subscribers
Example #35
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get('_id') if id is None else id

            desk_id = doc.get('desk')
            stage_id = doc.get('stage')

            ingest_service = get_resource_service('ingest')
            ingest_doc = ingest_service.find_one(req=None,
                                                 _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError(
                    'Failed to find ingest item with _id: %s' %
                    id_of_item_to_be_fetched)

            if not is_workflow_state_transition_valid(
                    'fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            if doc.get('macro'):  # there is a macro so transform it
                ingest_doc = get_resource_service('macros').execute_macro(
                    ingest_doc, doc.get('macro'))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched,
                                 {'archived': archived})

            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc['_id'] = new_id
            dest_doc['guid'] = new_id
            dest_doc['destination_groups'] = doc.get('destination_groups')
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            send_to(dest_doc, desk_id, stage_id)
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                          doc.get('state', STATE_FETCHED),
                                          doc.get('destination_groups'))

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            doc.update(dest_doc)

        if kwargs.get('notify', True):
            push_notification('item:fetch', fetched=1)

        return id_of_fetched_items
Example #36
    def delete(self, lookup):
        """
        Overriding to handle with Kill workflow in the Archived repo:
            1. Check if the Article has an associated Digital Story and if the Digital Story has more Takes.
               If both exist then all of them are killed along with the one requested.
            2. For each article being killed do the following:
                i.   Apply the Kill Template and create an entry in archive, archive_versions and published collections.
                ii.  Query the Publish Queue in Legal Archive and find the subscribers who received the article
                     previously and create transmission entries in Publish Queue.
                iii. Change the state of the article to Killed in Legal Archive.
                iv.  Delete all the published versions from Archived.
                v.   Send a broadcast email to all subscribers.
        :param lookup: query to find the article in archived repo
        :type lookup: dict
        """

        if app.testing and len(lookup) == 0:
            super().delete(lookup)
            return

        # Step 1
        articles_to_kill = self._find_articles_to_kill(lookup)
        articles_to_kill.sort(key=itemgetter(ITEM_TYPE), reverse=True)  # Needed because package has to be inserted last
        kill_service = KillPublishService()

        for article in articles_to_kill:
            # Step 2(i)
            to_apply_template = {'template_name': 'kill', 'item': article}
            get_resource_service('content_templates_apply').post([to_apply_template])
            article = to_apply_template['item']
            self._remove_and_set_kill_properties(article, articles_to_kill)

            # Step 2(ii)
            transmission_details = list(
                get_resource_service(LEGAL_PUBLISH_QUEUE_NAME).get(req=None,
                                                                   lookup={'item_id': article[config.ID_FIELD]}))

            if transmission_details:
                subscriber_ids = [t['_subscriber_id'] for t in transmission_details]
                query = {'$and': [{config.ID_FIELD: {'$in': subscriber_ids}}]}
                subscribers = list(get_resource_service('subscribers').get(req=None, lookup=query))

                kill_service.queue_transmission(article, subscribers)

            # Step 2(iii)
            import_into_legal_archive.apply_async(kwargs={'doc': article})

            # Step 2(iv)
            super().delete({'item_id': article[config.ID_FIELD]})

            # Step 2(i) - Creating entries in published collection
            docs = [article]
            get_resource_service(ARCHIVE).post(docs)
            insert_into_versions(doc=article)
            get_resource_service('published').post(docs)

            # Step 2(v)
            kill_service.broadcast_kill_email(article)
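A hedged sketch of driving this Kill workflow, assuming an app context. The 'archived' resource name is an assumption; 'item_id' is the field the workflow keys on:

from superdesk import get_resource_service

def kill_archived_article(item_id):
    # Triggers the delete() above, which applies the kill template, queues
    # transmissions and removes the published versions.
    get_resource_service('archived').delete(lookup={'item_id': item_id})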
Example #37
    def remove_refs_in_package(self, package, ref_id_to_remove, processed_packages=None):
        """
        Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.
        Before removing checks if the package has been processed. If processed the package is skipped.
        In case of takes package, sequence is decremented and last_take field is updated.
        If sequence is zero then the takes package is deleted.
        :return: package[config.ID_FIELD]
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [ref['guid'] for group in groups
                           for ref in group[ASSOCIATIONS] if ref.get('type') == CONTENT_TYPE.COMPOSITE]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package, ref_id_to_remove, processed_packages)

        new_groups = [{GROUP_ID: group[GROUP_ID], ROLE: group.get(ROLE),
                       ASSOCIATIONS: [ref for ref in group[ASSOCIATIONS] if ref.get('guid') != ref_id_to_remove]}
                      for group in groups]
        new_root_refs = [{ID_REF: group[GROUP_ID]} for group in new_groups if group[GROUP_ID] != ROOT_GROUP]

        for group in new_groups:
            if group[GROUP_ID] == ROOT_GROUP:
                group[ASSOCIATIONS] = new_root_refs
                break

        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        # if this is a takes package, adjust the reference;
        # safe to do so because a take can only be in one takes package.
        delete_package = False
        if package.get(PACKAGE_TYPE) == TAKES_PACKAGE:
            new_sequence = package[SEQUENCE] - 1
            if new_sequence == 0:
                # remove the takes package.
                get_resource_service(ARCHIVE).delete_action({config.ID_FIELD: package[config.ID_FIELD]})
                delete_package = True
            else:
                updates[SEQUENCE] = new_sequence
                last_take_group = next(reference for reference in
                                       next(new_group.get(ASSOCIATIONS) for new_group in new_groups if
                                            new_group[GROUP_ID] == MAIN_GROUP)
                                       if reference.get(SEQUENCE) == new_sequence)

                if last_take_group:
                    updates[LAST_TAKE] = last_take_group.get(ITEM_REF)

        if not delete_package:
            resolve_document_version(updates, ARCHIVE, 'PATCH', package)
            get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
            insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
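The group rewrite performed above, restated as a standalone illustration on plain dicts (no Superdesk dependencies; the literal field names mirror the equivalent snippet later on this page, and the sample values are illustrative):

def strip_ref(groups, ref_id):
    # Drop the ref from every group, then rebuild the root group's idRef list.
    new_groups = [
        {'id': group['id'], 'role': group.get('role'),
         'refs': [ref for ref in group['refs'] if ref.get('guid') != ref_id]}
        for group in groups
    ]
    new_root_refs = [{'idRef': group['id']} for group in new_groups if group['id'] != 'root']
    for group in new_groups:
        if group['id'] == 'root':
            group['refs'] = new_root_refs
            break
    return new_groups

groups = [
    {'id': 'root', 'role': 'grpRole:NEP', 'refs': [{'idRef': 'main'}]},
    {'id': 'main', 'role': 'grpRole:main', 'refs': [{'guid': 'a1'}, {'guid': 'a2'}]},
]
assert strip_ref(groups, 'a1')[1]['refs'] == [{'guid': 'a2'}]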
Example #38
    def test_remove_takes_package(self):
        """
        Tests the behavior of remove_expired() when just the takes package expires.
        """
        def expire(published_takes_pkg):
            published_service.update(
                published_takes_pkg[config.ID_FIELD],
                {'expiry': utcnow() + timedelta(minutes=-60)},
                published_takes_pkg)

            RemoveExpiredPublishContent().run()

            if published_takes_pkg[ITEM_STATE] == CONTENT_STATE.PUBLISHED:
                self.assertEqual(published_takes_pkg[ITEM_OPERATION],
                                 'publish')
            elif published_takes_pkg[ITEM_STATE] == CONTENT_STATE.KILLED:
                self.assertEqual(published_takes_pkg[ITEM_OPERATION], 'kill')

        doc = self.articles[0].copy()
        self._create_and_insert_into_versions(doc, False)

        published_version_number = doc[config.VERSION] + 1
        get_resource_service(ARCHIVE_PUBLISH).patch(
            id=doc[config.ID_FIELD],
            updates={
                ITEM_STATE: CONTENT_STATE.PUBLISHED,
                config.VERSION: published_version_number
            })
        insert_into_versions(id_=doc[config.ID_FIELD])

        published_version_number += 1
        get_resource_service(ARCHIVE_KILL).patch(id=doc[config.ID_FIELD],
                                                 updates={
                                                     ITEM_STATE:
                                                     CONTENT_STATE.KILLED,
                                                     config.VERSION:
                                                     published_version_number
                                                 })
        insert_into_versions(id_=doc[config.ID_FIELD])

        published_service = get_resource_service(PUBLISHED)
        items_in_published_repo = list(
            published_service.get_from_mongo(req=None, lookup=None))
        self.assertEqual(len(items_in_published_repo), 4)

        # Expiring the Takes Package whose state is Published
        published_takes_pkg = [
            g for g in items_in_published_repo
            if is_takes_package(g) and g[ITEM_STATE] == CONTENT_STATE.PUBLISHED
        ]
        expire(published_takes_pkg[0])

        # Expiring the Takes Package whose state is Killed
        published_takes_pkg = [
            g for g in items_in_published_repo
            if is_takes_package(g) and g[ITEM_STATE] == CONTENT_STATE.KILLED
        ]
        expire(published_takes_pkg[0])
Example #39
    def on_created(self, docs):
        push_notification(self.datasource, created=1)
        push_notification('task:new')
        for doc in docs:
            insert_into_versions(doc['_id'])
            if is_assigned_to_a_desk(doc):
                add_activity(ACTIVITY_CREATE, 'added new task {{ subject }} of type {{ type }}',
                             self.datasource, item=doc,
                             subject=get_subject(doc), type=doc[ITEM_TYPE])
Example #40
    def test_remove_published_and_killed_content_separately(self):
        doc = self.articles[0]
        original = doc.copy()

        updates = {'targeted_for': [{'name': 'New South Wales', 'allow': True}]}
        get_resource_service(ARCHIVE).patch(id=original[config.ID_FIELD], updates=updates)

        original.update(updates)
        self._create_and_insert_into_versions(original, False)

        published_version_number = original[config.VERSION] + 1
        get_resource_service(ARCHIVE_PUBLISH).patch(id=doc[config.ID_FIELD],
                                                    updates={ITEM_STATE: CONTENT_STATE.PUBLISHED,
                                                             config.VERSION: published_version_number})

        published_service = get_resource_service(PUBLISHED)
        published_items = published_service.get(req=None, lookup=None)
        self.assertEqual(1, published_items.count())

        article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        self.assertIsNotNone(article_in_production)
        self.assertEqual(article_in_production[ITEM_STATE], CONTENT_STATE.PUBLISHED)
        self.assertEqual(article_in_production[config.VERSION], published_version_number)
        insert_into_versions(doc=article_in_production)

        # Setting the expiry date of the published article to 1 hr back from now
        published_service.update_published_items(
            original[config.ID_FIELD], 'expiry', utcnow() + timedelta(minutes=-60))

        # Killing the published article and inserting into archive_versions as unittests use service directly
        published_version_number += 1
        get_resource_service(ARCHIVE_KILL).patch(id=doc[config.ID_FIELD],
                                                 updates={ITEM_STATE: CONTENT_STATE.KILLED,
                                                          config.VERSION: published_version_number})

        # Executing the Expiry Job for the Published Article and asserting the collections
        RemoveExpiredPublishContent().run()

        published_items = published_service.get(req=None, lookup=None)
        self.assertEqual(1, published_items.count())

        article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        self.assertIsNotNone(article_in_production)
        self.assertEqual(article_in_production[ITEM_STATE], CONTENT_STATE.KILLED)
        self.assertEqual(article_in_production[config.VERSION], published_version_number)
        insert_into_versions(doc=article_in_production)

        # Setting the expiry date of the killed article to 1 hr back from now and running the job again
        published_service.update_published_items(
            original[config.ID_FIELD], 'expiry', utcnow() + timedelta(minutes=-60))
        RemoveExpiredPublishContent().run()

        published_items = published_service.get_other_published_items(str(original[config.ID_FIELD]))
        self.assertEqual(0, published_items.count())

        article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        self.assertIsNone(article_in_production)
Example #41
def enqueue_item(published_item):
    """
    Creates the corresponding entries in the publish queue for the given item
    """
    published_item_id = ObjectId(published_item[config.ID_FIELD])
    published_service = get_resource_service(PUBLISHED)
    archive_service = get_resource_service(ARCHIVE)
    published_update = {QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS, 'last_queue_event': utcnow()}
    try:
        logger.info('Queueing item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))

        published_item = published_service.find_one(req=None, _id=published_item_id)
        if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
            logger.info('Queue State is not pending for published item {}. It is in {}'.
                        format(published_item_id, published_item.get(QUEUE_STATE)))
            return

        if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            # if scheduled then change the state to published
            # change the `version` and `versioncreated` for the item
            # in archive collection and published collection.
            versioncreated = utcnow()
            item_updates = {'versioncreated': versioncreated, ITEM_STATE: CONTENT_STATE.PUBLISHED}
            resolve_document_version(document=item_updates, resource=ARCHIVE,
                                     method='PATCH',
                                     latest_doc={config.VERSION: published_item[config.VERSION]})

            # update the archive collection
            archive_item = archive_service.find_one(req=None, _id=published_item['item_id'])
            archive_service.system_update(published_item['item_id'], item_updates, archive_item)
            # insert into version.
            insert_into_versions(published_item['item_id'], doc=None)
            # import to legal archive
            import_into_legal_archive.apply_async(countdown=3, kwargs={'item_id': published_item['item_id']})
            logger.info('Modified the version of scheduled item: {}'.format(published_item_id))

            logger.info('Publishing scheduled item_id: {}'.format(published_item_id))
            # update the published collection
            published_update.update(item_updates)
            published_item.update({'versioncreated': versioncreated,
                                   ITEM_STATE: CONTENT_STATE.PUBLISHED,
                                   config.VERSION: item_updates[config.VERSION]})

        published_service.patch(published_item_id, published_update)
        queued = get_enqueue_service(published_item[ITEM_OPERATION]).enqueue_item(published_item)
        # if the item is queued in the publish_queue then the state is "queued"
        # else the queue state is "queued_not_transmitted"
        queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
        published_service.patch(published_item_id, {QUEUE_STATE: queue_state})
        logger.info('Queued item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))
    except KeyError:
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.PENDING})
        logger.exception('No enqueue service found for operation %s', published_item[ITEM_OPERATION])
    except:
        published_service.patch(published_item_id, {QUEUE_STATE: PUBLISH_STATE.PENDING})
        raise
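The queue-state lifecycle above, restated as a tiny pure function. The lowercase state strings are assumptions standing in for the PUBLISH_STATE constants:

def next_queue_state(current_state, queued=False, failed=False):
    if current_state != 'pending':
        return current_state  # only pending items are picked up
    if failed:
        return 'pending'      # reset so a later run retries the item
    return 'queued' if queued else 'queued_not_transmitted'

assert next_queue_state('in_progress') == 'in_progress'
assert next_queue_state('pending', queued=True) == 'queued'
assert next_queue_state('pending', failed=True) == 'pending'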
Example #42
    def _publish_package_items(self, package, updates):
        """
        Publishes all items of a package recursively then publishes the package itself
        :param package: package to publish
        :param updates: payload
        """
        items = self.package_service.get_residrefs(package)

        if len(items) == 0 and self.publish_type == ITEM_PUBLISH:
            raise SuperdeskApiError.badRequestError("Empty package cannot be published!")

        removed_items = []
        if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
            removed_items, added_items = self._get_changed_items(items, updates)
            # we raise an error if a correction is done on an empty package. Kill is fine.
            if len(removed_items) == len(items) and len(added_items) == 0 and self.publish_type == ITEM_CORRECT:
                raise SuperdeskApiError.badRequestError("Corrected package cannot be empty!")
            items.extend(added_items)

        if items:
            archive_publish = get_resource_service('archive_publish')
            for guid in items:
                package_item = super().find_one(req=None, _id=guid)

                if not package_item:
                    raise SuperdeskApiError.badRequestError(
                        "Package item with id: {} does not exist.".format(guid))

                if package_item[ITEM_STATE] not in PUBLISH_STATES:  # if the item is not published then publish it
                    if package_item[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                        # if the item is a package do recursion to publish
                        sub_updates = {i: updates[i] for i in ['state', 'operation'] if i in updates}
                        sub_updates['groups'] = list(package_item['groups'])
                        self._publish_package_items(package_item, sub_updates)
                        self._update_archive(original=package_item, updates=sub_updates,
                                             should_insert_into_versions=False)
                    else:
                        # publish the item
                        package_item[PUBLISHED_IN_PACKAGE] = package[config.ID_FIELD]
                        archive_publish.patch(id=package_item.pop(config.ID_FIELD), updates=package_item)

                    insert_into_versions(id_=guid)

                elif guid in removed_items:
                    # remove the package information from the package item.
                    linked_in_packages = [linked for linked in package_item.get(LINKED_IN_PACKAGES)
                                          if linked.get(PACKAGE) != package.get(config.ID_FIELD)]
                    super().system_update(guid, {LINKED_IN_PACKAGES: linked_in_packages}, package_item)

                package_item = super().find_one(req=None, _id=guid)
                self.package_service.update_field_in_package(updates, package_item[config.ID_FIELD],
                                                             config.VERSION, package_item[config.VERSION])

        updated = deepcopy(package)
        updated.update(updates)
        self.update_published_collection(published_item_id=package[config.ID_FIELD], updated=updated)
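The recursion above, sketched on plain dicts: visit every referenced item, recursing into composite children before handling them, and handle the package itself last ('composite' mirrors CONTENT_TYPE.COMPOSITE; 'residrefs' is an illustrative key):

def walk_package(item, items_by_id, handle):
    for guid in item.get('residrefs', []):
        child = items_by_id[guid]
        if child.get('type') == 'composite':
            walk_package(child, items_by_id, handle)  # the recursion handles child
        else:
            handle(child)
    handle(item)

published = []
items = {
    'pkg': {'_id': 'pkg', 'type': 'composite', 'residrefs': ['story', 'inner']},
    'inner': {'_id': 'inner', 'type': 'composite', 'residrefs': ['sidebar']},
    'story': {'_id': 'story', 'type': 'text'},
    'sidebar': {'_id': 'sidebar', 'type': 'text'},
}
walk_package(items['pkg'], items, lambda item: published.append(item['_id']))
assert published == ['story', 'sidebar', 'inner', 'pkg']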
Example #43
    def link_as_next_take(self, target, link):
        """Makes next take to target from given link.

        Check if target has an associated takes package. If not, create it and add target as a take.
        Check if the target is the last take, if not, resolve the last take. Copy metadata from the target and add it
        as the next take and return the update link item

        :return: the updated link item
        """

        takes_package_id = self.get_take_package_id(target)
        archive_service = get_resource_service(ARCHIVE)
        takes_package = archive_service.find_one(req=None, _id=takes_package_id) if takes_package_id else {}

        if not takes_package:
            # setting the sequence to 1 for target.
            updates = {SEQUENCE: 1}
            if target[ITEM_STATE] in [CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED,
                                      CONTENT_STATE.SCHEDULED, CONTENT_STATE.INGESTED]:
                raise SuperdeskApiError.forbiddenError("Item isn't in a valid state for creating takes.")
            else:
                archive_service.system_update(target.get(config.ID_FIELD), updates, target)

        link_updates = {}

        if not link.get(config.ID_FIELD):
            # A new story to be linked
            self.__copy_metadata__(target, link, takes_package, set_state=True)
            archive_service.post([link])
        else:
            self.__copy_metadata__(target, link_updates, takes_package, set_state=False)

        link.update(link_updates)

        if not takes_package_id:
            takes_package_id = self.package_story_as_a_take(target, takes_package, link)
        else:
            self.__link_items__(takes_package, target, link)
            del takes_package[config.ID_FIELD]
            takes_package.pop('unique_id', None)
            takes_package.pop('unique_name', None)
            takes_package.pop(PUBLISH_SCHEDULE, None)
            takes_package.pop(SCHEDULE_SETTINGS, None)

            resolve_document_version(takes_package, ARCHIVE, 'PATCH', takes_package)
            archive_service.patch(takes_package_id, takes_package)
            get_resource_service('archive_broadcast').on_broadcast_master_updated(ITEM_CREATE, target,
                                                                                  takes_package_id=takes_package_id)

        if link.get(SEQUENCE):
            link_updates.update({SEQUENCE: link[SEQUENCE]})
            archive_service.system_update(link[config.ID_FIELD], link_updates, link)

        insert_into_versions(id_=takes_package_id)
        return link
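A caller-side sketch, assuming an app context; `service` stands for the takes service instance that defines link_as_next_take() above:

from superdesk import get_resource_service

def start_new_take(service, target_id, headline):
    target = get_resource_service('archive').find_one(req=None, _id=target_id)
    link = {'headline': headline}  # no _id yet, so link_as_next_take() posts it
    return service.link_as_next_take(target, link)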
Example #44
    def link_as_next_take(self, target, link):
        """
        Check if target has an associated takes package. If not, create it and add target as a take.
        Check if the target is the last take, if not, resolve the last take. Copy metadata from the target and add it
        as the next take and return the update link item

        :return: the updated link item
        """

        takes_package_id = self.get_take_package_id(target)
        archive_service = get_resource_service(ARCHIVE)
        takes_package = archive_service.find_one(
            req=None, _id=takes_package_id) if takes_package_id else {}

        if not takes_package:
            # setting the sequence to 1 for target.
            updates = {SEQUENCE: 1}
            if target[ITEM_STATE] in [
                    CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED,
                    CONTENT_STATE.SCHEDULED, CONTENT_STATE.INGESTED
            ]:
                raise SuperdeskApiError.forbiddenError(
                    "Item isn't in a valid state for creating takes.")
            else:
                archive_service.system_update(target.get(config.ID_FIELD),
                                              updates, target)

        if not link.get(config.ID_FIELD):
            self.__copy_metadata__(target, link, takes_package)
            archive_service.post([link])

        if not takes_package_id:
            takes_package_id = self.package_story_as_a_take(
                target, takes_package, link)
        else:
            self.__link_items__(takes_package, target, link)
            del takes_package[config.ID_FIELD]
            takes_package.pop('unique_id', None)
            takes_package.pop('unique_name', None)
            takes_package.pop(PUBLISH_SCHEDULE, None)
            takes_package.pop(SCHEDULE_SETTINGS, None)

            resolve_document_version(takes_package, ARCHIVE, 'PATCH',
                                     takes_package)
            archive_service.patch(takes_package_id, takes_package)
            get_resource_service(
                'archive_broadcast').on_broadcast_master_updated(
                    ITEM_CREATE, target, takes_package_id=takes_package_id)

        if link.get(SEQUENCE):
            archive_service.system_update(link[config.ID_FIELD],
                                          {SEQUENCE: link[SEQUENCE]}, link)

        insert_into_versions(id_=takes_package_id)
        return link
Example #45
    def create(self, docs):
        service = get_resource_service(SOURCE)
        item_id = request.view_args["item_id"]
        item = service.find_one(req=None, _id=item_id)
        doc = docs[0]

        self._valid_broadcast_item(item)

        desk_id = doc.get("desk")
        desk = None

        if desk_id:
            desk = get_resource_service("desks").find_one(req=None,
                                                          _id=desk_id)

        doc.pop("desk", None)
        doc["task"] = {}
        if desk:
            doc["task"]["desk"] = desk.get(config.ID_FIELD)
            doc["task"]["stage"] = desk.get("working_stage")

        doc["task"]["user"] = get_user().get("_id")
        genre_list = get_resource_service("vocabularies").find_one(
            req=None, _id="genre") or {}
        broadcast_genre = [
            {
                "qcode": genre.get("qcode"),
                "name": genre.get("name")
            } for genre in genre_list.get("items", [])
            if genre.get("qcode") == BROADCAST_GENRE and genre.get("is_active")
        ]

        if not broadcast_genre:
            raise SuperdeskApiError.badRequestError(
                message=_("Cannot find the {genre} genre.").format(
                    genre=BROADCAST_GENRE))

        doc["broadcast"] = {
            "status": "",
            "master_id": item_id,
            "rewrite_id": item.get("rewritten_by")
        }

        doc["genre"] = broadcast_genre
        doc["family_id"] = item.get("family_id")

        for key in FIELDS_TO_COPY:
            doc[key] = item.get(key)

        resolve_document_version(document=doc, resource=SOURCE, method="POST")
        service.post(docs)
        insert_into_versions(id_=doc[config.ID_FIELD])
        build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return [doc[config.ID_FIELD]]
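A hedged sketch of calling this service directly; `broadcast_service` stands for the service defining create() above, and the master story's id normally arrives via the request route ('item_id'):

def create_broadcast(broadcast_service, desk_id):
    docs = [{'desk': desk_id}]  # the only payload key create() reads here
    return broadcast_service.create(docs)  # [id of the new broadcast item]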
Example #46
    def create(self, docs, **kwargs):
        search_provider = get_resource_service('search_providers').find_one(
            search_provider=PROVIDER_NAME, req=None)

        if not search_provider or search_provider.get('is_closed', False):
            raise SuperdeskApiError.badRequestError(
                'No search provider found or the search provider is closed.')

        if 'config' in search_provider:
            self.backend.set_credentials(search_provider['config'])

        new_guids = []
        for doc in docs:
            if not doc.get('desk'):  # no destination desk selected is a bad request
                raise SuperdeskApiError.badRequestError(
                    "Destination desk cannot be empty.")

            try:
                archived_doc = self.backend.find_one_raw(
                    doc['guid'], doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, search_provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc[config.ID_FIELD] = new_id
            generate_unique_id_and_name(dest_doc)

            if search_provider:
                dest_doc['ingest_provider'] = str(
                    search_provider[config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc,
                    update=None,
                    desk_id=doc.get('desk'),
                    stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc[config.ID_FIELD]
            dest_doc[FAMILY_ID] = archived_doc[config.ID_FIELD]
            dest_doc[ITEM_OPERATION] = ITEM_FETCH
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc[config.ID_FIELD])

            get_resource_service('search_providers').system_update(
                search_provider[config.ID_FIELD],
                {'last_item_update': utcnow()}, search_provider)

        return new_guids
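A sketch of the payload this create() expects: each doc names the external item's guid plus a destination desk (mandatory, per the check above) and an optional stage. `service` stands for the search-provider fetch service itself:

def fetch_external_items(service, guids, desk_id, stage_id=None):
    docs = [{'guid': guid, 'desk': desk_id, 'stage': stage_id} for guid in guids]
    return service.create(docs)  # guids of the new archive items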
Example #47
    def create(self, docs):
        service = get_resource_service(SOURCE)
        item_id = request.view_args['item_id']
        item = service.find_one(req=None, _id=item_id)
        doc = docs[0]

        self._valid_broadcast_item(item)

        desk_id = doc.get('desk')
        desk = None

        if desk_id:
            desk = get_resource_service('desks').find_one(req=None,
                                                          _id=desk_id)

        doc.pop('desk', None)
        doc['task'] = {}
        if desk:
            doc['task']['desk'] = desk.get(config.ID_FIELD)
            doc['task']['stage'] = desk.get('working_stage')

        doc['task']['user'] = get_user().get('_id')
        genre_list = get_resource_service('vocabularies').find_one(
            req=None, _id='genre') or {}
        broadcast_genre = [
            {
                'qcode': genre.get('qcode'),
                'name': genre.get('name')
            } for genre in genre_list.get('items', [])
            if genre.get('qcode') == BROADCAST_GENRE and genre.get('is_active')
        ]

        if not broadcast_genre:
            raise SuperdeskApiError.badRequestError(
                message=_("Cannot find the {genre} genre.").format(
                    genre=BROADCAST_GENRE))

        doc['broadcast'] = {
            'status': '',
            'master_id': item_id,
            'rewrite_id': item.get('rewritten_by')
        }

        doc['genre'] = broadcast_genre
        doc['family_id'] = item.get('family_id')

        for key in FIELDS_TO_COPY:
            doc[key] = item.get(key)

        resolve_document_version(document=doc, resource=SOURCE, method='POST')
        service.post(docs)
        insert_into_versions(id_=doc[config.ID_FIELD])
        build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return [doc[config.ID_FIELD]]
Example #48
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get('_id') if id is None else id

            desk_id = doc.get('desk')
            stage_id = doc.get('stage')

            ingest_service = get_resource_service('ingest')
            ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError('Failed to find ingest item with _id: %s' %
                                                      id_of_item_to_be_fetched)

            if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            if doc.get('macro'):  # there is a macro so transform it
                ingest_doc = get_resource_service('macros').execute_macro(ingest_doc, doc.get('macro'))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched, {'archived': archived})

            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc['_id'] = new_id
            dest_doc['guid'] = new_id
            dest_doc['destination_groups'] = doc.get('destination_groups')
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            send_to(dest_doc, desk_id, stage_id)
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                          doc.get('state', STATE_FETCHED),
                                          doc.get('destination_groups'))

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            doc.update(dest_doc)

        if kwargs.get('notify', True):
            push_notification('item:fetch', fetched=1)

        return id_of_fetched_items
Example #49
    def on_updated(self, updates, original):
        original = super().find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] not in {ITEM_KILL, ITEM_TAKEDOWN} and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service("archive_broadcast").on_broadcast_master_updated(
                updates[ITEM_OPERATION], original)

        get_resource_service("archive_broadcast").reset_broadcast_status(
            updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
        CropService().update_media_references(updates, original, True)

        # Do not send the item if it is scheduled; on real publishing, send the item to internal destinations
        if not updates.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            signals.item_published.send(self, item=original)

        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service("archive_correct")
            processed_packages = []
            for package in packages:
                original_updates = {
                    "operation": updates["operation"],
                    ITEM_STATE: updates[ITEM_STATE]
                }
                if (package[ITEM_STATE]
                        in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]
                        and package.get(PACKAGE_TYPE, "") == ""
                        and str(package[config.ID_FIELD])
                        not in processed_packages):
                    original_updates["groups"] = package["groups"]

                    if updates.get("headline"):
                        self.package_service.update_field_in_package(
                            original_updates, original[config.ID_FIELD],
                            "headline", updates.get("headline"))

                    if updates.get("slugline"):
                        self.package_service.update_field_in_package(
                            original_updates, original[config.ID_FIELD],
                            "slugline", updates.get("slugline"))

                    archive_correct.patch(id=package[config.ID_FIELD],
                                          updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #50
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(
                doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk',
                                                  archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc,
                desk_id=doc.get('task', {}).get('desk'),
                stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {
                CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED
        }:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
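A hedged caller sketch; the 'move' resource name is an assumption, while the payload shape ('task.desk' / 'task.stage') is exactly what move_content() reads:

from superdesk import get_resource_service

def move_item(item_guid, desk_id, stage_id):
    doc = {'task': {'desk': desk_id, 'stage': stage_id}}
    return get_resource_service('move').move_content(item_guid, doc)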
Example #51
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get("_id") if id is None else id

            desk_id = doc.get("desk")
            stage_id = doc.get("stage")

            ingest_service = get_resource_service("ingest")
            ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError(
                    "Failed to find ingest item with _id: %s" % id_of_item_to_be_fetched
                )

            if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            if doc.get("macro"):  # there is a macro so transform it
                ingest_doc = get_resource_service("macros").execute_macro(ingest_doc, doc.get("macro"))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched, {"archived": archived})

            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc["_id"] = new_id
            dest_doc["guid"] = new_id
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc["_id"]
            dest_doc[ITEM_OPERATION] = ITEM_FETCH

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            doc.update(dest_doc)

        if kwargs.get("notify", True):
            push_notification("item:fetch", fetched=1)

        return id_of_fetched_items
Example #52
    def create(self, docs):
        ids = []
        production = get_resource_service('archive')
        assignments_service = get_resource_service('assignments')
        for doc in docs:
            assignment = assignments_service.find_one(req=None, _id=doc.pop('assignment_id'))
            item = get_item_from_assignment(assignment, doc.pop('template_name', None))
            item[config.VERSION] = 1
            item.setdefault('type', 'text')
            item['assignment_id'] = assignment[config.ID_FIELD]

            # create content
            ids = production.post([item])
            insert_into_versions(doc=item)

            # create delivery references
            get_resource_service('delivery').post([{
                'item_id': item[config.ID_FIELD],
                'assignment_id': assignment[config.ID_FIELD],
                'planning_id': assignment['planning_item'],
                'coverage_id': assignment['coverage_item']
            }])

            updates = {'assigned_to': deepcopy(assignment.get('assigned_to'))}
            updates['assigned_to']['user'] = str(item.get('task').get('user'))
            updates['assigned_to']['desk'] = str(item.get('task').get('desk'))
            updates['assigned_to']['state'] = ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS
            updates['assigned_to']['assignor_user'] = str(item.get('task').get('user'))
            updates['assigned_to']['assigned_date_user'] = utcnow()

            # set the assignment to in progress
            assignments_service.patch(assignment[config.ID_FIELD], updates)
            doc.update(item)
            ids.append(doc['_id'])

            # Send notification that the work has commenced
            # Determine the display name of the assignee
            assigned_to_user = superdesk.get_resource_service('users').find_one(req=None,
                                                                                _id=str(item.get('task').get('user')))
            assignee = assigned_to_user.get('display_name') if assigned_to_user else 'Unknown'
            PlanningNotifications().notify_assignment(target_desk=item.get('task').get('desk'),
                                                      target_user=str(item.get('task').get('user')),
                                                      message='assignment_commenced_msg',
                                                      assignee=assignee,
                                                      coverage_type=get_coverage_type_name(item.get('type', '')),
                                                      slugline=item.get('slugline'),
                                                      omit_user=True)
            # Save history
            get_resource_service('assignments_history').on_item_start_working(updates, assignment)
            # publishing planning item
            assignments_service.publish_planning(assignment['planning_item'])
        return ids
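A hedged sketch of the payload this create() expects: 'assignment_id' is popped and resolved to an assignment, 'template_name' is optional. `service` stands for the "start working" service above:

def start_working_on(service, assignment_id, template_name=None):
    doc = {'assignment_id': assignment_id}
    if template_name:
        doc['template_name'] = template_name
    return service.create([doc])  # ids of the created content item(s)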
Example #53
    def create(self, docs, **kwargs):
        doc = docs[0] if len(docs) > 0 else {}
        original_id = request.view_args["original_id"]
        update_document = doc.get("update")

        archive_service = get_resource_service(ARCHIVE)
        original = archive_service.find_one(req=None, _id=original_id)
        self._validate_rewrite(original, update_document)

        rewrite = self._create_rewrite_article(original,
                                               existing_item=update_document,
                                               desk_id=doc.get("desk_id"))

        # sync editor state
        copy_fields(original, rewrite, ignore_empty=True)

        if update_document:
            # copy editor state from existing item to preserve those
            copy_fields(update_document, rewrite, ignore_empty=True)

        if rewrite.get("fields_meta"):
            generate_fields(rewrite, force=True)

        update_associations(rewrite)

        # signal
        item_rewrite.send(self, item=rewrite, original=original)

        if update_document:
            # process the existing story
            archive_service.patch(update_document[config.ID_FIELD], rewrite)
            app.on_archive_item_updated(rewrite, update_document, ITEM_LINK)
            rewrite[config.ID_FIELD] = update_document[config.ID_FIELD]
            ids = [update_document[config.ID_FIELD]]
        else:
            # Set the version.
            resolve_document_version(rewrite, ARCHIVE, "POST")
            ids = archive_service.post([rewrite])
            insert_into_versions(doc=rewrite)
            build_custom_hateoas(CUSTOM_HATEOAS, rewrite)

            app.on_archive_item_updated(
                {"rewrite_of": rewrite.get("rewrite_of")}, rewrite, ITEM_LINK)

        self._add_rewritten_flag(original, rewrite)
        get_resource_service("archive_broadcast").on_broadcast_master_updated(
            ITEM_CREATE, item=original, rewrite_id=ids[0])

        doc.clear()
        doc.update(rewrite)
        return ids
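A hedged caller sketch; `rewrite_service` stands for the service defining create() above. The original story's id normally arrives via the route ('original_id'), and an optional 'update' payload links an existing story instead of creating a new one:

def rewrite_story(rewrite_service, desk_id, existing_item=None):
    doc = {'desk_id': desk_id}
    if existing_item:
        doc['update'] = existing_item  # link an existing story as the rewrite
    return rewrite_service.create([doc])  # [id of the rewrite item]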
Example #54
    def create(self, docs, **kwargs):
        for doc in docs:
            ingest_doc = superdesk.get_resource_service("ingest").find_one(req=None, _id=doc.get("guid"))
            if not ingest_doc:
                msg = "Fail to found ingest item with guid: %s" % doc.get("guid")
                raise SuperdeskApiError.notFoundError(msg)

            if not is_workflow_state_transition_valid("fetch_as_from_ingest", ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            archived = utcnow()
            superdesk.get_resource_service("ingest").patch(ingest_doc.get("_id"), {"archived": archived})
            doc["archived"] = archived

            archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get("guid"))
            if not archived_doc:
                dest_doc = dict(ingest_doc)
                dest_doc[config.VERSION] = 1
                send_to(dest_doc, doc.get("desk"))
                dest_doc[config.CONTENT_STATE] = STATE_FETCHED
                remove_unwanted(dest_doc)
                for ref in [
                    ref for group in dest_doc.get("groups", []) for ref in group.get("refs", []) if "residRef" in ref
                ]:
                    ref["location"] = ARCHIVE
                    ref["guid"] = ref["residRef"]

                set_original_creator(dest_doc)
                if doc.get(PACKAGE):
                    links = dest_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    dest_doc[LINKED_IN_PACKAGES] = links
                superdesk.get_resource_service(ARCHIVE).post([dest_doc])
                insert_into_versions(dest_doc.get("guid"))
                desk = doc.get("desk")
                refs = [
                    {"guid": ref.get("residRef"), "desk": desk, PACKAGE: dest_doc.get("_id")}
                    for group in dest_doc.get("groups", [])
                    for ref in group.get("refs", [])
                    if "residRef" in ref
                ]
                if refs:
                    self.create(refs)
            else:
                if doc.get(PACKAGE):
                    links = archived_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get("_id"), {LINKED_IN_PACKAGES: links})

        return [doc.get("guid") for doc in docs]
Example #55
    def on_created(self, docs):
        push_notification(self.datasource, created=1)
        push_notification("task:new")
        for doc in docs:
            insert_into_versions(doc["_id"])
            if is_assigned_to_a_desk(doc):
                add_activity(
                    ACTIVITY_CREATE,
                    "added new task {{ subject }} of type {{ type }}",
                    self.datasource,
                    item=doc,
                    subject=get_subject(doc),
                    type=doc[ITEM_TYPE],
                )
Example #56
    def remove_refs_in_package(self, package, ref_id_to_remove, processed_packages=None):
        """
        Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.
        Before removing checks if the package has been processed. If processed the package is skipped.
        :return: package[config.ID_FIELD]
        """
        groups = package['groups']

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [ref['guid'] for group in groups for ref in group['refs'] if ref.get('type') == 'composite']
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package, ref_id_to_remove, processed_packages)

        new_groups = [{'id': group['id'], 'role': group.get('role'),
                       'refs': [ref for ref in group['refs'] if ref.get('guid') != ref_id_to_remove]}
                      for group in groups]
        new_root_refs = [{'idRef': group['id']} for group in new_groups if group['id'] != 'root']

        for group in new_groups:
            if group['id'] == 'root':
                group['refs'] = new_root_refs
                break

        updates = {config.LAST_UPDATED: utcnow(), 'groups': new_groups}

        # if this is a takes package, adjust the reference;
        # safe to do so because a take can only be in one takes package.
        if package.get(PACKAGE_TYPE) == TAKES_PACKAGE:
            new_sequence = package[SEQUENCE] - 1
            updates[SEQUENCE] = new_sequence
            last_take_group = next(reference for reference in
                                   next(new_group.get('refs') for new_group in new_groups if new_group['id'] == 'main')
                                   if reference.get(SEQUENCE) == new_sequence)

            if last_take_group:
                updates[LAST_TAKE] = last_take_group.get(ITEM_REF)

        resolve_document_version(updates, ARCHIVE, 'PATCH', package)

        get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
        insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
Example #57
    def kill_item(self, updates, original):
        """
        Kill the item after applying the kill template.

        :param dict updates: updates to apply; its body_html overrides the templated one
        :param dict original: the original item being killed
        """
        # apply the kill template
        original_copy = deepcopy(original)
        updates_data = self._apply_kill_template(original_copy)
        updates_data['body_html'] = updates.get('body_html', '')
        # resolve the document version
        resolve_document_version(document=updates_data, resource=ARCHIVE, method='PATCH', latest_doc=original)
        # kill the item
        self.patch(original.get(config.ID_FIELD), updates_data)
        # insert into versions
        insert_into_versions(id_=original[config.ID_FIELD])
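A hedged usage sketch; `service` stands for the service defining kill_item() above, and the master story's body_html is carried into the kill updates:

def kill_with_master_body(service, broadcast_item, master_body_html):
    updates = {'body_html': master_body_html}
    service.kill_item(updates, original=broadcast_item)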
Example #58
    def create(self, docs):
        service = get_resource_service(SOURCE)
        item_id = request.view_args['item_id']
        item = service.find_one(req=None, _id=item_id)
        doc = docs[0]

        self._valid_broadcast_item(item)

        desk_id = doc.get('desk')
        desk = None

        if desk_id:
            desk = get_resource_service('desks').find_one(req=None, _id=desk_id)

        doc.pop('desk', None)
        doc['task'] = {}
        if desk:
            doc['task']['desk'] = desk.get(config.ID_FIELD)
            doc['task']['stage'] = desk.get('working_stage')

        doc['task']['user'] = get_user().get('_id')
        genre_list = get_resource_service('vocabularies').find_one(req=None, _id='genre') or {}
        broadcast_genre = [{'qcode': genre.get('qcode'), 'name': genre.get('name')}
                           for genre in genre_list.get('items', [])
                           if genre.get('qcode') == BROADCAST_GENRE and genre.get('is_active')]

        if not broadcast_genre:
            raise SuperdeskApiError.badRequestError(message="Cannot find the {} genre.".format(BROADCAST_GENRE))

        doc['broadcast'] = {
            'status': '',
            'master_id': item_id,
            'takes_package_id': self.takesService.get_take_package_id(item),
            'rewrite_id': item.get('rewritten_by')
        }

        doc['genre'] = broadcast_genre
        doc['family_id'] = item.get('family_id')

        for key in FIELDS_TO_COPY:
            doc[key] = item.get(key)

        resolve_document_version(document=doc, resource=SOURCE, method='POST')
        service.post(docs)
        insert_into_versions(id_=doc[config.ID_FIELD])
        build_custom_hateoas(CUSTOM_HATEOAS, doc)
        return [doc[config.ID_FIELD]]