    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_SPIKE, original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        task = item.get('task', {})

        updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get('desk'), stage_id=task.get('stage'))
        updates[REVERT_STATE] = item.get(ITEM_STATE, None)

        if original.get('rewrite_of'):
            updates['rewrite_of'] = None

        if original.get('rewritten_by'):
            updates['rewritten_by'] = None

        if original.get('broadcast'):
            updates['broadcast'] = None

        if original.get('rewrite_sequence'):
            updates['rewrite_sequence'] = None

        # remove any relation with linked items
        updates[ITEM_EVENT_ID] = generate_guid(type=GUID_TAG)

        # remove lock
        updates.update({
            'lock_user': None,
            'lock_session': None,
        })

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            # remove links from items in the package
            package_service = PackageService()
            items = package_service.get_item_refs(original)
            for item in items:
                package_item = get_resource_service(ARCHIVE).find_one(req=None, _id=item[GUID_FIELD])
                if package_item:
                    linked_in_packages = [linked for linked in package_item.get(LINKED_IN_PACKAGES, [])
                                          if linked.get(PACKAGE) != original.get(config.ID_FIELD)]
                    super().system_update(package_item[config.ID_FIELD],
                                          {LINKED_IN_PACKAGES: linked_in_packages},
                                          package_item)

            # keep the structure of the old groups in order to be able to unspike the package
            updates[DELETED_GROUPS] = original[GROUPS]
            # and remove all the items from the package
            updates['groups'] = []

        item = self.backend.update(self.datasource, id, updates, original)
        push_notification('item:spike', item=str(id), user=str(user.get(config.ID_FIELD)))

        history_updates = dict(updates)
        if original.get('task'):
            history_updates['task'] = original.get('task')
        app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)
        self._removed_refs_from_package(id)
        return item
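Every example on this page follows the same basic shape: build an updates dict, persist it through a service or backend call, then record the change in item history via app.on_archive_item_updated(updates, original, operation) and push a notification. Below is a minimal, self-contained sketch of that shape; the stub classes and the spike_like_update helper are illustrative stand-ins, not real Superdesk APIs.

# Minimal stand-ins; none of these are real Superdesk APIs.
ITEM_SPIKE = 'spike'


class StubBackend:
    """Stand-in for self.backend; the real one persists to the datasource."""

    def update(self, datasource, item_id, updates, original):
        merged = dict(original)
        merged.update(updates)
        return merged


class StubApp:
    """Stand-in for the Flask app exposing the archive history hook."""

    def on_archive_item_updated(self, updates, original, operation):
        print('history entry:', operation, sorted(updates))


def spike_like_update(backend, app, item_id, updates, original):
    # persist the changes, then record them in item history
    item = backend.update('archive', item_id, updates, original)
    app.on_archive_item_updated(updates, original, ITEM_SPIKE)
    return item


print(spike_like_update(StubBackend(), StubApp(), 'urn:item:1',
                        {'lock_user': None, 'state': 'spiked'},
                        {'_id': 'urn:item:1', 'state': 'in_progress'}))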
Example #2
    def delete(self, lookup):
        target_id = request.view_args['target_id']
        archive_service = get_resource_service(ARCHIVE)
        target = archive_service.find_one(req=None, _id=target_id)
        updates = {}

        if target.get('rewrite_of'):
            # remove the rewrite info
            ArchiveSpikeService().update_rewrite(target)

        if not target.get('rewrite_of'):
            # there is nothing to do
            raise SuperdeskApiError.badRequestError("Only updates can be unlinked!")

        if target.get('rewrite_of'):
            updates['rewrite_of'] = None

        if target.get('anpa_take_key'):
            updates['anpa_take_key'] = None

        if target.get('rewrite_sequence'):
            updates['rewrite_sequence'] = None

        if target.get('sequence'):
            updates['sequence'] = None

        updates['event_id'] = generate_guid(type=GUID_TAG)

        archive_service.system_update(target_id, updates, target)
        user = get_user(required=True)
        push_notification('item:unlink', item=target_id, user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, target, ITEM_UNLINK)
Example #3
    def on_created(self, docs):
        packages = [doc for doc in docs if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE]
        if packages:
            self.packageService.on_created(packages)

        profiles = set()
        for doc in docs:
            subject = get_subject(doc)
            if subject:
                msg = 'added new {{ type }} item about "{{ subject }}"'
            else:
                msg = 'added new {{ type }} item with empty header/title'
            add_activity(ACTIVITY_CREATE, msg,
                         self.datasource, item=doc, type=doc[ITEM_TYPE], subject=subject)

            if doc.get('profile'):
                profiles.add(doc['profile'])

            self.cropService.update_media_references(doc, {})
            if doc[ITEM_OPERATION] == ITEM_FETCH:
                app.on_archive_item_updated({'task': doc.get('task')}, doc, ITEM_FETCH)
            else:
                app.on_archive_item_updated({'task': doc.get('task')}, doc, ITEM_CREATE)

        get_resource_service('content_types').set_used(profiles)
        push_content_notification(docs)
Example #4
    def create(self, docs, **kwargs):
        doc = docs[0] if len(docs) > 0 else {}
        original_id = request.view_args['original_id']
        update_document = doc.get('update')

        archive_service = get_resource_service(ARCHIVE)
        original = archive_service.find_one(req=None, _id=original_id)
        self._validate_rewrite(original, update_document)

        digital = TakesPackageService().get_take_package(original)
        rewrite = self._create_rewrite_article(original, digital,
                                               existing_item=update_document,
                                               desk_id=doc.get('desk_id'))

        if update_document:
            # process the existing story
            archive_service.patch(update_document[config.ID_FIELD], rewrite)
            app.on_archive_item_updated(rewrite, update_document, ITEM_LINK)
            rewrite[config.ID_FIELD] = update_document[config.ID_FIELD]
            ids = [update_document[config.ID_FIELD]]
        else:
            ids = archive_service.post([rewrite])
            build_custom_hateoas(CUSTOM_HATEOAS, rewrite)
            app.on_archive_item_updated({'rewrite_of': rewrite.get('rewrite_of')}, rewrite, ITEM_LINK)

        self._add_rewritten_flag(original, digital, rewrite)
        get_resource_service('archive_broadcast').on_broadcast_master_updated(ITEM_CREATE,
                                                                              item=original,
                                                                              rewrite_id=ids[0])

        doc.clear()
        doc.update(rewrite)
        return ids
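The two branches above differ only in where the rewrite lands: an existing update document is patched and reused, otherwise a brand new item is posted. A rough, self-contained sketch of that branch logic with a plain dict standing in for the archive service (the link_rewrite helper is made up):

def link_rewrite(archive, rewrite, update_document=None):
    """Return the id(s) touched: the existing update item if supplied, else a new one."""
    if update_document:
        # the "patch" branch above: reuse the supplied story as the rewrite target
        archive[update_document['_id']].update(rewrite)
        rewrite['_id'] = update_document['_id']
        return [update_document['_id']]
    # the "post" branch above: create a brand new rewrite item
    new_id = 'rewrite-{}'.format(len(archive) + 1)
    rewrite['_id'] = new_id
    archive[new_id] = rewrite
    return [new_id]


archive = {'orig-1': {'_id': 'orig-1', 'headline': 'first take'}}
print(link_rewrite(archive, {'rewrite_of': 'orig-1', 'headline': 'first take'}))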
Example #5
    def remove_refs_in_package(self, package, ref_id_to_remove, processed_packages=None):
        """Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.

        Before removing checks if the package has been processed. If processed the package is skipped.

        :return: package[config.ID_FIELD]
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [ref['guid'] for group in groups
                           for ref in group[REFS] if ref.get('type') == CONTENT_TYPE.COMPOSITE]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package, ref_id_to_remove)

        new_groups = self.remove_group_ref(package, ref_id_to_remove)
        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        resolve_document_version(updates, ARCHIVE, 'PATCH', package)
        get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
        app.on_archive_item_updated(updates, package, ITEM_UNLINK)
        insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
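remove_group_ref itself is not shown on this page. The following is a plausible stand-in for what it does, assuming the groups/refs/residRef layout visible above; it is a sketch, not the real implementation:

def remove_group_ref(package, ref_id_to_remove):
    """Return new groups with every ref whose residRef matches ref_id_to_remove dropped."""
    new_groups = []
    for group in package.get('groups', []):
        new_group = dict(group)
        new_group['refs'] = [ref for ref in group.get('refs', [])
                             if ref.get('residRef') != ref_id_to_remove]
        new_groups.append(new_group)
    return new_groups


package = {'groups': [{'id': 'main', 'refs': [{'residRef': 'a'}, {'residRef': 'b'}]}]}
print(remove_group_ref(package, 'a'))  # the main group keeps only the 'b' ref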
Example #6
 def _move(self, archived_doc, doc):
     archive_service = get_resource_service(ARCHIVE)
     original = deepcopy(archived_doc)
     user = get_user()
     send_to(doc=archived_doc,
             desk_id=doc.get('task', {}).get('desk'),
             stage_id=doc.get('task', {}).get('stage'),
             user_id=user.get(config.ID_FIELD))
     if archived_doc[ITEM_STATE] not in {
             CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
             CONTENT_STATE.KILLED
     }:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # set the change in desk type when content is moved.
     self.set_change_in_desk_type(archived_doc, original)
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=original)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
     del archived_doc[config.ID_FIELD]
     archive_service.update(original[config.ID_FIELD], archived_doc,
                            original)
     insert_into_versions(id_=original[config.ID_FIELD])
     push_item_move_notification(original, archived_doc)
     app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
Example #7
    def _duplicate_item(self, original_doc, state=None):
        """Duplicates an item.

        Duplicates the 'original_doc' including its version history. If the article being duplicated is assigned
        to a desk, its state is changed to Submitted.

        :return: guid of the duplicated article
        """

        new_doc = original_doc.copy()
        self._remove_after_copy(new_doc)
        on_duplicate_item(new_doc, original_doc)
        resolve_document_version(new_doc, SOURCE, 'PATCH', new_doc)

        if original_doc.get('task', {}).get('desk') is not None and new_doc.get(ITEM_STATE) != CONTENT_STATE.SUBMITTED:
            new_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED

        if state:
            new_doc[ITEM_STATE] = state

        convert_task_attributes_to_objectId(new_doc)
        get_model(ItemModel).create([new_doc])
        self._duplicate_versions(original_doc['_id'], new_doc)
        self._duplicate_history(original_doc['_id'], new_doc)
        app.on_archive_item_updated({'duplicate_id': new_doc['guid']}, original_doc, ITEM_DUPLICATE)
        app.on_archive_item_updated({'duplicate_id': original_doc['_id']}, new_doc, ITEM_DUPLICATED_FROM)

        return new_doc['guid']
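A compressed, self-contained sketch of the copy-and-reset flow above. The list of system fields stripped here is hypothetical (the real _remove_after_copy decides what gets dropped), and plain strings stand in for the state constants:

SUBMITTED = 'submitted'  # stand-in for CONTENT_STATE.SUBMITTED


def duplicate_item(original_doc, state=None):
    new_doc = dict(original_doc)
    # hypothetical system fields to strip; the real _remove_after_copy owns this list
    for field in ('_id', '_etag', 'unique_id'):
        new_doc.pop(field, None)
    # items sitting on a desk restart their workflow as submitted
    if original_doc.get('task', {}).get('desk') is not None and new_doc.get('state') != SUBMITTED:
        new_doc['state'] = SUBMITTED
    if state:
        new_doc['state'] = state
    return new_doc


print(duplicate_item({'_id': '1', 'guid': 'urn:x', 'state': 'in_progress',
                      'task': {'desk': 'sports'}}))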
Example #8
    def create(self, docs, **kwargs):
        """Toggle highlight status for given highlight and item."""
        service = get_resource_service('archive')
        publishedService = get_resource_service('published')
        ids = []
        for doc in docs:
            item = service.find_one(req=None, _id=doc['marked_item'])
            if not item:
                ids.append(None)
                continue
            ids.append(item['_id'])
            highlights = item.get('highlights', [])
            if not highlights:
                highlights = []

            if doc['highlights'] not in highlights:
                highlights.append(doc['highlights'])
                highlight_on = True  # highlight toggled on
            else:
                highlights = [h for h in highlights if h != doc['highlights']]
                highlight_on = False  # highlight toggled off

            updates = {
                'highlights': highlights,
                '_updated': item['_updated'],
                '_etag': item['_etag']
            }
            service.update(item['_id'], updates, item)

            publishedItems = publishedService.find({'item_id': item['_id']})
            for publishedItem in publishedItems:
                if (publishedItem['_current_version'] == item['_current_version']
                        or not highlight_on):
                    updates = {
                        'highlights': highlights,
                        '_updated': publishedItem['_updated'],
                        '_etag': publishedItem['_etag']
                    }
                    publishedService.update(publishedItem['_id'], updates,
                                            publishedItem)

            if highlight_on:
                app.on_archive_item_updated(
                    {
                        'highlight_id': doc['highlights'],
                        'highlight_name': get_highlight_name(doc['highlights'])
                    }, item, ITEM_MARK)
            else:
                app.on_archive_item_updated(
                    {
                        'highlight_id': doc['highlights'],
                        'highlight_name': get_highlight_name(doc['highlights'])
                    }, item, ITEM_UNMARK)

            push_notification('item:highlights',
                              marked=int(highlight_on),
                              item_id=item['_id'],
                              mark_id=str(doc['highlights']))

        return ids
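The heart of the toggle above is simple list membership on the item's highlights: append the id if it is missing, otherwise filter it out. A tiny self-contained sketch of just that decision (the helper name is made up):

def toggle_highlight(highlights, highlight_id):
    """Return (new_highlights, highlight_on), mirroring the toggle above."""
    highlights = list(highlights or [])
    if highlight_id not in highlights:
        highlights.append(highlight_id)
        return highlights, True  # toggled on
    return [h for h in highlights if h != highlight_id], False  # toggled off


print(toggle_highlight([], 'h1'))      # (['h1'], True)
print(toggle_highlight(['h1'], 'h1'))  # ([], False)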
Example #9
    def delete(self, lookup):
        target_id = request.view_args["target_id"]
        archive_service = get_resource_service(ARCHIVE)
        target = archive_service.find_one(req=None, _id=target_id)
        updates = {}

        if target.get("rewrite_of"):
            # remove the rewrite info
            ArchiveSpikeService().update_rewrite(target)

        if not target.get("rewrite_of"):
            # there is nothing to do
            raise SuperdeskApiError.badRequestError(
                _("Only updates can be unlinked!"))

        if target.get("rewrite_of"):
            updates["rewrite_of"] = None

        if target.get("anpa_take_key"):
            updates["anpa_take_key"] = None

        if target.get("rewrite_sequence"):
            updates["rewrite_sequence"] = None

        if target.get("sequence"):
            updates["sequence"] = None

        updates["event_id"] = generate_guid(type=GUID_TAG)

        archive_service.system_update(target_id, updates, target)
        user = get_user(required=True)
        push_notification("item:unlink",
                          item=target_id,
                          user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, target, ITEM_UNLINK)
Example #10
 def on_delete(self, doc):
     service = get_resource_service('archive')
     highlights_id = str(doc['_id'])
     query = {
         'query': {
             'filtered': {
                 'filter': {
                     'term': {
                         'highlights': highlights_id
                     }
                 }
             }
         }
     }
     req = init_parsed_request(query)
     proposedItems = service.get(req=req, lookup=None)
     for item in proposedItems:
         app.on_archive_item_updated(
             {
                 'highlight_id': highlights_id,
                 'highlight_name': get_highlight_name(highlights_id)
             }, item, ITEM_UNMARK)
         highlights = [
             h for h in item.get('highlights') if h != highlights_id
         ]
         service.update(item['_id'], {'highlights': highlights}, item)
Example #11
    def on_created(self, docs):
        packages = [
            doc for doc in docs if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE
        ]
        if packages:
            self.packageService.on_created(packages)

        profiles = set()
        for doc in docs:
            subject = get_subject(doc)
            if subject:
                msg = 'added new {{ type }} item about "{{ subject }}"'
            else:
                msg = 'added new {{ type }} item with empty header/title'
            add_activity(ACTIVITY_CREATE,
                         msg,
                         self.datasource,
                         item=doc,
                         type=doc[ITEM_TYPE],
                         subject=subject)

            if doc.get('profile'):
                profiles.add(doc['profile'])

            self.cropService.update_media_references(doc, {})
            if doc[ITEM_OPERATION] == ITEM_FETCH:
                app.on_archive_item_updated({'task': doc.get('task')}, doc,
                                            ITEM_FETCH)
            else:
                app.on_archive_item_updated({'task': doc.get('task')}, doc,
                                            ITEM_CREATE)

        get_resource_service('content_types').set_used(profiles)
        push_content_notification(docs)
Example #12
    def duplicate_item(self, original_doc, state=None, extra_fields=None, operation=None):
        """Duplicates an item.

        Duplicates the 'original_doc' including its version history. If the article being duplicated is assigned
        to a desk, its state is changed to Submitted.

        :return: guid of the duplicated article
        """

        new_doc = original_doc.copy()
        self.remove_after_copy(new_doc, extra_fields)
        on_duplicate_item(new_doc, original_doc, operation)
        resolve_document_version(new_doc, SOURCE, 'PATCH', new_doc)

        if original_doc.get('task', {}).get('desk') is not None and new_doc.get(ITEM_STATE) != CONTENT_STATE.SUBMITTED:
            new_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED

        if state:
            new_doc[ITEM_STATE] = state

        convert_task_attributes_to_objectId(new_doc)
        get_model(ItemModel).create([new_doc])
        self._duplicate_versions(original_doc['_id'], new_doc)
        self._duplicate_history(original_doc['_id'], new_doc)
        app.on_archive_item_updated({'duplicate_id': new_doc['guid']}, original_doc, operation or ITEM_DUPLICATE)
        app.on_archive_item_updated({'duplicate_id': original_doc['_id']}, new_doc, operation or ITEM_DUPLICATED_FROM)

        return new_doc['guid']
Example #13
    def remove_refs_in_package(self,
                               package,
                               ref_id_to_remove,
                               processed_packages=None):
        """Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.

        Before removing checks if the package has been processed. If processed the package is skipped.

        :return: package[config.ID_FIELD]
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [
            ref['guid'] for group in groups for ref in group[REFS]
            if ref.get('type') == CONTENT_TYPE.COMPOSITE
        ]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package,
                                                   ref_id_to_remove)

        new_groups = self.remove_group_ref(package, ref_id_to_remove)
        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        resolve_document_version(updates, ARCHIVE, 'PATCH', package)
        get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
        app.on_archive_item_updated(updates, package, ITEM_UNLINK)
        insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
Example #14
    def create(self, docs, **kwargs):
        """Generate highlights text item for given package.

        If doc.preview is True, the item is not saved but only returned.
        """
        service = superdesk.get_resource_service('archive')
        for doc in docs:
            preview = doc.get('preview', False)
            package = service.find_one(req=None, _id=doc['package'])
            if not package:
                superdesk.abort(404)
            export = doc.get('export')
            template = get_template(package.get('highlight'))
            stringTemplate = None
            if template and 'body_html' in template.get('data', {}):
                stringTemplate = template['data']['body_html']

            doc.clear()
            doc[ITEM_TYPE] = CONTENT_TYPE.TEXT
            doc['family_id'] = package.get('guid')
            doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
            doc[config.VERSION] = 1

            for field in package:
                if field not in PACKAGE_FIELDS:
                    doc[field] = package[field]

            items = []
            for group in package.get('groups', []):
                for ref in group.get('refs', []):
                    if 'residRef' in ref:
                        item = service.find_one(req=None, _id=ref.get('residRef'))
                        if item:
                            if not (export or preview) and \
                                    (item.get('lock_session') or item.get('state') != 'published'):
                                message = 'Locked or not published items in highlight list.'
                                raise SuperdeskApiError.forbiddenError(message)

                            items.append(item)
                            if not preview:
                                app.on_archive_item_updated(
                                    {'highlight_id': package.get('highlight'),
                                     'highlight_name': get_highlight_name(package.get('highlight'))}, item,
                                    ITEM_EXPORT_HIGHLIGHT)

            if stringTemplate:
                doc['body_html'] = render_template_string(stringTemplate, package=package, items=items)
            else:
                doc['body_html'] = render_template('default_highlight_template.txt', package=package, items=items)
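        # note: `preview` and `package` below still hold the values from the last
        # doc processed in the loop above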
        if preview:
            return ['' for doc in docs]
        else:
            ids = service.post(docs, **kwargs)
            for id in ids:
                app.on_archive_item_updated(
                    {'highlight_id': package.get('highlight'),
                     'highlight_name': get_highlight_name(package.get('highlight'))}, {'_id': id},
                    ITEM_CREATE_HIGHLIGHT)
            return ids
Example #15
    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_SPIKE, original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        task = item.get('task', {})

        updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get('desk'),
                                                 stage_id=task.get('stage'))
        updates[REVERT_STATE] = item.get(ITEM_STATE, None)

        if original.get('rewrite_of'):
            updates['rewrite_of'] = None

        if original.get('rewritten_by'):
            updates['rewritten_by'] = None

        if original.get('broadcast'):
            updates['broadcast'] = None

        if original.get('rewrite_sequence'):
            updates['rewrite_sequence'] = None

        # remove any relation with linked items
        updates[ITEM_EVENT_ID] = generate_guid(type=GUID_TAG)

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            # remove links from items in the package
            package_service = PackageService()
            items = package_service.get_item_refs(original)
            for item in items:
                package_item = get_resource_service(ARCHIVE).find_one(
                    req=None, _id=item[GUID_FIELD])
                if package_item:
                    linked_in_packages = [
                        linked
                        for linked in package_item.get(LINKED_IN_PACKAGES, [])
                        if linked.get(PACKAGE) != original.get(config.ID_FIELD)
                    ]
                    super().system_update(
                        package_item[config.ID_FIELD],
                        {LINKED_IN_PACKAGES: linked_in_packages}, package_item)

            # and remove all the items from the package
            updates['groups'] = []

        item = self.backend.update(self.datasource, id, updates, original)
        push_notification('item:spike',
                          item=str(id),
                          user=str(user.get(config.ID_FIELD)))

        history_updates = dict(updates)
        if original.get('task'):
            history_updates['task'] = original.get('task')
        app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)
        self._removed_refs_from_package(id)
        return item
Example #16
 def create(self, docs, **kwargs):
     doc = docs[0] if len(docs) > 0 else {}
     article_id = request.view_args['original_id']
     article_version = doc.get('version')
     article = self._validate_article(article_id, article_version)
     subscribers = self._validate_subscribers(doc.get('subscribers'), article)
     EnqueueService().resend(article, subscribers)
     app.on_archive_item_updated({'subscribers': doc.get('subscribers')}, article, ITEM_RESEND)
     return [article_id]
Example #17
 def create(self, docs, **kwargs):
     doc = docs[0] if len(docs) > 0 else {}
     article_id = request.view_args["original_id"]
     article_version = doc.get("version")
     article = self._validate_article(article_id, article_version)
     subscribers = self._validate_subscribers(doc.get("subscribers"), article)
     get_enqueue_service(article.get(ITEM_OPERATION)).resend(article, subscribers)
     app.on_archive_item_updated({"subscribers": doc.get("subscribers")}, article, ITEM_RESEND)
     return [article_id]
Example #18
    def create(self, docs, **kwargs):
        """Toggle highlight status for given highlight and item."""
        service = get_resource_service('archive')
        publishedService = get_resource_service('published')
        ids = []
        for doc in docs:
            item = service.find_one(req=None, _id=doc['marked_item'])
            if not item:
                ids.append(None)
                continue
            ids.append(item['_id'])
            highlights = item.get('highlights', [])
            if not highlights:
                highlights = []

            if doc['highlights'] not in highlights:
                highlights.append(doc['highlights'])
                highlight_on = True  # highlight toggled on
            else:
                highlights = [h for h in highlights if h != doc['highlights']]
                highlight_on = False  # highlight toggled off

            updates = {
                'highlights': highlights,
                '_updated': item['_updated'],
                '_etag': item['_etag']
            }
            service.update(item['_id'], updates, item)

            publishedItems = publishedService.find({'item_id': item['_id']})
            for publishedItem in publishedItems:
                if publishedItem['_current_version'] == item['_current_version'] or not highlight_on:
                    updates = {
                        'highlights': highlights,
                        '_updated': publishedItem['_updated'],
                        '_etag': publishedItem['_etag']
                    }
                    publishedService.update(publishedItem['_id'], updates, publishedItem)

            if highlight_on:
                app.on_archive_item_updated(
                    {'highlight_id': doc['highlights'], 'highlight_name': get_highlight_name(doc['highlights'])}, item,
                    ITEM_MARK)
            else:
                app.on_archive_item_updated(
                    {'highlight_id': doc['highlights'], 'highlight_name': get_highlight_name(doc['highlights'])}, item,
                    ITEM_UNMARK)

            push_notification(
                'item:highlights',
                marked=int(highlight_on),
                item_id=item['_id'],
                mark_id=str(doc['highlights']))

        return ids
Example #19
 def on_delete(self, doc):
     service = get_resource_service('archive')
     highlights_id = str(doc['_id'])
     query = {'query': {'filtered': {'filter': {'term': {'highlights': highlights_id}}}}}
     req = init_parsed_request(query)
     proposedItems = service.get(req=req, lookup=None)
     for item in proposedItems:
         app.on_archive_item_updated(
             {'highlight_id': highlights_id, 'highlight_name': get_highlight_name(highlights_id)}, item, ITEM_UNMARK)
         highlights = [h for h in item.get('highlights') if h != highlights_id]
         service.update(item['_id'], {'highlights': highlights}, item)
Example #20
    def remove_refs_in_package(self, package, ref_id_to_remove, processed_packages=None):
        """Removes residRef referenced by ref_id_to_remove from the package associations and returns the package id.

        Before removing checks if the package has been processed. If processed the package is skipped.
        In case of takes package, sequence is decremented and last_take field is updated.
        If sequence is zero then the takes package is deleted.

        :return: package[config.ID_FIELD]
        """
        groups = package[GROUPS]

        if processed_packages is None:
            processed_packages = []

        sub_package_ids = [ref['guid'] for group in groups
                           for ref in group[REFS] if ref.get('type') == CONTENT_TYPE.COMPOSITE]
        for sub_package_id in sub_package_ids:
            if sub_package_id not in processed_packages:
                sub_package = self.find_one(req=None, _id=sub_package_id)
                return self.remove_refs_in_package(sub_package, ref_id_to_remove)

        new_groups = self.remove_group_ref(package, ref_id_to_remove)

        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}

        # if takes package then adjust the reference.
        # safe to do this as take can only be in one takes package.
        delete_package = False
        if package.get(PACKAGE_TYPE) == TAKES_PACKAGE:
            new_sequence = package[SEQUENCE] - 1
            if new_sequence == 0:
                # remove the takes package.
                get_resource_service(ARCHIVE).delete_action({config.ID_FIELD: package[config.ID_FIELD]})
                delete_package = True
            else:
                updates[SEQUENCE] = new_sequence
                last_take_group = next(reference for reference in
                                       next(new_group.get(REFS) for new_group in new_groups if
                                            new_group[GROUP_ID] == MAIN_GROUP)
                                       if reference.get(SEQUENCE) == new_sequence)

                if last_take_group:
                    updates[LAST_TAKE] = last_take_group.get(RESIDREF)
                    last_take_item = get_resource_service(ARCHIVE).find_one(req=None, _id=updates[LAST_TAKE])
                    app.on_archive_item_updated({}, last_take_item, ITEM_UNLINK)

        if not delete_package:
            resolve_document_version(updates, ARCHIVE, 'PATCH', package)
            get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
            app.on_archive_item_updated(updates, package, ITEM_UNLINK)
            insert_into_versions(id_=package[config.ID_FIELD])

        sub_package_ids.append(package[config.ID_FIELD])
        return sub_package_ids
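The nested next() calls above locate, inside the main group, the reference whose sequence matches the decremented take sequence. The same lookup written out step by step; it assumes GROUP_ID, REFS, SEQUENCE and RESIDREF map to the 'id', 'refs', 'sequence' and 'residRef' keys, and the helper name is made up:

def find_last_take_ref(groups, main_group_id, sequence):
    """Return the ref in the main group whose 'sequence' equals the given value, or None."""
    for group in groups:
        if group.get('id') == main_group_id:
            for ref in group.get('refs', []):
                if ref.get('sequence') == sequence:
                    return ref
    return None


groups = [{'id': 'main', 'refs': [{'residRef': 't1', 'sequence': 1},
                                  {'residRef': 't2', 'sequence': 2}]}]
print(find_last_take_ref(groups, 'main', 1))  # -> {'residRef': 't1', 'sequence': 1}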
Example #21
 def create(self, docs, **kwargs):
     doc = docs[0] if len(docs) > 0 else {}
     article_id = request.view_args['original_id']
     article_version = doc.get('version')
     article = self._validate_article(article_id, article_version)
     subscribers = self._validate_subscribers(doc.get('subscribers'),
                                              article)
     EnqueueService().resend(article, subscribers)
     app.on_archive_item_updated({'subscribers': doc.get('subscribers')},
                                 article, ITEM_RESEND)
     return [article_id]
Example #22
    def create(self, docs, **kwargs):
        """Toggle marked desk status for given desk and item."""

        service = get_resource_service('archive')
        published_service = get_resource_service('published')
        ids = []
        for doc in docs:
            item = service.find_one(req=None, guid=doc['marked_item'])
            if not item:
                ids.append(None)
                continue
            ids.append(item['_id'])
            marked_desks = item.get('marked_desks', [])
            if not marked_desks:
                marked_desks = []

            existing_mark = next((m for m in marked_desks if m['desk_id'] == doc['marked_desk']), None)

            if existing_mark:
                # there is an existing mark so this is an un-mark action
                marked_desks = [m for m in marked_desks if m['desk_id'] != doc['marked_desk']]
                marked_desks_on = False  # desk mark toggled off
            else:
                # there is no existing mark so this is a mark action
                user = get_user() or {}
                new_mark = {}
                new_mark['desk_id'] = doc['marked_desk']
                new_mark['user_marked'] = str(user.get(config.ID_FIELD, ''))
                new_mark['date_marked'] = utcnow()
                marked_desks.append(new_mark)
                marked_desks_on = True  # desk mark toggled on

            updates = {'marked_desks': marked_desks}
            service.system_update(item['_id'], updates, item)

            publishedItems = published_service.find({'item_id': item['_id']})
            for publishedItem in publishedItems:
                if publishedItem['_current_version'] == item['_current_version'] or not marked_desks_on:
                    updates = {'marked_desks': marked_desks}
                    published_service.system_update(publishedItem['_id'], updates, publishedItem)

            push_notification(
                'item:marked_desks',
                marked=int(marked_desks_on),
                item_id=item['_id'],
                mark_id=str(doc['marked_desk']))

            if marked_desks_on:
                app.on_archive_item_updated({'desk_id': doc['marked_desk']}, item, ITEM_MARK)
            else:
                app.on_archive_item_updated({'desk_id': doc['marked_desk']}, item, ITEM_UNMARK)

        return ids
Example #23
    def _add_rewritten_flag(self, original, rewrite):
        """Adds rewritten_by field to the existing published items.

        :param dict original: item on which rewrite is triggered
        :param dict rewrite: rewritten document
        """
        get_resource_service('published').update_published_items(original[config.ID_FIELD],
                                                                 'rewritten_by', rewrite[config.ID_FIELD])

        # modify the original item as well.
        get_resource_service(ARCHIVE).system_update(original[config.ID_FIELD],
                                                    {'rewritten_by': rewrite[config.ID_FIELD]}, original)
        app.on_archive_item_updated({'rewritten_by': rewrite[config.ID_FIELD]}, original, ITEM_REWRITE)
Example #24
    def _add_rewritten_flag(self, original, rewrite):
        """Adds rewritten_by field to the existing published items.

        :param dict original: item on which rewrite is triggered
        :param dict rewrite: rewritten document
        """
        get_resource_service('published').update_published_items(original[config.ID_FIELD],
                                                                 'rewritten_by', rewrite[config.ID_FIELD])

        # modify the original item as well.
        get_resource_service(ARCHIVE).system_update(original[config.ID_FIELD],
                                                    {'rewritten_by': rewrite[config.ID_FIELD]}, original)
        app.on_archive_item_updated({'rewritten_by': rewrite[config.ID_FIELD]}, original, ITEM_REWRITE)
Example #25
    def create(self, docs, **kwargs):
        doc = docs[0] if len(docs) > 0 else {}
        original_id = request.view_args["original_id"]
        update_document = doc.get("update")

        archive_service = get_resource_service(ARCHIVE)
        original = archive_service.find_one(req=None, _id=original_id)
        self._validate_rewrite(original, update_document)

        rewrite = self._create_rewrite_article(original,
                                               existing_item=update_document,
                                               desk_id=doc.get("desk_id"))

        # sync editor state
        copy_fields(original, rewrite, ignore_empty=True)

        if update_document:
            # copy editor state from existing item to preserve those
            copy_fields(update_document, rewrite, ignore_empty=True)

        if rewrite.get("fields_meta"):
            generate_fields(rewrite, force=True)

        update_associations(rewrite)

        # signal
        item_rewrite.send(self, item=rewrite, original=original)

        if update_document:
            # process the existing story
            archive_service.patch(update_document[config.ID_FIELD], rewrite)
            app.on_archive_item_updated(rewrite, update_document, ITEM_LINK)
            rewrite[config.ID_FIELD] = update_document[config.ID_FIELD]
            ids = [update_document[config.ID_FIELD]]
        else:
            # Set the version.
            resolve_document_version(rewrite, ARCHIVE, "POST")
            ids = archive_service.post([rewrite])
            insert_into_versions(doc=rewrite)
            build_custom_hateoas(CUSTOM_HATEOAS, rewrite)

            app.on_archive_item_updated(
                {"rewrite_of": rewrite.get("rewrite_of")}, rewrite, ITEM_LINK)

        self._add_rewritten_flag(original, rewrite)
        get_resource_service("archive_broadcast").on_broadcast_master_updated(
            ITEM_CREATE, item=original, rewrite_id=ids[0])

        doc.clear()
        doc.update(rewrite)
        return ids
Example #26
    def update_rewrite(self, original):
        """Removes the reference from the rewritten story in published collection."""
        if original.get('rewrite_of') and original.get(ITEM_EVENT_ID):
            clear_rewritten_flag(original.get(ITEM_EVENT_ID), original[config.ID_FIELD], 'rewritten_by')

        # handle the rewritten_by link before the story is spiked
        archive_service = get_resource_service(ARCHIVE)

        if original.get('rewritten_by'):
            # spiking the story from which the rewrite was triggered.
            # in this case both rewrite_of and rewritten_by are published.
            rewrite_id = original.get('rewritten_by')
            rewritten_by = archive_service.find_one(req=None, _id=rewrite_id)
            archive_service.system_update(rewrite_id, {'rewrite_of': None, 'rewrite_sequence': 0}, rewritten_by)
            app.on_archive_item_updated({'rewrite_of': None, 'rewrite_sequence': 0}, original, ITEM_UNLINK)
Example #27
    def update_rewrite(self, original):
        """Removes the reference from the rewritten story in published collection."""
        if original.get("rewrite_of") and original.get(ITEM_EVENT_ID):
            clear_rewritten_flag(original.get(ITEM_EVENT_ID), original[config.ID_FIELD], "rewritten_by")

        # handle the rewritten_by link before the story is spiked
        archive_service = get_resource_service(ARCHIVE)

        if original.get("rewritten_by"):
            # spiking the story from which the rewrite was triggered.
            # in this case both rewrite_of and rewritten_by are published.
            rewrite_id = original.get("rewritten_by")
            rewritten_by = archive_service.find_one(req=None, _id=rewrite_id)
            archive_service.system_update(rewrite_id, {"rewrite_of": None, "rewrite_sequence": 0}, rewritten_by)
            app.on_archive_item_updated({"rewrite_of": None, "rewrite_sequence": 0}, original, ITEM_UNLINK)
Example #28
    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_UNSPIKE, original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

        self.set_unspike_updates(item, updates)
        self.backend.update(self.datasource, id, updates, original)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        push_notification('item:unspike', item=str(id), user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)

        return item
Example #29
    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_UNSPIKE, original_state):
            raise InvalidStateTransitionError()

        user = get_user(required=True)
        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

        self.set_unspike_updates(item, updates)
        self.backend.update(self.datasource, id, updates, original)

        item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
        push_notification("item:unspike", item=str(id), user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)

        return item
Example #30
    def _update_archive(self, original, updates, versioned_doc=None, should_insert_into_versions=True):
        """Updates the articles into archive collection and inserts the latest into archive_versions.

        Also clears autosaved versions if any.

        :param versioned_doc: doc which can be inserted into archive_versions
        :param should_insert_into_versions: if True, inserts the latest document into the versions collection
        """
        self.backend.update(self.datasource, original[config.ID_FIELD], updates, original)
        app.on_archive_item_updated(updates, original, updates[ITEM_OPERATION])

        if should_insert_into_versions:
            if versioned_doc is None:
                insert_into_versions(id_=original[config.ID_FIELD])
            else:
                insert_into_versions(doc=versioned_doc)

        get_component(ItemAutosave).clear(original[config.ID_FIELD])
Example #31
    def create(self, docs, **kwargs):
        doc = docs[0] if len(docs) > 0 else {}
        original_id = request.view_args['original_id']
        update_document = doc.get('update')

        archive_service = get_resource_service(ARCHIVE)
        original = archive_service.find_one(req=None, _id=original_id)
        self._validate_rewrite(original, update_document)

        rewrite = self._create_rewrite_article(original,
                                               existing_item=update_document,
                                               desk_id=doc.get('desk_id'))

        if 'body_html' in rewrite:
            if 'fields_meta' in original:
                rewrite['fields_meta'] = original['fields_meta']
            update_associations(rewrite)

        # signal
        item_rewrite.send(self, item=rewrite, original=original)

        if update_document:
            # process the existing story
            archive_service.patch(update_document[config.ID_FIELD], rewrite)
            app.on_archive_item_updated(rewrite, update_document, ITEM_LINK)
            rewrite[config.ID_FIELD] = update_document[config.ID_FIELD]
            ids = [update_document[config.ID_FIELD]]
        else:
            # Set the version.
            resolve_document_version(rewrite, ARCHIVE, "POST")
            ids = archive_service.post([rewrite])
            insert_into_versions(doc=rewrite)
            build_custom_hateoas(CUSTOM_HATEOAS, rewrite)

            app.on_archive_item_updated(
                {'rewrite_of': rewrite.get('rewrite_of')}, rewrite, ITEM_LINK)

        self._add_rewritten_flag(original, rewrite)
        get_resource_service('archive_broadcast').on_broadcast_master_updated(
            ITEM_CREATE, item=original, rewrite_id=ids[0])

        doc.clear()
        doc.update(rewrite)
        return ids
Example #32
    def _update_archive(self, original, updates, versioned_doc=None, should_insert_into_versions=True):
        """Updates the articles into archive collection and inserts the latest into archive_versions.

        Also clears autosaved versions if any.

        :param versioned_doc: doc which can be inserted into archive_versions
        :param should_insert_into_versions: if True, inserts the latest document into the versions collection
        """

        self.backend.update(self.datasource, original[config.ID_FIELD], updates, original)
        app.on_archive_item_updated(updates, original, updates[ITEM_OPERATION])

        if should_insert_into_versions:
            if versioned_doc is None:
                insert_into_versions(id_=original[config.ID_FIELD])
            else:
                insert_into_versions(doc=versioned_doc)

        get_component(ItemAutosave).clear(original[config.ID_FIELD])
Example #33
    def _move(self, archived_doc, doc):
        archive_service = get_resource_service(ARCHIVE)
        original = deepcopy(archived_doc)
        user = get_user()
        send_to(
            doc=archived_doc,
            desk_id=doc.get("task", {}).get("desk"),
            stage_id=doc.get("task", {}).get("stage"),
            user_id=user.get(config.ID_FIELD),
        )
        if archived_doc[ITEM_STATE] not in ({
                CONTENT_STATE.PUBLISHED,
                CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED,
                CONTENT_STATE.RECALLED,
                CONTENT_STATE.CORRECTION,
        }):
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE
        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, "PATCH", original)

        del archived_doc[config.ID_FIELD]
        del archived_doc[config.ETAG]  # force etag update
        archived_doc["versioncreated"] = utcnow()

        signals.item_move.send(self, item=archived_doc, original=original)
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])
        push_item_move_notification(original, archived_doc)
        app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)

        # make sure `item._id` is there in signal
        moved_item = archived_doc.copy()
        moved_item[config.ID_FIELD] = original[config.ID_FIELD]
        signals.item_moved.send(self, item=moved_item, original=original)
Example #34
 def _move(self, archived_doc, doc):
     archive_service = get_resource_service(ARCHIVE)
     original = deepcopy(archived_doc)
     user = get_user()
     send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
             user_id=user.get(config.ID_FIELD))
     if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # set the change in desk type when content is moved.
     self.set_change_in_desk_type(archived_doc, original)
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=original)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
     del archived_doc[config.ID_FIELD]
     archive_service.update(original[config.ID_FIELD], archived_doc, original)
     insert_into_versions(id_=original[config.ID_FIELD])
     push_item_move_notification(original, archived_doc)
     app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
Example #35
    def create(self, docs, **kwargs):
        doc = docs[0] if len(docs) > 0 else {}
        original_id = request.view_args['original_id']
        update_document = doc.get('update')

        archive_service = get_resource_service(ARCHIVE)
        original = archive_service.find_one(req=None, _id=original_id)
        self._validate_rewrite(original, update_document)

        rewrite = self._create_rewrite_article(original,
                                               existing_item=update_document,
                                               desk_id=doc.get('desk_id'))

        if 'body_html' in rewrite:
            if 'fields_meta' in original:
                rewrite['fields_meta'] = original['fields_meta']
            update_associations(rewrite)

        if update_document:
            # process the existing story
            archive_service.patch(update_document[config.ID_FIELD], rewrite)
            app.on_archive_item_updated(rewrite, update_document, ITEM_LINK)
            rewrite[config.ID_FIELD] = update_document[config.ID_FIELD]
            ids = [update_document[config.ID_FIELD]]
        else:
            # Set the version.
            resolve_document_version(rewrite, ARCHIVE, "POST")
            ids = archive_service.post([rewrite])
            insert_into_versions(doc=rewrite)
            build_custom_hateoas(CUSTOM_HATEOAS, rewrite)

            app.on_archive_item_updated({'rewrite_of': rewrite.get('rewrite_of')}, rewrite, ITEM_LINK)

        self._add_rewritten_flag(original, rewrite)
        get_resource_service('archive_broadcast').on_broadcast_master_updated(ITEM_CREATE,
                                                                              item=original,
                                                                              rewrite_id=ids[0])

        doc.clear()
        doc.update(rewrite)
        return ids
Example #36
    def delete(self, lookup):
        target_id = request.view_args['target_id']
        archive_service = get_resource_service(ARCHIVE)
        target = archive_service.find_one(req=None, _id=target_id)
        self._validate_unlink(target)
        updates = {}

        takes_package = TakesPackageService().get_take_package(target)

        if takes_package and TakesPackageService().is_last_takes_package_item(target):
            # remove the take link
            PackageService().remove_refs_in_package(takes_package, target_id)

        if target.get('rewrite_of'):
            # remove the rewrite info
            ArchiveSpikeService().update_rewrite(target)

        if not takes_package and not target.get('rewrite_of'):
            # there is nothing to do
            raise SuperdeskApiError.badRequestError("Only takes and updates can be unlinked!")

        if target.get('rewrite_of'):
            updates['rewrite_of'] = None

        if target.get('anpa_take_key'):
            updates['anpa_take_key'] = None

        if target.get('rewrite_sequence'):
            updates['rewrite_sequence'] = None

        if target.get('sequence'):
            updates['sequence'] = None

        updates['event_id'] = generate_guid(type=GUID_TAG)

        archive_service.system_update(target_id, updates, target)
        user = get_user(required=True)
        push_notification('item:unlink', item=target_id, user=str(user.get(config.ID_FIELD)))
        app.on_archive_item_updated(updates, target, ITEM_UNLINK)
Example #37
def clear_rewritten_flag(event_id, rewrite_id, rewrite_field):
    """Clears rewritten_by or rewrite_of field from the existing published and archive items.

    :param str event_id: event id of the document
    :param str rewrite_id: rewrite id of the document
    :param str rewrite_field: field name 'rewrite_of' or 'rewritten_by'
    """
    publish_service = get_resource_service('published')
    archive_service = get_resource_service(ARCHIVE)

    published_rewritten_stories = publish_service.get_rewritten_items_by_event_story(event_id,
                                                                                     rewrite_id,
                                                                                     rewrite_field)
    processed_items = set()
    for doc in published_rewritten_stories:
        doc_id = doc.get(config.ID_FIELD)
        publish_service.update_published_items(doc_id, rewrite_field, None)
        if doc_id not in processed_items:
            # clear the flag from the archive as well.
            archive_item = archive_service.find_one(req=None, _id=doc_id)
            archive_service.system_update(doc_id, {rewrite_field: None}, archive_item)
            processed_items.add(doc_id)
            app.on_archive_item_updated({rewrite_field: None}, archive_item, ITEM_UNLINK)
Example #38
def clear_rewritten_flag(event_id, rewrite_id, rewrite_field):
    """Clears rewritten_by or rewrite_of field from the existing published and archive items.

    :param str event_id: event id of the document
    :param str rewrite_id: rewrite id of the document
    :param str rewrite_field: field name 'rewrite_of' or 'rewritten_by'
    """
    publish_service = get_resource_service('published')
    archive_service = get_resource_service(ARCHIVE)

    published_rewritten_stories = publish_service.get_rewritten_items_by_event_story(event_id,
                                                                                     rewrite_id,
                                                                                     rewrite_field)
    processed_items = set()
    for doc in published_rewritten_stories:
        doc_id = doc.get(config.ID_FIELD)
        publish_service.update_published_items(doc_id, rewrite_field, None)
        if doc_id not in processed_items:
            # clear the flag from the archive as well.
            archive_item = archive_service.find_one(req=None, _id=doc_id)
            archive_service.system_update(doc_id, {rewrite_field: None}, archive_item)
            processed_items.add(doc_id)
            app.on_archive_item_updated({rewrite_field: None}, archive_item, ITEM_UNLINK)
Example #39
    def enqueue_item(self, published_item):
        """
        Creates the corresponding entries in the publish queue for the given item
        """
        published_item_id = ObjectId(published_item[config.ID_FIELD])
        published_service = get_resource_service(PUBLISHED)
        archive_service = get_resource_service(ARCHIVE)
        published_update = {
            QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS,
            'last_queue_event': utcnow()
        }
        try:
            logger.info('Queueing item with id: {} and item_id: {}'.format(
                published_item_id, published_item['item_id']))

            published_item = published_service.find_one(req=None,
                                                        _id=published_item_id)
            if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
                logger.info(
                    'Queue State is not pending for published item {}. It is in {}'
                    .format(published_item_id,
                            published_item.get(QUEUE_STATE)))
                return

            if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
                # if scheduled then change the state to published
                # change the `version` and `versioncreated` for the item
                # in archive collection and published collection.
                versioncreated = utcnow()
                item_updates = {
                    'versioncreated': versioncreated,
                    ITEM_STATE: CONTENT_STATE.PUBLISHED
                }
                resolve_document_version(
                    document=item_updates,
                    resource=ARCHIVE,
                    method='PATCH',
                    latest_doc={config.VERSION: published_item[config.VERSION]})

                # update the archive collection
                archive_item = archive_service.find_one(
                    req=None, _id=published_item['item_id'])
                archive_service.system_update(published_item['item_id'],
                                              item_updates, archive_item)
                # insert into version.
                insert_into_versions(published_item['item_id'], doc=None)
                # update archive history
                app.on_archive_item_updated(item_updates, archive_item,
                                            ITEM_PUBLISH)
                # import to legal archive
                import_into_legal_archive.apply_async(
                    countdown=3, kwargs={'item_id': published_item['item_id']})
                logger.info(
                    'Modified the version of scheduled item: {}'.format(
                        published_item_id))

                logger.info('Publishing scheduled item_id: {}'.format(
                    published_item_id))
                # update the published collection
                published_update.update(item_updates)
                published_item.update({
                    'versioncreated':
                    versioncreated,
                    ITEM_STATE:
                    CONTENT_STATE.PUBLISHED,
                    config.VERSION:
                    item_updates[config.VERSION]
                })
                # send a notification to the clients
                push_content_notification([{
                    '_id':
                    str(published_item['item_id']),
                    'task':
                    published_item.get('task', None)
                }])
                #  apply internal destinations
                signals.item_published.send(self,
                                            item=archive_service.find_one(
                                                req=None,
                                                _id=published_item['item_id']))

            published_service.patch(published_item_id, published_update)
            # queue the item for publishing
            try:
                queued = get_enqueue_service(
                    published_item[ITEM_OPERATION]).enqueue_item(
                        published_item, None)
            except KeyError as key_error:
                error_updates = {
                    QUEUE_STATE: PUBLISH_STATE.ERROR,
                    ERROR_MESSAGE: str(key_error)
                }
                published_service.patch(published_item_id, error_updates)
                logger.exception('No enqueue service found for operation %s',
                                 published_item[ITEM_OPERATION])
                raise

            # if the item is queued in the publish_queue then the state is "queued"
            # else the queue state is "queued_not_transmitted"
            queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
            published_service.patch(published_item_id,
                                    {QUEUE_STATE: queue_state})
            logger.info('Queued item with id: {} and item_id: {}'.format(
                published_item_id, published_item['item_id']))
        except ConnectionTimeout as error:  # recoverable, set state to pending and retry next time
            error_updates = {
                QUEUE_STATE: PUBLISH_STATE.PENDING,
                ERROR_MESSAGE: str(error)
            }
            published_service.patch(published_item_id, error_updates)
            raise
        except SoftTimeLimitExceeded as error:
            error_updates = {
                QUEUE_STATE: PUBLISH_STATE.PENDING,
                ERROR_MESSAGE: str(error)
            }
            published_service.patch(published_item_id, error_updates)
            raise
        except Exception as error:
            error_updates = {
                QUEUE_STATE: PUBLISH_STATE.ERROR,
                ERROR_MESSAGE: str(error)
            }
            published_service.patch(published_item_id, error_updates)
            raise
    def link_as_next_take(self, target, link):
        """Makes next take to target from given link.

        Check if target has an associated takes package. If not, create it and add target as a take.
        Check if the target is the last take, if not, resolve the last take. Copy metadata from the target and add it
        as the next take and return the update link item

        :return: the updated link item
        """

        takes_package_id = self.get_take_package_id(target)
        archive_service = get_resource_service(ARCHIVE)
        takes_package = archive_service.find_one(
            req=None, _id=takes_package_id) if takes_package_id else {}

        if not takes_package:
            # setting the sequence to 1 for target.
            updates = {SEQUENCE: 1}
            if target[ITEM_STATE] in [
                    CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED,
                    CONTENT_STATE.SCHEDULED, CONTENT_STATE.INGESTED
            ]:
                raise SuperdeskApiError.forbiddenError(
                    "Item isn't in a valid state for creating takes.")
            else:
                archive_service.system_update(target.get(config.ID_FIELD),
                                              updates, target)

        link_updates = {}

        if not link.get(config.ID_FIELD):
            # A new story to be linked
            self.__copy_metadata__(target, link, takes_package, set_state=True)
            link[ITEM_OPERATION] = ITEM_CREATE
            archive_service.post([link])
        else:
            self.__copy_metadata__(target,
                                   link_updates,
                                   takes_package,
                                   set_state=False)

        link.update(link_updates)

        if not takes_package_id:
            takes_package_id = self.package_story_as_a_take(
                target, takes_package, link)
        else:
            original_takes_package = deepcopy(takes_package)
            self.__link_items__(takes_package, target, link)
            del takes_package[config.ID_FIELD]
            takes_package.pop('unique_id', None)
            takes_package.pop('unique_name', None)
            takes_package.pop(PUBLISH_SCHEDULE, None)
            takes_package.pop(SCHEDULE_SETTINGS, None)

            resolve_document_version(takes_package, ARCHIVE, 'PATCH',
                                     takes_package)
            archive_service.patch(takes_package_id, takes_package)
            app.on_archive_item_updated(link_updates, original_takes_package,
                                        ITEM_LINK)
            get_resource_service(
                'archive_broadcast').on_broadcast_master_updated(
                    ITEM_CREATE, target, takes_package_id=takes_package_id)

        if link.get(SEQUENCE):
            link_updates.update({SEQUENCE: link[SEQUENCE]})
            archive_service.system_update(link[config.ID_FIELD], link_updates,
                                          link)
            app.on_archive_item_updated({'linked_to': target[config.ID_FIELD]},
                                        link, ITEM_LINK)

        insert_into_versions(id_=takes_package_id)

        if RE_OPENS.lower() in link.get('anpa_take_key', '').lower():
            app.on_archive_item_updated({'new_take_id': link[config.ID_FIELD]},
                                        target, ITEM_REOPEN)
        else:
            app.on_archive_item_updated({'new_take_id': link[config.ID_FIELD]},
                                        target, ITEM_TAKE)

        return link
Example #41
    def update(self, id, updates, original):
        original_state = original[ITEM_STATE]
        if not is_workflow_state_transition_valid(ITEM_SPIKE, original_state):
            raise InvalidStateTransitionError()

        archive_service = get_resource_service(ARCHIVE)
        published_service = get_resource_service("published")

        user = get_user(required=True)
        item = archive_service.find_one(req=None, _id=id)
        task = item.get("task", {})

        updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get("desk"),
                                                 stage_id=task.get("stage"))
        updates[REVERT_STATE] = item.get(ITEM_STATE, None)

        if original.get("rewrite_of"):
            updates["rewrite_of"] = None

        if original.get("rewritten_by"):
            updates["rewritten_by"] = None

        if original.get("broadcast"):
            updates["broadcast"] = None

        if original.get("rewrite_sequence"):
            updates["rewrite_sequence"] = None

        if original.get("marked_for_user"):
            # remove marked_for_user on spike and keep it as previous_marked_user for history
            updates["previous_marked_user"] = original["marked_for_user"]
            updates["marked_for_user"] = None
            updates["marked_for_sign_off"] = None

        if original.get("translation_id") and original.get("translated_from"):
            # remove translations info from the translated item on spike
            updates["translated_from"] = None
            updates["translation_id"] = None

            id_to_remove = original.get(config.ID_FIELD)

            # Remove the translated item from the list of translations in the original item,
            # which can live in the archive only or in both the archive and published resources
            translated_from = archive_service.find_one(
                req=None, _id=original.get("translated_from"))
            translated_from_id = translated_from.get(config.ID_FIELD)
            self._remove_translations(archive_service, translated_from,
                                      id_to_remove)

            if translated_from.get("state") in PUBLISH_STATES:
                published_items = list(
                    published_service.get_from_mongo(
                        req=None, lookup={"item_id": translated_from_id}))

                if published_items:
                    for item in published_items:
                        self._remove_translations(published_service, item,
                                                  id_to_remove)

        # remove any relation with linked items
        updates[ITEM_EVENT_ID] = generate_guid(type=GUID_TAG)

        # remove lock
        updates.update({
            "lock_user": None,
            "lock_session": None,
        })

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            # remove links from items in the package
            package_service = PackageService()
            items = package_service.get_item_refs(original)
            for item in items:
                package_item = archive_service.find_one(req=None,
                                                        _id=item[GUID_FIELD])
                if package_item:
                    linked_in_packages = [
                        linked
                        for linked in package_item.get(LINKED_IN_PACKAGES, [])
                        if linked.get(PACKAGE) != original.get(config.ID_FIELD)
                    ]
                    super().system_update(
                        package_item[config.ID_FIELD],
                        {LINKED_IN_PACKAGES: linked_in_packages}, package_item)

            # keep the structure of old group in order to be able to unspike the package
            updates[DELETED_GROUPS] = original[GROUPS]
            # and remove all the items from the package
            updates["groups"] = []

        item = self.backend.update(self.datasource, id, updates, original)
        push_notification("item:spike",
                          item=str(id),
                          user=str(user.get(config.ID_FIELD)))

        history_updates = dict(updates)
        if original.get("task"):
            history_updates["task"] = original.get("task")
        app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)
        self._removed_refs_from_package(id)
        return item
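
As a quick reference, the sketch below condenses the field clean-up that the spike update() above applies, using plain dicts in place of the archive resource; uuid4 stands in for Superdesk's generate_guid() and the constants are spelled out as plain keys. It is an illustration under those assumptions, not the service itself.

# A hypothetical, stand-alone condensation of the spike clean-up above; plain
# dicts replace the archive item and uuid4 stands in for generate_guid().
from uuid import uuid4


def spike_field_updates(original):
    """Return the updates a spike applies to break links and release the lock."""
    updates = {'revert_state': original.get('state')}
    # drop any rewrite / broadcast relations
    for field in ('rewrite_of', 'rewritten_by', 'broadcast', 'rewrite_sequence'):
        if original.get(field):
            updates[field] = None
    # keep the user mark for history, then clear it
    if original.get('marked_for_user'):
        updates['previous_marked_user'] = original['marked_for_user']
        updates['marked_for_user'] = None
        updates['marked_for_sign_off'] = None
    # sever the event grouping and release the lock
    updates['event_id'] = str(uuid4())
    updates.update({'lock_user': None, 'lock_session': None})
    return updates


if __name__ == '__main__':
    item = {'state': 'in_progress', 'rewrite_of': 'abc123', 'lock_user': 'u1'}
    updates = spike_field_updates(item)
    assert updates['rewrite_of'] is None and updates['lock_user'] is None
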
Example #42
    def create(self, docs, **kwargs):
        """Toggle marked desk status for given desk and item."""

        service = get_resource_service("archive")
        published_service = get_resource_service("published")
        ids = []
        for doc in docs:
            item = service.find_one(req=None, guid=doc["marked_item"])
            if not item:
                ids.append(None)
                continue
            ids.append(item["_id"])
            marked_desks = item.get("marked_desks", [])
            if not marked_desks:
                marked_desks = []

            existing_mark = next(
                (m
                 for m in marked_desks if m["desk_id"] == doc["marked_desk"]),
                None)

            if existing_mark:
                # there is an existing mark, so this is an un-mark action
                marked_desks = [
                    m for m in marked_desks
                    if m["desk_id"] != doc["marked_desk"]
                ]
                marked_desks_on = False  # desk mark toggled off
            else:
                # there is no existing mark, so this is a mark action
                user = get_user() or {}
                new_mark = {}
                new_mark["desk_id"] = doc["marked_desk"]
                new_mark["user_marked"] = str(user.get(config.ID_FIELD, ""))
                new_mark["date_marked"] = utcnow()
                marked_desks.append(new_mark)
                marked_desks_on = True

            updates = {"marked_desks": marked_desks}
            service.system_update(item["_id"], updates, item)

            publishedItems = published_service.find({"item_id": item["_id"]})
            for publishedItem in publishedItems:
                if publishedItem["_current_version"] == item[
                        "_current_version"] or not marked_desks_on:
                    updates = {"marked_desks": marked_desks}
                    published_service.system_update(publishedItem["_id"],
                                                    updates, publishedItem)

            push_notification("item:marked_desks",
                              marked=int(marked_desks_on),
                              item_id=item["_id"],
                              mark_id=str(doc["marked_desk"]))

            if marked_desks_on:
                app.on_archive_item_updated({"desk_id": doc["marked_desk"]},
                                            item, ITEM_MARK)
            else:
                app.on_archive_item_updated({"desk_id": doc["marked_desk"]},
                                            item, ITEM_UNMARK)

        return ids
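
The mark/un-mark branch above boils down to a pure toggle over the marked_desks list. A minimal sketch, assuming plain dicts and no Superdesk services, just to show the decision in isolation:

# A minimal, hypothetical sketch of the toggle performed by create() above:
# given the current list of desk marks, either remove the existing mark for
# the desk or append a new one. Plain dicts only; no Superdesk services.
from datetime import datetime, timezone


def toggle_desk_mark(marked_desks, desk_id, user_id):
    """Return (new_marks, marked_on) mirroring the mark/un-mark branches."""
    if any(m['desk_id'] == desk_id for m in marked_desks):
        # existing mark -> un-mark action
        return [m for m in marked_desks if m['desk_id'] != desk_id], False
    new_mark = {
        'desk_id': desk_id,
        'user_marked': user_id,
        'date_marked': datetime.now(timezone.utc),
    }
    return marked_desks + [new_mark], True


if __name__ == '__main__':
    marks, on = toggle_desk_mark([], 'sports-desk', 'user-1')
    assert on and marks[0]['desk_id'] == 'sports-desk'
    marks, on = toggle_desk_mark(marks, 'sports-desk', 'user-1')
    assert not on and marks == []
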
Example #43
    def create(self, docs, **kwargs):
        """Generate highlights text item for given package.

        If doc.preview is True it won't save the item, only return.
        """
        service = superdesk.get_resource_service('archive')
        for doc in docs:
            preview = doc.get('preview', False)
            package = service.find_one(req=None, _id=doc['package'])
            if not package:
                superdesk.abort(404)
            export = doc.get('export')
            template = get_template(package.get('highlight'))
            stringTemplate = None
            if template and template.get('data') and template['data'].get(
                    'body_html'):
                stringTemplate = template['data']['body_html']

            doc.clear()
            doc[ITEM_TYPE] = CONTENT_TYPE.TEXT
            doc['family_id'] = package.get('guid')
            doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
            doc[config.VERSION] = 1

            for field in package:
                if field not in PACKAGE_FIELDS:
                    doc[field] = package[field]

            items = []
            for group in package.get('groups', []):
                for ref in group.get('refs', []):
                    if 'residRef' in ref:
                        item = service.find_one(req=None,
                                                _id=ref.get('residRef'))
                        if item:
                            if not (export or preview) and \
                                    (item.get('lock_session') or item.get('state') != 'published'):
                                message = 'Locked or not published items in highlight list.'
                                raise SuperdeskApiError.forbiddenError(message)

                            items.append(item)
                            if not preview:
                                app.on_archive_item_updated(
                                    {
                                        'highlight_id':
                                        package.get('highlight'),
                                        'highlight_name':
                                        get_highlight_name(
                                            package.get('highlight'))
                                    }, item, ITEM_EXPORT_HIGHLIGHT)

            if stringTemplate:
                doc['body_html'] = render_template_string(stringTemplate,
                                                          package=package,
                                                          items=items)
            else:
                doc['body_html'] = render_template(
                    'default_highlight_template.txt',
                    package=package,
                    items=items)
        if preview:
            return ['' for doc in docs]
        else:
            ids = service.post(docs, **kwargs)
            for id in ids:
                app.on_archive_item_updated(
                    {
                        'highlight_id':
                        package.get('highlight'),
                        'highlight_name':
                        get_highlight_name(package.get('highlight'))
                    }, {'_id': id}, ITEM_CREATE_HIGHLIGHT)
            return ids
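
The body_html of the generated highlights item comes from rendering either the highlight template's body_html or the default template, with package and items in the context. Superdesk does this with Flask's render_template_string() inside the app context; the sketch below shows the same idea with a bare jinja2 Template, and the template string and item fields are made up for illustration.

# A hedged sketch of the rendering step used by create() above; a plain
# jinja2 Template stands in for Flask's render_template_string().
from jinja2 import Template

string_template = """
<h2>{{ package.headline }}</h2>
<ul>
{% for item in items %}  <li>{{ item.headline }}</li>
{% endfor %}</ul>
"""

package = {'headline': 'Daily highlights'}
items = [{'headline': 'Story one'}, {'headline': 'Story two'}]

# renders the package headline plus one list entry per referenced item
print(Template(string_template).render(package=package, items=items))
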
Example #44
def enqueue_item(published_item):
    """
    Creates the corresponding entries in the publish queue for the given item
    """
    published_item_id = ObjectId(published_item[config.ID_FIELD])
    published_service = get_resource_service(PUBLISHED)
    archive_service = get_resource_service(ARCHIVE)
    published_update = {QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS, 'last_queue_event': utcnow()}
    try:
        logger.info('Queueing item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))

        published_item = published_service.find_one(req=None, _id=published_item_id)
        if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
            logger.info('Queue State is not pending for published item {}. It is in {}'.
                        format(published_item_id, published_item.get(QUEUE_STATE)))
            return

        if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            # if scheduled then change the state to published
            # change the `version` and `versioncreated` for the item
            # in archive collection and published collection.
            versioncreated = utcnow()
            item_updates = {'versioncreated': versioncreated, ITEM_STATE: CONTENT_STATE.PUBLISHED}
            resolve_document_version(document=item_updates, resource=ARCHIVE,
                                     method='PATCH',
                                     latest_doc={config.VERSION: published_item[config.VERSION]})

            # update the archive collection
            archive_item = archive_service.find_one(req=None, _id=published_item['item_id'])
            archive_service.system_update(published_item['item_id'], item_updates, archive_item)
            # insert into version.
            insert_into_versions(published_item['item_id'], doc=None)
            # update archive history
            app.on_archive_item_updated(item_updates, archive_item, ITEM_PUBLISH)
            # import to legal archive
            import_into_legal_archive.apply_async(countdown=3, kwargs={'item_id': published_item['item_id']})
            logger.info('Modified the version of scheduled item: {}'.format(published_item_id))

            logger.info('Publishing scheduled item_id: {}'.format(published_item_id))
            # update the published collection
            published_update.update(item_updates)
            published_item.update({'versioncreated': versioncreated,
                                   ITEM_STATE: CONTENT_STATE.PUBLISHED,
                                   config.VERSION: item_updates[config.VERSION]})
            # send a notification to the clients
            push_content_notification(
                [{'_id': str(published_item['item_id']), 'task': published_item.get('task', None)}])

        published_service.patch(published_item_id, published_update)
        # queue the item for publishing
        queued = get_enqueue_service(published_item[ITEM_OPERATION]).enqueue_item(published_item)

        # if the item is queued in the publish_queue then the state is "queued"
        # else the queue state is "queued_not_transmitted"
        queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
        published_service.patch(published_item_id, {QUEUE_STATE: queue_state})
        logger.info('Queued item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))
    except KeyError as key_error:
        error_updates = {QUEUE_STATE: PUBLISH_STATE.ERROR, ERROR_MESSAGE: str(key_error)}
        published_service.patch(published_item_id, error_updates)
        logger.exception('No enqueue service found for operation %s', published_item[ITEM_OPERATION])
    except ConnectionTimeout as error:  # recoverable, set state to pending and retry next time
        error_updates = {QUEUE_STATE: PUBLISH_STATE.PENDING, ERROR_MESSAGE: str(error)}
        published_service.patch(published_item_id, error_updates)
        raise
    except Exception as error:
        error_updates = {QUEUE_STATE: PUBLISH_STATE.ERROR, ERROR_MESSAGE: str(error)}
        published_service.patch(published_item_id, error_updates)
        raise
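
The control flow of enqueue_item() above is easiest to read as a small state machine over QUEUE_STATE. The sketch below is a hypothetical, self-contained summary of those transitions; plain strings stand in for the PUBLISH_STATE constants and the keyword flags are illustrative inputs, not Superdesk arguments.

# A minimal sketch, not the Superdesk API: the queue-state transitions driven
# by enqueue_item(). Plain strings stand in for the PUBLISH_STATE constants.

def next_queue_state(current_state, queued=False, error=None, recoverable=False):
    """Return the queue state the item ends up in after one enqueue attempt."""
    if current_state != 'pending':
        # only items still pending are picked up; everything else is skipped
        return current_state
    if error is not None:
        # recoverable failures (e.g. a connection timeout) stay pending and
        # are retried later; anything else is parked in "error"
        return 'pending' if recoverable else 'error'
    # successfully processed: "queued" if something landed in publish_queue,
    # otherwise "queued_not_transmitted"
    return 'queued' if queued else 'queued_not_transmitted'


if __name__ == '__main__':
    assert next_queue_state('in_progress') == 'in_progress'
    assert next_queue_state('pending', queued=True) == 'queued'
    assert next_queue_state('pending') == 'queued_not_transmitted'
    assert next_queue_state('pending', error=TimeoutError(), recoverable=True) == 'pending'
    assert next_queue_state('pending', error=KeyError('operation')) == 'error'
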
Example #45
    def update_rewrite(self, original):
        """Removes the reference from the rewritten story in published collection."""
        rewrite_service = ArchiveRewriteService()
        if original.get('rewrite_of') and original.get(ITEM_EVENT_ID):
            rewrite_service._clear_rewritten_flag(original.get(ITEM_EVENT_ID),
                                                  original[config.ID_FIELD],
                                                  'rewritten_by')

        # write the rewritten_by to the take before the spiked one
        archive_service = get_resource_service(ARCHIVE)
        published_service = get_resource_service('published')
        takes_service = TakesPackageService()
        takes_package = takes_service.get_take_package(original)

        if takes_package and takes_package.get(
                SEQUENCE, 0) > 1 and original.get('rewritten_by'):
            # get the rewritten by
            rewritten_by = archive_service.find_one(
                req=None, _id=original.get('rewritten_by'))
            # get the take
            take_id = takes_service.get_take_by_take_no(
                original,
                take_no=takes_package.get(SEQUENCE) - 1,
                package=takes_package)
            take = archive_service.find_one(req=None, _id=take_id)

            # update the take and takes package with rewritten_by
            if take.get('rewritten_by') != rewritten_by[config.ID_FIELD]:
                if take.get(ITEM_STATE) in PUBLISH_STATES:
                    published_service.update_published_items(
                        take_id, 'rewritten_by', rewritten_by[config.ID_FIELD])

                archive_service.system_update(
                    take[config.ID_FIELD],
                    {'rewritten_by': rewritten_by[config.ID_FIELD]}, take)

            if takes_package.get('rewritten_by') != rewritten_by[
                    config.ID_FIELD]:
                if takes_package.get(ITEM_STATE) in PUBLISH_STATES:
                    published_service.update_published_items(
                        takes_package.get(config.ID_FIELD), 'rewritten_by',
                        rewritten_by[config.ID_FIELD])

                archive_service.system_update(
                    takes_package[config.ID_FIELD],
                    {'rewritten_by': rewritten_by[config.ID_FIELD]},
                    takes_package)

            if rewritten_by.get('rewrite_of') != takes_package.get(
                    config.ID_FIELD):
                archive_service.system_update(
                    rewritten_by[config.ID_FIELD],
                    {'rewrite_of': takes_package.get(config.ID_FIELD)},
                    rewritten_by)
        elif original.get('rewritten_by') or (
                takes_package and takes_package.get('rewritten_by')):
            # we are spiking the story from which the rewrite was triggered.
            # in this case both rewrite_of and rewritten_by are published.
            rewrite_id = original.get('rewritten_by') or takes_package.get(
                'rewritten_by')
            rewritten_by = archive_service.find_one(req=None, _id=rewrite_id)
            archive_service.system_update(rewrite_id, {
                'rewrite_of': None,
                'rewrite_sequence': 0
            }, rewritten_by)
            app.on_archive_item_updated(
                {
                    'rewrite_of': None,
                    'rewrite_sequence': 0
                }, original, ITEM_UNLINK)
Example #46
    def on_update(self, updates, original):
        remove_correction = request.args.get("remove_correction") == "true"
        self._validate_correction(original)
        archive_service = get_resource_service(ARCHIVE)
        published_service = get_resource_service("published")
        archive_item = archive_service.find_one(req=None,
                                                _id=original.get(
                                                    config.ID_FIELD))

        if remove_correction:
            published_article = published_service.find_one(
                req=None,
                guid=original.get("guid"),
                state=CONTENT_STATE.BEING_CORRECTED)

        elif original.get("state") == CONTENT_STATE.CORRECTED:
            published_article = published_service.find_one(
                req=None,
                guid=original.get("guid"),
                correction_sequence=original.get("correction_sequence"),
                state=CONTENT_STATE.CORRECTED,
            )
        else:
            published_article = published_service.find_one(
                req=None, guid=original.get("guid"))

        # updates for item in archive.
        if not remove_correction:
            archive_item_updates = {
                ITEM_STATE: CONTENT_STATE.CORRECTION,
                "operation": CONTENT_STATE.CORRECTION
            }
        elif remove_correction and archive_item.get("correction_sequence"):
            archive_item_updates = {
                ITEM_STATE: CONTENT_STATE.CORRECTED,
                "operation": ITEM_CANCEL_CORRECTION
            }
        else:
            archive_item_updates = {
                ITEM_STATE: CONTENT_STATE.PUBLISHED,
                "operation": ITEM_CANCEL_CORRECTION
            }

        # updates for item in published.
        if not remove_correction:
            published_item_updates = {
                ITEM_STATE: CONTENT_STATE.BEING_CORRECTED,
                "operation": CONTENT_STATE.BEING_CORRECTED,
            }
        elif remove_correction and published_article.get(
                "correction_sequence"):
            published_item_updates = {
                ITEM_STATE: CONTENT_STATE.CORRECTED,
                "operation": "correct"
            }
        else:
            published_item_updates = {
                ITEM_STATE: CONTENT_STATE.PUBLISHED,
                "operation": ITEM_CANCEL_CORRECTION
            }

        # clear publishing schedule when we create correction
        if archive_item.get("publish_schedule"):
            archive_item_updates.update({
                "publish_schedule": None,
                "schedule_settings": {}
            })

        # modify item in archive.
        archive_service.system_update(archive_item.get(config.ID_FIELD),
                                      archive_item_updates, archive_item)
        app.on_archive_item_updated(archive_item_updates, archive_item,
                                    ITEM_CORRECTION)

        # modify item in published.
        published_service.patch(id=published_article.get(config.ID_FIELD),
                                updates=published_item_updates)

        user = get_user(required=True)
        push_notification("item:correction",
                          item=original.get(config.ID_FIELD),
                          user=str(user.get(config.ID_FIELD)))
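
The branching in on_update() above reduces to a small state table. The sketch below is a hypothetical condensation using plain strings for the CONTENT_STATE values; note that the real code checks correction_sequence separately on the archive copy and on the published copy, while the sketch merges that into one flag.

# A hypothetical condensation of the state handling in on_update() above.

def correction_states(remove_correction, has_correction_sequence):
    """Return the states written to the archive and published items."""
    if not remove_correction:
        # a correction is being opened
        return {'archive': 'correction', 'published': 'being_corrected'}
    if has_correction_sequence:
        # cancelling a correction on an already corrected story
        return {'archive': 'corrected', 'published': 'corrected'}
    # cancelling the first correction: fall back to the published state
    return {'archive': 'published', 'published': 'published'}


if __name__ == '__main__':
    assert correction_states(False, False)['archive'] == 'correction'
    assert correction_states(True, True)['published'] == 'corrected'
    assert correction_states(True, False)['archive'] == 'published'
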
Example #47
    def create(self, docs, **kwargs):
        """Generate highlights text item for given package.

        If doc.preview is True it won't save the item, only return.
        """
        service = superdesk.get_resource_service("archive")
        for doc in docs:
            preview = doc.get("preview", False)
            package = service.find_one(req=None, _id=doc["package"])
            if not package:
                superdesk.abort(404)
            export = doc.get("export")
            template = get_template(package.get("highlight"))
            stringTemplate = None
            if template and template.get("data") and template["data"].get(
                    "body_html"):
                stringTemplate = template["data"]["body_html"]

            doc.clear()
            doc[ITEM_TYPE] = CONTENT_TYPE.TEXT
            doc["family_id"] = package.get("guid")
            doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
            doc[config.VERSION] = 1

            for field in package:
                if field not in PACKAGE_FIELDS:
                    doc[field] = package[field]

            items = []
            for group in package.get("groups", []):
                for ref in group.get("refs", []):
                    if "residRef" in ref:
                        item = service.find_one(req=None,
                                                _id=ref.get("residRef"))
                        if item:
                            if not (export or preview) and (
                                    item.get("lock_session")
                                    or item.get("state") != "published"):
                                message = "Locked or not published items in highlight list."
                                raise SuperdeskApiError.forbiddenError(message)

                            items.append(item)
                            if not preview:
                                app.on_archive_item_updated(
                                    {
                                        "highlight_id":
                                        package.get("highlight"),
                                        "highlight_name":
                                        get_highlight_name(
                                            package.get("highlight")),
                                    },
                                    item,
                                    ITEM_EXPORT_HIGHLIGHT,
                                )

            if stringTemplate:
                doc["body_html"] = render_template_string(stringTemplate,
                                                          package=package,
                                                          items=items)
            else:
                doc["body_html"] = render_template(
                    "default_highlight_template.txt",
                    package=package,
                    items=items)
        if preview:
            return ["" for doc in docs]
        else:
            ids = service.post(docs, **kwargs)
            for id in ids:
                app.on_archive_item_updated(
                    {
                        "highlight_id":
                        package.get("highlight"),
                        "highlight_name":
                        get_highlight_name(package.get("highlight")),
                    },
                    {"_id": id},
                    ITEM_CREATE_HIGHLIGHT,
                )
            return ids
Example #48
    def create(self, docs, **kwargs):
        """Toggle marked desk status for given desk and item."""

        service = get_resource_service('archive')
        published_service = get_resource_service('published')
        ids = []
        for doc in docs:
            item = service.find_one(req=None, guid=doc['marked_item'])
            if not item:
                ids.append(None)
                continue
            ids.append(item['_id'])
            marked_desks = item.get('marked_desks', [])
            if not marked_desks:
                marked_desks = []

            existing_mark = next(
                (m
                 for m in marked_desks if m['desk_id'] == doc['marked_desk']),
                None)

            if existing_mark:
                # there is an existing mark, so this is an un-mark action
                marked_desks = [
                    m for m in marked_desks
                    if m['desk_id'] != doc['marked_desk']
                ]
                marked_desks_on = False  # desk mark toggled off
            else:
                # there is no existing mark, so this is a mark action
                new_mark = {}
                new_mark['desk_id'] = doc['marked_desk']
                new_mark['user_marked'] = str(
                    get_user(True).get(config.ID_FIELD, ''))
                new_mark['date_marked'] = utcnow()
                marked_desks.append(new_mark)
                marked_desks_on = True

            updates = {'marked_desks': marked_desks}
            service.system_update(item['_id'], updates, item)

            publishedItems = published_service.find({'item_id': item['_id']})
            for publishedItem in publishedItems:
                if publishedItem['_current_version'] == item[
                        '_current_version'] or not marked_desks_on:
                    updates = {'marked_desks': marked_desks}
                    published_service.system_update(publishedItem['_id'],
                                                    updates, publishedItem)

            push_notification('item:marked_desks',
                              marked=int(marked_desks_on),
                              item_id=item['_id'],
                              mark_id=str(doc['marked_desk']))

            if marked_desks_on:
                app.on_archive_item_updated({'desk_id': doc['marked_desk']},
                                            item, ITEM_MARK)
            else:
                app.on_archive_item_updated({'desk_id': doc['marked_desk']},
                                            item, ITEM_UNMARK)

        return ids