Example #1
    def on_updated(self, updates, original):
        original = super().find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] not in {ITEM_KILL, ITEM_TAKEDOWN} and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service('archive_broadcast').on_broadcast_master_updated(updates[ITEM_OPERATION], original)

        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
        CropService().update_media_references(updates, original, True)
        superdesk.item_published.send(self, item=original)
        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service('archive_correct')
            processed_packages = []
            for package in packages:
                original_updates = {'operation': updates['operation'], ITEM_STATE: updates[ITEM_STATE]}
                if package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] and \
                        package.get(PACKAGE_TYPE, '') == '' and \
                        str(package[config.ID_FIELD]) not in processed_packages:
                    original_updates['groups'] = package['groups']

                    if updates.get('headline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'headline', updates.get('headline'))

                    if updates.get('slugline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'slugline', updates.get('slugline'))

                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #2
    def on_updated(self, updates, original):
        original = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] != ITEM_KILL and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service('archive_broadcast').on_broadcast_master_updated(updates[ITEM_OPERATION], original)

        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
        CropService().update_media_references(updates, original, True)
        superdesk.item_published.send(self, item=original)
        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service('archive_correct')
            processed_packages = []
            for package in packages:
                original_updates = {'operation': updates['operation'], ITEM_STATE: updates[ITEM_STATE]}
                if package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] and \
                        package.get(PACKAGE_TYPE, '') == '' and \
                        str(package[config.ID_FIELD]) not in processed_packages:
                    original_updates['groups'] = package['groups']

                    if updates.get('headline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'headline', updates.get('headline'))

                    if updates.get('slugline'):
                        self.package_service.update_field_in_package(original_updates, original[config.ID_FIELD],
                                                                     'slugline', updates.get('slugline'))

                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #3
    def _translate_item(self, guid, language, task=None, service=None, state=None, **kwargs):
        if not service:
            service = ARCHIVE
        archive_service = get_resource_service(service)
        macros_service = get_resource_service("macros")
        published_service = get_resource_service("published")

        item = archive_service.find_one(req=None, guid=guid)
        if not item:
            raise SuperdeskApiError.notFoundError(_("Failed to find item with guid: {guid}").format(guid=guid))

        if not is_workflow_state_transition_valid("translate", item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        if item.get("language") == language:
            return guid

        if package_service.is_package(item):
            refs = package_service.get_item_refs(item)
            for ref in refs:
                ref[RESIDREF] = self._translate_item(ref[RESIDREF], language, service=ref.get("location"), task=task)

        if not item.get("translation_id"):
            item["translation_id"] = item["guid"]

        macros_service.execute_translation_macro(item, item.get("language", None), language)

        item["language"] = language
        item["translated_from"] = guid
        item["versioncreated"] = utcnow()
        item["firstcreated"] = utcnow()
        if task:
            item["task"] = task

        extra_fields = ["translation_id", "translated_from"]

        UPDATE_TRANSLATION_METADATA_MACRO = app.config.get("UPDATE_TRANSLATION_METADATA_MACRO")

        if UPDATE_TRANSLATION_METADATA_MACRO and macros_service.get_macro_by_name(UPDATE_TRANSLATION_METADATA_MACRO):
            macros_service.execute_macro(item, UPDATE_TRANSLATION_METADATA_MACRO)

        translation_guid = archive_service.duplicate_item(
            item, extra_fields=extra_fields, state=state, operation="translate"
        )

        item.setdefault("translations", []).append(translation_guid)

        updates = {
            "translation_id": item["translation_id"],
            "translations": item["translations"],
        }

        archive_service.system_update(item["_id"], updates, item)
        published_service.update_published_items(item["_id"], "translation_id", item["_id"])
        published_service.update_published_items(item["_id"], "translations", item["translations"])

        if kwargs.get("notify", True):
            push_content_notification([item])

        return translation_guid
Example #4
    def create(self, docs, **kwargs):
        guid_of_translated_items = []

        for doc in docs:
            guid_of_item_to_be_translated = doc.get('guid')
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_translated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' %
                                                      guid_of_item_to_be_translated)

            if not is_workflow_state_transition_valid('translate', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            get_resource_service('macros').execute_translation_macro(
                archived_doc, archived_doc.get('language', None), doc.get('language'))
            archived_doc['language'] = doc.get('language')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_translated_items.append(new_guid)

            if kwargs.get('notify', True):
                push_content_notification([archived_doc])

        return guid_of_translated_items
Example #5
    def on_created(self, docs):
        packages = [doc for doc in docs if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE]
        if packages:
            self.packageService.on_created(packages)

        profiles = set()
        for doc in docs:
            subject = get_subject(doc)
            if subject:
                msg = 'added new {{ type }} item about "{{ subject }}"'
            else:
                msg = 'added new {{ type }} item with empty header/title'
            add_activity(ACTIVITY_CREATE, msg,
                         self.datasource, item=doc, type=doc[ITEM_TYPE], subject=subject)

            if doc.get('profile'):
                profiles.add(doc['profile'])

            self.cropService.update_media_references(doc, {})
            if doc[ITEM_OPERATION] == ITEM_FETCH:
                app.on_archive_item_updated({'task': doc.get('task')}, doc, ITEM_FETCH)
            else:
                app.on_archive_item_updated({'task': doc.get('task')}, doc, ITEM_CREATE)

        get_resource_service('content_types').set_used(profiles)
        push_content_notification(docs)
Example #6
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' %
                                                      guid_of_item_to_be_duplicated)

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item is None or str(current_desk_of_item) != str(doc.get('desk')):
                raise SuperdeskApiError.preconditionFailedError(message='Duplicate is only allowed within the same desk.')

            if not is_workflow_state_transition_valid('duplicate', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #7
 def on_replaced(self, document, original):
     get_component(ItemAutosave).clear(original['_id'])
     add_activity(ACTIVITY_UPDATE, 'replaced item {{ type }} about {{ subject }}',
                  self.datasource, item=original,
                  type=original['type'], subject=get_subject(original))
     push_content_notification([document, original])
     self.cropService.update_media_references(document, original)
Example #9
    def _remove_marked_user(self, item):
        """Remove the marked_for_user from all the published items having same 'item_id' as item being killed."""
        item_id = item.get('_id')
        if not item_id:
            return

        updates = {'marked_for_user': None}
        published_service = get_resource_service(PUBLISHED)

        published_items = list(
            published_service.get_from_mongo(req=None,
                                             lookup={'item_id': item_id}))
        if not published_items:
            return

        for item in published_items:
            if item and item.get('marked_for_user'):
                updated = item.copy()
                updated.update(updates)

                published_service.system_update(ObjectId(item.get('_id')),
                                                updates, item)
                # send notifications so that list can be updated in the client
                get_resource_service('archive').handle_mark_user_notifications(
                    updates, item, False)
                push_content_notification([updated, item])
Example #10
    def on_created(self, docs):
        packages = [
            doc for doc in docs if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE
        ]
        if packages:
            self.packageService.on_created(packages)

        profiles = set()
        for doc in docs:
            subject = get_subject(doc)
            if subject:
                msg = 'added new {{ type }} item about "{{ subject }}"'
            else:
                msg = 'added new {{ type }} item with empty header/title'
            add_activity(ACTIVITY_CREATE,
                         msg,
                         self.datasource,
                         item=doc,
                         type=doc[ITEM_TYPE],
                         subject=subject)

            if doc.get('profile'):
                profiles.add(doc['profile'])

            self.cropService.update_media_references(doc, {})
            if doc[ITEM_OPERATION] == ITEM_FETCH:
                app.on_archive_item_updated({'task': doc.get('task')}, doc,
                                            ITEM_FETCH)
            else:
                app.on_archive_item_updated({'task': doc.get('task')}, doc,
                                            ITEM_CREATE)

        get_resource_service('content_types').set_used(profiles)
        push_content_notification(docs)
Example #11
    def on_created(self, docs):
        packages = [
            doc for doc in docs if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE
        ]
        if packages:
            self.packageService.on_created(packages)

        profiles = set()
        for doc in docs:
            subject = get_subject(doc)
            if subject:
                msg = 'added new {{ type }} item about "{{ subject }}"'
            else:
                msg = 'added new {{ type }} item with empty header/title'
            add_activity(ACTIVITY_CREATE,
                         msg,
                         self.datasource,
                         item=doc,
                         type=doc[ITEM_TYPE],
                         subject=subject)

            if doc.get('profile'):
                profiles.add(doc['profile'])

        get_resource_service('content_types').set_used(profiles)
        push_content_notification(docs)
Example #12
    def on_updated(self, updates, original):
        get_component(ItemAutosave).clear(original['_id'])

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self.packageService.on_updated(updates, original)

        updated = copy(original)
        updated.update(updates)

        if config.VERSION in updates:
            add_activity(
                ACTIVITY_UPDATE,
                'created new version {{ version }} for item {{ type }} about "{{ subject }}"',
                self.datasource,
                item=updated,
                version=updates[config.VERSION],
                subject=get_subject(updates, original),
                type=updated[ITEM_TYPE])

        push_content_notification([updated, original])
        get_resource_service('archive_broadcast').reset_broadcast_status(
            updates, original)

        if updates.get('profile'):
            get_resource_service('content_types').set_used(
                [updates.get('profile')])

        self.cropService.update_media_references(updates, original)
Example #13
    def unlock(self, item_filter, user_id, session_id, etag):
        item_model = get_model(ItemModel)
        item = item_model.find_one(item_filter)

        if not item:
            raise SuperdeskApiError.notFoundError()

        if not item.get(LOCK_USER):
            raise SuperdeskApiError.badRequestError(message="Item is not locked.")

        can_user_unlock, error_message = self.can_unlock(item, user_id)

        if can_user_unlock:
            self.app.on_item_unlock(item, user_id)

            # delete the item if nothing is saved so far
            # version 0 created on lock item
            if item.get(config.VERSION, 0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
                superdesk.get_resource_service('archive').delete_action(lookup={'_id': item['_id']})
                push_content_notification([item])
            else:
                updates = {LOCK_USER: None, LOCK_SESSION: None, 'lock_time': None, 'force_unlock': True}
                item_model.update(item_filter, updates)
                self.app.on_item_unlocked(item, user_id)

            push_notification('item:unlock',
                              item=str(item_filter.get(config.ID_FIELD)),
                              item_version=str(item.get(config.VERSION)),
                              state=item.get(ITEM_STATE),
                              user=str(user_id), lock_session=str(session_id))
        else:
            raise SuperdeskApiError.forbiddenError(message=error_message)

        item = item_model.find_one(item_filter)
        return item
Example #14
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(
                req=None, _id=guid_of_item_to_be_duplicated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError(
                    'Failed to find item with guid: %s' %
                    guid_of_item_to_be_duplicated)

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item is None or str(
                    current_desk_of_item) != str(doc.get('desk')):
                raise SuperdeskApiError.preconditionFailedError(
                    message='Duplicate is only allowed within the same desk.')

            if not is_workflow_state_transition_valid(
                    'duplicate', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #15
    def create(self, docs, **kwargs):
        guid_of_translated_items = []

        for doc in docs:
            guid_of_item_to_be_translated = doc.get('guid')
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(
                req=None, _id=guid_of_item_to_be_translated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError(
                    'Failed to find item with guid: %s' %
                    guid_of_item_to_be_translated)

            if not is_workflow_state_transition_valid(
                    'translate', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            get_resource_service('macros').execute_translation_macro(
                archived_doc, archived_doc.get('language', None),
                doc.get('language'))
            archived_doc['language'] = doc.get('language')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_translated_items.append(new_guid)

            if kwargs.get('notify', True):
                push_content_notification([archived_doc])

        return guid_of_translated_items
Example #16
 def on_updated(self, updates, original):
     if "marked_for_user" in updates:
         updated = original.copy()
         updated.update(updates)
         # Send notification on mark-for-user operation
         get_resource_service("archive").handle_mark_user_notifications(updates, original)
         push_content_notification([updated, original])  # see SDBELGA-192
Example #17
    def create(self, docs):
        ids = []
        production = get_resource_service('archive')
        assignments_service = get_resource_service('assignments')
        items = []

        for doc in docs:
            assignment = assignments_service.find_one(
                req=None, _id=doc.pop('assignment_id'))
            item = production.find_one(req=None, _id=doc.pop('item_id'))
            # Boolean set to true if the unlink is the result of spiking the content item
            spike = doc.pop('spike', False)

            # Set the state to 'assigned' if the item is 'submitted'
            updates = {'assigned_to': deepcopy(assignment.get('assigned_to'))}
            updates['assigned_to'][
                'state'] = ASSIGNMENT_WORKFLOW_STATE.ASSIGNED
            assignments_service.patch(assignment[config.ID_FIELD], updates)

            production.system_update(item[config.ID_FIELD],
                                     {'assignment_id': None}, item)

            get_resource_service('delivery').delete_action(
                lookup={
                    'assignment_id': assignment[config.ID_FIELD],
                    'item_id': item[config.ID_FIELD]
                })

            doc.update(item)
            ids.append(doc[config.ID_FIELD])
            items.append(item)

            user = get_user()
            PlanningNotifications().notify_assignment(
                target_desk=item.get('task').get('desk'),
                message='{{actioning_user}} has {{action}} '
                'a {{coverage_type}} coverage for \"{{slugline}}\"',
                actioning_user=user.get('display_name',
                                        user.get('username', 'Unknown')),
                action='unlinked' if not spike else 'spiked',
                coverage_type=get_coverage_type_name(item.get('type', '')),
                slugline=item.get('slugline'),
                omit_user=True)

            push_content_notification(items)
            push_notification('content:unlink',
                              item=str(item[config.ID_FIELD]),
                              assignment=str(assignment[config.ID_FIELD]))

        assignment_history_service = get_resource_service(
            'assignments_history')
        if spike:
            assignment_history_service.on_item_content_unlink(
                updates, assignment, ASSIGNMENT_HISTORY_ACTIONS.SPIKE_UNLINK)
        else:
            assignment_history_service.on_item_content_unlink(
                updates, assignment)

        return ids
Example #18
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = {}

            if doc.get('type') == 'archived':
                archived_service = get_resource_service('archived')
                req = ParsedRequest()
                query = {
                    'query': {
                        'filtered': {
                            'filter': {
                                'bool': {
                                    'must': [{
                                        'term': {
                                            'item_id': doc.get('item_id')
                                        }
                                    }]
                                }
                            }
                        }
                    },
                    "sort": [{
                        "_current_version": "desc"
                    }],
                    "size": 1
                }
                req.args = {'source': json.dumps(query)}
                archived_docs = archived_service.get(req=req, lookup=None)
                if archived_docs.count() > 0:
                    archived_doc = archived_docs[0]

            else:
                archived_doc = archive_service.find_one(
                    req=None, _id=guid_of_item_to_be_duplicated)

            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            # reset timestamps
            archived_doc['versioncreated'] = archived_doc[
                'firstcreated'] = utcnow()
            archived_doc['firstpublished'] = None

            send_to(doc=archived_doc,
                    desk_id=doc.get('desk'),
                    stage_id=doc.get('stage'),
                    default_stage='working_stage')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #19
    def _translate_item(self,
                        guid,
                        language,
                        task=None,
                        service=None,
                        **kwargs):
        if not service:
            service = ARCHIVE
        archive_service = get_resource_service(service)
        macros_service = get_resource_service('macros')
        published_service = get_resource_service('published')

        item = archive_service.find_one(req=None, _id=guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                'Failed to find item with guid: %s' % guid)

        if not is_workflow_state_transition_valid('translate',
                                                  item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        if item.get('language') == language:
            return guid

        if package_service.is_package(item):
            refs = package_service.get_item_refs(item)
            for ref in refs:
                ref[RESIDREF] = self._translate_item(
                    ref[RESIDREF],
                    language,
                    service=ref.get('location'),
                    task=task)
        if not item.get('translation_id'):
            archive_service.system_update(item['_id'],
                                          {'translation_id': item['_id']},
                                          item)
            item['translation_id'] = item['_id']
            published_service.update_published_items(item['_id'],
                                                     'translation_id',
                                                     item['_id'])

        macros_service.execute_translation_macro(item,
                                                 item.get('language', None),
                                                 language)

        item['language'] = language
        item['translated_from'] = guid
        item['versioncreated'] = utcnow()
        item['firstcreated'] = utcnow()
        if task:
            item['task'] = task

        _id = archive_service.duplicate_item(item, operation='translate')

        if kwargs.get('notify', True):
            push_content_notification([item])

        return _id
Example #20
    def unlock(self, item_filter, user_id, session_id, etag):
        item_model = get_model(ItemModel)
        item = item_model.find_one(item_filter)

        if not item:
            raise SuperdeskApiError.notFoundError()

        if not item.get(LOCK_USER):
            raise SuperdeskApiError.badRequestError(
                message=_("Item is not locked."))

        can_user_unlock, error_message = self.can_unlock(item, user_id)

        if can_user_unlock:
            self.app.on_item_unlock(item, user_id)
            updates = {}

            # delete the item if nothing is saved so far
            # version 0 created on lock item
            if item.get(config.VERSION,
                        0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
                if item.get(ITEM_TYPE) == CONTENT_TYPE.COMPOSITE:
                    # if item is composite then update referenced items in package.
                    PackageService().update_groups({}, item)

                superdesk.get_resource_service("archive").delete_action(
                    lookup={"_id": item["_id"]})
                push_content_notification([item])
            else:
                updates = {}
                set_unlock_updates(updates)
                autosave = superdesk.get_resource_service(
                    "archive_autosave").find_one(req=None, _id=item["_id"])
                if autosave and item[ITEM_STATE] not in PUBLISH_STATES:
                    if not hasattr(
                            flask.g,
                            "user"):  # user is not set when session expires
                        flask.g.user = superdesk.get_resource_service(
                            "users").find_one(req=None, _id=user_id)
                    autosave.update(updates)
                    resolve_document_version(autosave, "archive", "PATCH",
                                             item)
                    superdesk.get_resource_service("archive").patch(
                        item["_id"], autosave)
                    item = superdesk.get_resource_service("archive").find_one(
                        req=None, _id=item["_id"])
                    insert_versioning_documents("archive", item)
                else:
                    item_model.update(item_filter, updates)
                    item = item_model.find_one(item_filter)
                self.app.on_item_unlocked(item, user_id)

            push_unlock_notification(item, user_id, session_id)
        else:
            raise SuperdeskApiError.forbiddenError(message=error_message)

        return item
Example #21
    def on_deleted(self, doc):
        if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self.packageService.on_deleted(doc)

        remove_media_files(doc)

        add_activity(ACTIVITY_DELETE, 'removed item {{ type }} about {{ subject }}',
                     self.datasource, item=doc,
                     type=doc[ITEM_TYPE], subject=get_subject(doc))
        push_content_notification([doc])
Example #22
    def on_updated(self, updates, original):
        original = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] != ITEM_KILL and \
                original.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            get_resource_service('archive_broadcast').on_broadcast_master_updated(updates[ITEM_OPERATION], original)

        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
Example #23
    def remove_marks(self, desk_id):
        """Remove "mark for desk" attribute

        :param ObjectId desk_id: id of the desk being re-opened
        """
        for service_name in ("archive", "published"):
            service = get_resource_service(service_name)
            marked_items = service.find({"task.desk": desk_id, "marked_desks": {"$exists": True, "$ne": []}})
            for item in marked_items:
                marked_desks = [m for m in item["marked_desks"] if "user_marked" in m]
                service.system_update(item["_id"], {"marked_desks": marked_desks}, item)
                push_content_notification([item])
Example #24
    def on_updated(self, updates, original):
        original = super().find_one(req=None, _id=original[config.ID_FIELD])
        updates.update(original)

        if updates[ITEM_OPERATION] not in {ITEM_KILL, ITEM_TAKEDOWN
                                           } and original.get(ITEM_TYPE) in [
                                               CONTENT_TYPE.TEXT,
                                               CONTENT_TYPE.PREFORMATTED,
                                           ]:
            get_resource_service(
                "archive_broadcast").on_broadcast_master_updated(
                    updates[ITEM_OPERATION], original)

        get_resource_service("archive_broadcast").reset_broadcast_status(
            updates, original)
        push_content_notification([updates])
        self._import_into_legal_archive(updates)
        CropService().update_media_references(updates, original, True)

        # Do not send the item if it is scheduled; on real publishing, send the item to the internal destination
        if not updates.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            signals.item_published.send(self, item=original)

        packages = self.package_service.get_packages(original[config.ID_FIELD])
        if packages and packages.count() > 0:
            archive_correct = get_resource_service("archive_correct")
            processed_packages = []
            for package in packages:
                original_updates = {
                    "operation": updates["operation"],
                    ITEM_STATE: updates[ITEM_STATE]
                }
                if (package[ITEM_STATE]
                        in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]
                        and package.get(PACKAGE_TYPE, "") == ""
                        and str(package[config.ID_FIELD])
                        not in processed_packages):
                    original_updates["groups"] = package["groups"]

                    if updates.get("headline"):
                        self.package_service.update_field_in_package(
                            original_updates, original[config.ID_FIELD],
                            "headline", updates.get("headline"))

                    if updates.get("slugline"):
                        self.package_service.update_field_in_package(
                            original_updates, original[config.ID_FIELD],
                            "slugline", updates.get("slugline"))

                    archive_correct.patch(id=package[config.ID_FIELD],
                                          updates=original_updates)
                    insert_into_versions(id_=package[config.ID_FIELD])
                    processed_packages.append(package[config.ID_FIELD])
Example #25
    def unlock(self, item_filter, user_id, session_id, etag):
        item_model = get_model(ItemModel)
        item = item_model.find_one(item_filter)

        if not item:
            raise SuperdeskApiError.notFoundError()

        if not item.get(LOCK_USER):
            raise SuperdeskApiError.badRequestError(message="Item is not locked.")

        can_user_unlock, error_message = self.can_unlock(item, user_id)

        if can_user_unlock:
            self.app.on_item_unlock(item, user_id)
            updates = {}

            # delete the item if nothing is saved so far
            # version 0 created on lock item
            if item.get(config.VERSION, 0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
                if item.get(ITEM_TYPE) == CONTENT_TYPE.COMPOSITE:
                    # if item is composite then update referenced items in package.
                    PackageService().update_groups({}, item)

                superdesk.get_resource_service('archive').delete_action(lookup={'_id': item['_id']})
                push_content_notification([item])
            else:
                updates = {LOCK_USER: None, LOCK_SESSION: None, 'lock_time': None,
                           'lock_action': None, 'force_unlock': True}
                autosave = superdesk.get_resource_service('archive_autosave').find_one(req=None, _id=item['_id'])
                if autosave and item[ITEM_STATE] not in PUBLISH_STATES:
                    if not hasattr(flask.g, 'user'):  # user is not set when session expires
                        flask.g.user = superdesk.get_resource_service('users').find_one(req=None, _id=user_id)
                    autosave.update(updates)
                    resolve_document_version(autosave, 'archive', 'PATCH', item)
                    superdesk.get_resource_service('archive').patch(item['_id'], autosave)
                    item = superdesk.get_resource_service('archive').find_one(req=None, _id=item['_id'])
                    insert_versioning_documents('archive', item)
                else:
                    item_model.update(item_filter, updates)
                    item = item_model.find_one(item_filter)
                self.app.on_item_unlocked(item, user_id)

            push_notification('item:unlock',
                              item=str(item_filter.get(config.ID_FIELD)),
                              item_version=str(item.get(config.VERSION)),
                              state=item.get(ITEM_STATE),
                              user=str(user_id), lock_session=str(session_id),
                              _etag=item.get(config.ETAG))
        else:
            raise SuperdeskApiError.forbiddenError(message=error_message)

        return item
Example #26
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(
                doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk',
                                                  archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc,
                desk_id=doc.get('task', {}).get('desk'),
                stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {
                CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED
        }:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
Example #27
    def on_created(self, docs):
        packages = [doc for doc in docs if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE]
        if packages:
            self.packageService.on_created(packages)

        for doc in docs:
            subject = get_subject(doc)
            if subject:
                msg = 'added new {{ type }} item about "{{ subject }}"'
            else:
                msg = 'added new {{ type }} item with empty header/title'
            add_activity(ACTIVITY_CREATE, msg,
                         self.datasource, item=doc, type=doc[ITEM_TYPE], subject=subject)
        push_content_notification(docs)
Example #28
    def on_deleted(self, doc):
        """Validate we can safely delete the Assignment item

        Make sure to clean up the Archive, Delivery and Planning items by:
            * Remove 'assignment_id' from Archive item (if linked)
            * Delete the Delivery record associated with the Assignment & Archive items (if linked)
            * Removing 'assigned_to' dictionary from the associated Coverage
        """
        archive_service = get_resource_service('archive')
        delivery_service = get_resource_service('delivery')
        planning_service = get_resource_service('planning')
        assignment_id = doc.get(config.ID_FIELD)

        # If we have a Content Item linked, then we need to remove the
        # assignment_id from it and remove the delivery record
        # Then send a notification that the content has been updated
        archive_item = archive_service.find_one(req=None,
                                                assignment_id=assignment_id)
        if archive_item:
            archive_service.system_update(archive_item[config.ID_FIELD],
                                          {'assignment_id': None},
                                          archive_item)

            delivery_service.delete_action(
                lookup={
                    'assignment_id': assignment_id,
                    'item_id': archive_item[config.ID_FIELD]
                })

            # Push content notification so connected clients can update the
            # content views (i.e. removes the Calendar icon from Monitoring)
            push_content_notification([archive_item])

        # Remove assignment information from coverage
        updated_planning = planning_service.remove_assignment(
            doc, unlock_planning=True)

        # Finally send a notification to connected clients that the Assignment
        # has been removed
        push_notification(
            'assignments:removed',
            item=archive_item[config.ID_FIELD] if archive_item else None,
            assignment=assignment_id,
            planning=doc.get('planning_item'),
            coverage=doc.get('coverage_item'),
            planning_etag=updated_planning.get(config.ETAG),
            session=get_auth()['_id'])

        # publish planning
        self.publish_planning(doc.get('planning_item'))
Example #29
    def test_push_content_notification(self, push_notification):
        foo1 = {"_id": "foo", "task": {"desk": "sports", "stage": "inbox"}}
        foo2 = {"_id": "foo", "task": {"desk": "news", "stage": "todo"}}
        foo3 = {"_id": "foo"}

        push_content_notification([foo1, foo2, foo3])
        push_notification.assert_called_once_with("content:update",
                                                  user="",
                                                  items={"foo": 1},
                                                  desks={
                                                      "sports": 1,
                                                      "news": 1
                                                  },
                                                  stages={
                                                      "inbox": 1,
                                                      "todo": 1
                                                  })
Example #30
    def test_push_content_notification(self, push_notification):
        foo1 = {'_id': 'foo', 'task': {'desk': 'sports', 'stage': 'inbox'}}
        foo2 = {'_id': 'foo', 'task': {'desk': 'news', 'stage': 'todo'}}
        foo3 = {'_id': 'foo'}

        push_content_notification([foo1, foo2, foo3])
        push_notification.assert_called_once_with('content:update',
                                                  user='',
                                                  items={'foo': 1},
                                                  desks={
                                                      'sports': 1,
                                                      'news': 1
                                                  },
                                                  stages={
                                                      'inbox': 1,
                                                      'todo': 1
                                                  })
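
Taken together, the two tests above (where push_notification is presumably a mock injected via mock.patch) pin down the observable contract of push_content_notification: item ids, desk ids and stage ids are collected from each item into dictionaries, duplicate ids collapse into a single key, and exactly one 'content:update' notification is pushed. Below is a minimal sketch consistent with those assertions, assuming the push_notification helper from superdesk.notification; the actual implementation may differ in detail:

    import flask

    from superdesk.notification import push_notification

    def push_content_notification(items, event='content:update'):
        """Aggregate item, desk and stage ids and push a single notification."""
        ids, desks, stages = {}, {}, {}
        for item in items:
            ids[str(item.get('_id', ''))] = 1
            task = item.get('task') or {}
            if task.get('desk'):
                desks[str(task['desk'])] = 1
            if task.get('stage'):
                stages[str(task['stage'])] = 1
        # user falls back to '' when no authenticated user is attached to the request context
        user = getattr(flask.g, 'user', None) or {}
        push_notification(event, user=str(user.get('_id', '')),
                          items=ids, desks=desks, stages=stages)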
Example #31
    def on_updated(self, updates, original):
        get_component(ItemAutosave).clear(original['_id'])

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self.packageService.on_updated(updates, original)

        ArchiveCropService().delete_replaced_crop_files(updates, original)

        updated = copy(original)
        updated.update(updates)

        if config.VERSION in updates:
            add_activity(ACTIVITY_UPDATE, 'created new version {{ version }} for item {{ type }} about "{{ subject }}"',
                         self.datasource, item=updated,
                         version=updates[config.VERSION], subject=get_subject(updates, original),
                         type=updated[ITEM_TYPE])

        push_content_notification([updated, original])
Example #32
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #33
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            send_to(doc=archived_doc, desk_id=doc.get('desk'), default_stage='working_stage')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #34
    def _translate_item(self, guid, language, task=None, service=None, **kwargs):
        if not service:
            service = ARCHIVE
        archive_service = get_resource_service(service)
        macros_service = get_resource_service('macros')

        item = archive_service.find_one(req=None, _id=guid)
        if not item:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % guid)

        if not is_workflow_state_transition_valid('translate', item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        if item.get('language') == language:
            return guid

        if package_service.is_package(item):
            refs = package_service.get_item_refs(item)
            for ref in refs:
                ref[RESIDREF] = self._translate_item(ref[RESIDREF], language,
                                                     service=ref.get('location'),
                                                     task=task)
        if not item.get('translation_id'):
            archive_service.system_update(item['_id'], {'translation_id': item['_id']}, item)
            item['translation_id'] = item['_id']

        macros_service.execute_translation_macro(
            item, item.get('language', None), language)

        item['language'] = language
        item['translated_from'] = guid
        item['versioncreated'] = utcnow()
        item['firstcreated'] = utcnow()
        if task:
            item['task'] = task

        _id = archive_service.duplicate_item(item, operation='translate')

        if kwargs.get('notify', True):
            push_content_notification([item])

        return _id
Example #35
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
Example #36
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args["guid"]

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            archived_doc["versioncreated"] = utcnow()
            send_to(doc=archived_doc, desk_id=doc.get("desk"), default_stage="working_stage")
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get("notify", True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #37
    def remove_marks(self, desk_id):
        """Remove "mark for desk" attribute

        :param ObjectId desk_id: id of the desk being re-opened
        """
        for service_name in ('archive', 'published'):
            service = get_resource_service(service_name)
            marked_items = service.find({
                'task.desk': desk_id,
                'marked_desks': {
                    '$exists': True,
                    '$ne': []
                }
            })
            for item in marked_items:
                marked_desks = [
                    m for m in item['marked_desks'] if 'user_marked' in m
                ]
                service.system_update(item['_id'],
                                      {'marked_desks': marked_desks}, item)
                push_content_notification([item])
Example #38
    def on_updated(self, updates, original):
        get_component(ItemAutosave).clear(original['_id'])

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self.packageService.on_updated(updates, original)

        ArchiveCropService().delete_replaced_crop_files(updates, original)

        updated = copy(original)
        updated.update(updates)

        if config.VERSION in updates:
            add_activity(
                ACTIVITY_UPDATE,
                'created new version {{ version }} for item {{ type }} about "{{ subject }}"',
                self.datasource,
                item=updated,
                version=updates[config.VERSION],
                subject=get_subject(updates, original),
                type=updated[ITEM_TYPE])

        push_content_notification([updated, original])
Example #39
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = {}

            if doc.get('type') == 'archived':
                archived_service = get_resource_service('archived')
                req = ParsedRequest()
                query = {
                    'query': {
                        'filtered': {
                            'filter': {
                                'bool': {
                                    'must': [
                                        {'term': {'item_id': doc.get('item_id')}}
                                    ]
                                }
                            }
                        }
                    },
                    "sort": [{"_current_version": "desc"}],
                    "size": 1
                }
                req.args = {'source': json.dumps(query)}
                archived_docs = archived_service.get(req=req, lookup=None)
                if archived_docs.count() > 0:
                    archived_doc = archived_docs[0]

            else:
                archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)

            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            archived_doc['versioncreated'] = utcnow()
            send_to(doc=archived_doc, desk_id=doc.get('desk'), default_stage='working_stage')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #40
    def on_updated(self, updates, original):
        get_component(ItemAutosave).clear(original['_id'])

        if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            self.packageService.on_updated(updates, original)

        updated = copy(original)
        updated.update(updates)

        if config.VERSION in updates:
            add_activity(ACTIVITY_UPDATE,
                         'created new version {{ version }} for item {{ type }} about "{{ subject }}"',
                         self.datasource, item=updated,
                         version=updates[config.VERSION], subject=get_subject(updates, original),
                         type=updated[ITEM_TYPE])

        push_content_notification([updated, original])
        get_resource_service('archive_broadcast').reset_broadcast_status(updates, original)

        if updates.get('profile'):
            get_resource_service('content_types').set_used([updates.get('profile')])

        self.cropService.update_media_references(updates, original)
Example #41
    def create(self, docs):
        ids = []
        production = get_resource_service('archive')
        assignments_service = get_resource_service('assignments')
        assignments_complete = get_resource_service('assignments_complete')
        items = []

        for doc in docs:
            assignment = assignments_service.find_one(
                req=None, _id=doc.pop('assignment_id'))
            item = production.find_one(req=None, _id=doc.pop('item_id'))
            reassign = doc.pop('reassign')

            # set the state to completed if the item is published/corrected, else in progress
            updates = {'assigned_to': deepcopy(assignment.get('assigned_to'))}
            updates['assigned_to']['state'] = ASSIGNMENT_WORKFLOW_STATE.COMPLETED if \
                item.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] else \
                ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS

            # on fulfilling the assignment the user is assigned the assignment; for add-to-planning it is not
            if reassign:
                user = get_user()
                if user and str(user.get(config.ID_FIELD)) != (
                        assignment.get('assigned_to') or {}).get('user'):
                    updates['assigned_to']['user'] = str(
                        user.get(config.ID_FIELD))

            if item.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]:
                assignments_complete.update(assignment[config.ID_FIELD],
                                            updates, assignment)
            else:
                assignments_service.patch(assignment[config.ID_FIELD], updates)

            # link the item back to the assignment
            production.system_update(
                item[config.ID_FIELD],
                {'assignment_id': assignment[config.ID_FIELD]}, item)

            # if the item is published then update the published items as well
            if item.get(ITEM_STATE) in PUBLISH_STATES:
                get_resource_service('published').update_published_items(
                    item[config.ID_FIELD], 'assignment_id',
                    assignment[config.ID_FIELD])

            get_resource_service('delivery').post([{
                'item_id': item[config.ID_FIELD],
                'assignment_id': assignment[config.ID_FIELD],
                'planning_id': assignment['planning_item'],
                'coverage_id': assignment['coverage_item']
            }])
            item['assignment_id'] = assignment[config.ID_FIELD]

            # Save assignment history
            assignment_history_service = get_resource_service(
                'assignments_history')
            assignment_history_service.on_item_content_link(
                updates, assignment)

            doc.update(item)
            ids.append(doc[config.ID_FIELD])
            items.append(item)

            if item.get(ITEM_STATE) not in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]:
                # publishing planning item
                assignments_service.publish_planning(
                    assignment['planning_item'])

        push_content_notification(items)
        push_notification('content:link',
                          item=str(item[config.ID_FIELD]),
                          assignment=str(assignment[config.ID_FIELD]))
        return ids
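The state decision above recurs in the later examples as well: published or corrected content completes the assignment, anything else keeps it in progress. A standalone sketch of that rule; the literal state strings are assumptions standing in for the CONTENT_STATE and ASSIGNMENT_WORKFLOW_STATE constants:

def next_assignment_state(item_state):
    # assumed literals in place of CONTENT_STATE.PUBLISHED/CORRECTED and
    # ASSIGNMENT_WORKFLOW_STATE.COMPLETED/IN_PROGRESS
    if item_state in ('published', 'corrected'):
        return 'completed'
    return 'in_progress'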
Example #42
    def enqueue_item(self, published_item):
        """
        Creates the corresponding entries in the publish queue for the given item
        """
        published_item_id = ObjectId(published_item[config.ID_FIELD])
        published_service = get_resource_service(PUBLISHED)
        archive_service = get_resource_service(ARCHIVE)
        published_update = {
            QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS,
            'last_queue_event': utcnow()
        }
        try:
            logger.info('Queueing item with id: {} and item_id: {}'.format(
                published_item_id, published_item['item_id']))

            published_item = published_service.find_one(req=None,
                                                        _id=published_item_id)
            if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
                logger.info(
                    'Queue State is not pending for published item {}. It is in {}'
                    .format(published_item_id,
                            published_item.get(QUEUE_STATE)))
                return

            if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
                # if scheduled then change the state to published
                # change the `version` and `versioncreated` for the item
                # in archive collection and published collection.
                versioncreated = utcnow()
                item_updates = {
                    'versioncreated': versioncreated,
                    ITEM_STATE: CONTENT_STATE.PUBLISHED
                }
                resolve_document_version(document=item_updates, resource=ARCHIVE, method='PATCH',
                                         latest_doc={config.VERSION: published_item[config.VERSION]})

                # update the archive collection
                archive_item = archive_service.find_one(
                    req=None, _id=published_item['item_id'])
                archive_service.system_update(published_item['item_id'],
                                              item_updates, archive_item)
                # insert into version.
                insert_into_versions(published_item['item_id'], doc=None)
                # update archive history
                app.on_archive_item_updated(item_updates, archive_item,
                                            ITEM_PUBLISH)
                # import to legal archive
                import_into_legal_archive.apply_async(
                    countdown=3, kwargs={'item_id': published_item['item_id']})
                logger.info(
                    'Modified the version of scheduled item: {}'.format(
                        published_item_id))

                logger.info('Publishing scheduled item_id: {}'.format(
                    published_item_id))
                # update the published collection
                published_update.update(item_updates)
                published_item.update({'versioncreated': versioncreated,
                                       ITEM_STATE: CONTENT_STATE.PUBLISHED,
                                       config.VERSION: item_updates[config.VERSION]})
                # send a notification to the clients
                push_content_notification(
                    [{'_id': str(published_item['item_id']), 'task': published_item.get('task', None)}])
                # apply internal destinations
                signals.item_published.send(
                    self, item=archive_service.find_one(req=None, _id=published_item['item_id']))

            published_service.patch(published_item_id, published_update)
            # queue the item for publishing
            try:
                queued = get_enqueue_service(
                    published_item[ITEM_OPERATION]).enqueue_item(
                        published_item, None)
            except KeyError as key_error:
                error_updates = {QUEUE_STATE: PUBLISH_STATE.ERROR, ERROR_MESSAGE: str(key_error)}
                published_service.patch(published_item_id, error_updates)
                logger.exception('No enqueue service found for operation %s',
                                 published_item[ITEM_OPERATION])
                raise

            # if the item is queued in the publish_queue then the state is "queued"
            # else the queue state is "queued_not_transmitted"
            queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
            published_service.patch(published_item_id,
                                    {QUEUE_STATE: queue_state})
            logger.info('Queued item with id: {} and item_id: {}'.format(
                published_item_id, published_item['item_id']))
        except ConnectionTimeout as error:  # recoverable, set state to pending and retry next time
            error_updates = {QUEUE_STATE: PUBLISH_STATE.PENDING, ERROR_MESSAGE: str(error)}
            published_service.patch(published_item_id, error_updates)
            raise
        except SoftTimeLimitExceeded as error:
            error_updates = {QUEUE_STATE: PUBLISH_STATE.PENDING, ERROR_MESSAGE: str(error)}
            published_service.patch(published_item_id, error_updates)
            raise
        except Exception as error:
            error_updates = {QUEUE_STATE: PUBLISH_STATE.ERROR, ERROR_MESSAGE: str(error)}
            published_service.patch(published_item_id, error_updates)
            raise
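`enqueue_item` walks the published item through a small queue-state machine: pending items move to in_progress, then end up queued or queued_not_transmitted; recoverable failures (ConnectionTimeout, SoftTimeLimitExceeded) are reset to pending for the next run, everything else is parked in error. An illustrative transition table, with lower-case literals standing in for the PUBLISH_STATE constants:

# illustrative transition table for the queue states used above
QUEUE_TRANSITIONS = {
    'pending': {'in_progress'},        # picked up by enqueue_item
    'in_progress': {
        'queued',                      # enqueue service transmitted the item
        'queued_not_transmitted',      # enqueued, but nothing was transmitted
        'pending',                     # recoverable error, retry on the next run
        'error',                       # unknown operation or fatal failure
    },
}


def check_transition(current, new):
    # guard against an illegal queue-state move
    if new not in QUEUE_TRANSITIONS.get(current, set()):
        raise ValueError('illegal queue state change: %s -> %s' % (current, new))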
Example #43
    def create(self, docs):
        ids = []
        production = get_resource_service('archive')
        assignments_service = get_resource_service('assignments')
        assignments_complete = get_resource_service('assignments_complete')
        items = []
        deliveries = []
        published_updated_items = []

        for doc in docs:
            assignment = assignments_service.find_one(
                req=None, _id=doc.pop('assignment_id'))
            assignments_service.validate_assignment_action(assignment)
            item_id = doc.pop('item_id')
            actioned_item = production.find_one(req=None, _id=item_id)
            related_items = get_related_items(actioned_item)
            reassign = doc.pop('reassign')
            updates = {'assigned_to': deepcopy(assignment.get('assigned_to'))}

            for item in related_items:
                if not item.get('assignment_id'):
                    # Add a delivery for each related item not yet linked to an assignment
                    deliveries.append({
                        'item_id': item[config.ID_FIELD],
                        'assignment_id': assignment.get(config.ID_FIELD),
                        'planning_id': assignment['planning_item'],
                        'coverage_id': assignment['coverage_item'],
                        'item_state': item.get('state'),
                        'sequence_no': item.get('rewrite_sequence', 0),
                        'publish_time': item.get('firstpublished')
                    })

                    # Update archive/published collection with assignment linking
                    update_assignment_on_link_unlink(
                        assignment[config.ID_FIELD], item,
                        published_updated_items)

                    ids.append(item.get(config.ID_FIELD))
                    items.append(item)

        # Create all deliveries
        if len(deliveries) > 0:
            get_resource_service('delivery').post(deliveries)

        # Update assignments, assignment history and publish planning.
        # Complete the assignment if the actioned item has been published, otherwise set it in progress.
        already_completed = assignment['assigned_to']['state'] == ASSIGNMENT_WORKFLOW_STATE.COMPLETED
        updates['assigned_to']['state'] = ASSIGNMENT_WORKFLOW_STATE.COMPLETED if \
            actioned_item.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] else \
            ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS

        # when fulfilling the assignment the current user becomes the assignee; for add-to-planning it does not
        if reassign:
            user = get_user()
            if user and str(user.get(config.ID_FIELD)) != \
                    (assignment.get('assigned_to') or {}).get('user'):
                updates['assigned_to']['user'] = str(user.get(config.ID_FIELD))

            # if the item & assignment aren't on the same desk, move the assignment to the item's desk
            if (assignment.get('assigned_to') or {}).get('desk') != str(
                    actioned_item.get('task').get('desk')):
                updates['assigned_to']['desk'] = str(
                    actioned_item.get('task').get('desk'))

        # If assignment is already complete, no need to update it again
        if not already_completed:
            if actioned_item.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]:
                assignments_complete.update(assignment[config.ID_FIELD],
                                            updates, assignment)
            else:
                assignments_service.patch(assignment[config.ID_FIELD], updates)

        actioned_item['assignment_id'] = assignment[config.ID_FIELD]
        doc.update(actioned_item)

        # Save assignment history with all items affected
        if len(ids) > 0:
            updates['assigned_to']['item_ids'] = ids
            assignment_history_service = get_resource_service(
                'assignments_history')
            assignment_history_service.on_item_content_link(
                updates, assignment)
            if actioned_item.get(ITEM_STATE) not in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] or \
                    already_completed:
                # publishing planning item
                assignments_service.publish_planning(
                    assignment['planning_item'])

        # Send notifications
        push_content_notification(items)
        push_notification('content:link',
                          item=str(actioned_item[config.ID_FIELD]),
                          assignment=assignment[config.ID_FIELD])
        return ids
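When `reassign` is set, the snippet hands the assignment to the fulfilling user and, if needed, moves it to the item's desk. A condensed sketch of that rule; `reassignment_updates` is a hypothetical helper and assumes the `assigned_to`/`task` shapes used above:

def reassignment_updates(assignment, item, user_id):
    # hypothetical helper: compute assigned_to changes when fulfilling an assignment
    assigned_to = assignment.get('assigned_to') or {}
    updates = {}
    if user_id and user_id != assigned_to.get('user'):
        updates['user'] = user_id      # assignment goes to the fulfilling user
    item_desk = str((item.get('task') or {}).get('desk'))
    if assigned_to.get('desk') != item_desk:
        updates['desk'] = item_desk    # keep the assignment on the item's desk
    return updates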
Example #44
def enqueue_item(published_item):
    """
    Creates the corresponding entries in the publish queue for the given item
    """
    published_item_id = ObjectId(published_item[config.ID_FIELD])
    published_service = get_resource_service(PUBLISHED)
    archive_service = get_resource_service(ARCHIVE)
    published_update = {QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS, 'last_queue_event': utcnow()}
    try:
        logger.info('Queueing item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))

        published_item = published_service.find_one(req=None, _id=published_item_id)
        if published_item.get(QUEUE_STATE) != PUBLISH_STATE.PENDING:
            logger.info('Queue State is not pending for published item {}. It is in {}'.
                        format(published_item_id, published_item.get(QUEUE_STATE)))
            return

        if published_item.get(ITEM_STATE) == CONTENT_STATE.SCHEDULED:
            # if scheduled then change the state to published
            # change the `version` and `versioncreated` for the item
            # in archive collection and published collection.
            versioncreated = utcnow()
            item_updates = {'versioncreated': versioncreated, ITEM_STATE: CONTENT_STATE.PUBLISHED}
            resolve_document_version(document=item_updates, resource=ARCHIVE,
                                     method='PATCH',
                                     latest_doc={config.VERSION: published_item[config.VERSION]})

            # update the archive collection
            archive_item = archive_service.find_one(req=None, _id=published_item['item_id'])
            archive_service.system_update(published_item['item_id'], item_updates, archive_item)
            # insert into version.
            insert_into_versions(published_item['item_id'], doc=None)
            # update archive history
            app.on_archive_item_updated(item_updates, archive_item, ITEM_PUBLISH)
            # import to legal archive
            import_into_legal_archive.apply_async(countdown=3, kwargs={'item_id': published_item['item_id']})
            logger.info('Modified the version of scheduled item: {}'.format(published_item_id))

            logger.info('Publishing scheduled item_id: {}'.format(published_item_id))
            # update the published collection
            published_update.update(item_updates)
            published_item.update({'versioncreated': versioncreated,
                                   ITEM_STATE: CONTENT_STATE.PUBLISHED,
                                   config.VERSION: item_updates[config.VERSION]})
            # send a notification to the clients
            push_content_notification(
                [{'_id': str(published_item['item_id']), 'task': published_item.get('task', None)}])

        published_service.patch(published_item_id, published_update)
        # queue the item for publishing
        queued = get_enqueue_service(published_item[ITEM_OPERATION]).enqueue_item(published_item)

        # if the item is queued in the publish_queue then the state is "queued"
        # else the queue state is "queued_not_transmitted"
        queue_state = PUBLISH_STATE.QUEUED if queued else PUBLISH_STATE.QUEUED_NOT_TRANSMITTED
        published_service.patch(published_item_id, {QUEUE_STATE: queue_state})
        logger.info('Queued item with id: {} and item_id: {}'.format(published_item_id, published_item['item_id']))
    except KeyError as key_error:
        error_updates = {QUEUE_STATE: PUBLISH_STATE.ERROR, ERROR_MESSAGE: str(key_error)}
        published_service.patch(published_item_id, error_updates)
        logger.exception('No enqueue service found for operation %s', published_item[ITEM_OPERATION])
    except ConnectionTimeout as error:  # recoverable, set state to pending and retry next time
        error_updates = {QUEUE_STATE: PUBLISH_STATE.PENDING, ERROR_MESSAGE: str(error)}
        published_service.patch(published_item_id, error_updates)
        raise
    except Exception as error:
        error_updates = {QUEUE_STATE: PUBLISH_STATE.ERROR, ERROR_MESSAGE: str(error)}
        published_service.patch(published_item_id, error_updates)
        raise
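Both `enqueue_item` variants end every failure path the same way: patch the queue state (plus the error text) onto the published item, then re-raise where the failure is not terminal. A condensed sketch of that pattern; `record_queue_state` and the literal field names are illustrative stand-ins for the QUEUE_STATE/ERROR_MESSAGE constants:

def record_queue_state(published_service, item_id, state, error=None):
    # illustrative helper: persist the queue state (and error text, if any)
    # on the published item before the exception propagates
    updates = {'queue_state': state}
    if error is not None:
        updates['error_message'] = str(error)
    published_service.patch(item_id, updates)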