def handle_item_published(sender, item, **extra):
    macros_service = get_resource_service('macros')
    archive_service = get_resource_service('archive')
    filters_service = get_resource_service('content_filters')
    destinations_service = get_resource_service(NAME)

    for dest in destinations_service.get(req=None, lookup={'is_active': True}):
        if dest.get('desk') == item.get('task').get('desk'):
            # the item's desk matches the internal destination's desk, so skip it
            continue

        if dest.get('filter'):
            content_filter = filters_service.find_one(req=None, _id=dest['filter'])
            if not content_filter:  # referenced filter is missing; skip this destination
                continue
            if not filters_service.does_match(content_filter, item):
                continue

        new_item = deepcopy(item)
        send_to(new_item, desk_id=dest['desk'], stage_id=dest.get('stage'))

        if dest.get('macro'):
            macro = macros_service.get_macro_by_name(dest['macro'])
            try:
                macro['callback'](new_item)
            except StopDuplication:
                continue

        extra_fields = [PUBLISH_SCHEDULE, SCHEDULE_SETTINGS]
        archive_service.duplicate_content(new_item, state='routed', extra_fields=extra_fields)
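A macro callback can raise StopDuplication to cancel the routed copy, which is what the try/except above handles. Below is a minimal sketch of such a macro module; the names are hypothetical and it assumes StopDuplication is importable from superdesk.errors (adjust to wherever the project defines it).

# Hypothetical macro module for an internal destination.
from superdesk.errors import StopDuplication  # assumed import path


def callback(item, **kwargs):
    # Cancel the routed copy for embargoed items; otherwise pass the item on.
    if item.get('embargoed'):
        raise StopDuplication()
    return item


name = 'skip_embargoed'          # hypothetical macro name
label = 'Skip embargoed items'   # hypothetical label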
Example #2
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)
        user = get_user()

        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        set_sign_off(archived_doc, original=original)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        return archived_doc
Example #3
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' %
                                                      guid_of_item_to_be_duplicated)

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item is None or str(current_desk_of_item) != str(doc.get('desk')):
                raise SuperdeskApiError.preconditionFailedError(message='Duplicate is only allowed within the same desk.')

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            task = archived_doc.get('task', {})
            push_notification(
                'content:update',
                duplicated=1,
                item=str(new_guid),
                desk=str(task.get('desk', '')),
                stage=str(task.get('stage', ''))
            )

        return guid_of_duplicated_items
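The notify keyword above lets a caller batch several duplications and push one notification at the end instead of one per call. A hedged usage sketch; the 'duplicate' resource name and the targets list are assumptions, not taken from the example.

# Illustrative only: duplicate several items quietly, notify once.
duplicate_service = get_resource_service('duplicate')  # assumed resource name
new_guids = []
for target in targets:  # e.g. [{'desk': desk_id, 'stage': stage_id}, ...]
    new_guids += duplicate_service.create([target], notify=False)
push_notification('content:update', duplicated=len(new_guids))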
Example #4
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(
                req=None, _id=guid_of_item_to_be_duplicated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError(
                    'Failed to find item with guid: %s' %
                    guid_of_item_to_be_duplicated)

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item is None or str(
                    current_desk_of_item) != str(doc.get('desk')):
                raise SuperdeskApiError.preconditionFailedError(
                    message='Duplicate is only allowed within the same desk.')

            if not is_workflow_state_transition_valid(
                    'duplicate', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #5
    def _create_rewrite_article(self, original, digital):
        """
        Creates a new story and sets the metadata from original and digital
        :param original: original story
        :param digital: digital version of the story
        :return: new story
        """
        rewrite = dict()
        fields = [
            'family_id', 'abstract', 'anpa_category', 'pubstatus', 'slugline',
            'urgency', 'subject', 'priority', 'byline', 'dateline', 'headline',
            'event_id'
        ]

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        if digital:
            # check if there's digital
            rewrite['rewrite_of'] = digital['_id']
        else:
            # if not use original's id
            rewrite['rewrite_of'] = original['_id']

        send_to(doc=rewrite, desk_id=original['task']['desk'])
        rewrite['state'] = 'in_progress'
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite
Example #6
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service('ingest_providers').find_one(
            source='aapmm', req=None)
        for doc in docs:
            if not doc.get('desk'):
                # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError(
                    "Destination desk cannot be empty.")

            archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(
                    provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get('desk'), doc.get('stage'))
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #7
def fetch_item(doc, desk_id, stage_id, state=None, target=None):
    dest_doc = dict(doc)

    if target:
        # set target subscriber info
        dest_doc.update(target)

    new_id = generate_guid(type=GUID_TAG)
    if doc.get("guid"):
        dest_doc.setdefault("uri", doc[GUID_FIELD])

    dest_doc[config.ID_FIELD] = new_id
    dest_doc[GUID_FIELD] = new_id
    generate_unique_id_and_name(dest_doc)

    # avoid circular import
    from apps.tasks import send_to

    dest_doc[config.VERSION] = 1
    dest_doc["versioncreated"] = utcnow()
    send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
    dest_doc[ITEM_STATE] = state or CONTENT_STATE.FETCHED

    dest_doc[FAMILY_ID] = doc[config.ID_FIELD]
    dest_doc[INGEST_ID] = doc[config.ID_FIELD]
    dest_doc[ITEM_OPERATION] = ITEM_FETCH

    remove_unwanted(dest_doc)
    set_original_creator(dest_doc)
    return dest_doc
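fetch_item only builds the archive copy; saving and versioning it is left to the caller. A sketch of one plausible caller, reusing the helpers that appear in the other examples on this page (the wrapper itself is hypothetical).

def fetch_and_store(ingest_doc, desk_id, stage_id):
    # Hypothetical wrapper: build the copy, persist it, then version it.
    dest_doc = fetch_item(ingest_doc, desk_id, stage_id)
    superdesk.get_resource_service(ARCHIVE).post([dest_doc])
    insert_into_versions(doc=dest_doc)
    return dest_doc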
Example #8
    def _create_rewrite_article(self, original, digital):
        """
        Creates a new story and sets the metadata from original and digital
        :param original: original story
        :param digital: digital version of the story
        :return: new story
        """
        rewrite = dict()
        fields = ['family_id', 'abstract', 'anpa_category', 'pubstatus', 'slugline', 'urgency', 'subject', 'priority',
                  'byline', 'dateline', 'headline', 'event_id']

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        if digital:
            # check if there's digital
            rewrite['rewrite_of'] = digital['_id']
        else:
            # if not use original's id
            rewrite['rewrite_of'] = original['_id']

        send_to(doc=rewrite, desk_id=original['task']['desk'])
        rewrite['state'] = 'in_progress'
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite
Example #9
    def create(self, docs, **kwargs):
        for doc in docs:
            ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
            if not ingest_doc:
                msg = 'Failed to find ingest item with guid: %s' % doc.get('guid')
                raise SuperdeskError(payload=msg)

            if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            mark_ingest_as_archived(ingest_doc=ingest_doc)

            archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
            if not archived_doc:
                create_from_ingest_doc(doc, ingest_doc)
                send_to(doc, doc.get('desk'))
                superdesk.get_resource_service(ARCHIVE).post([doc])

            task = archive_item.delay(doc.get('guid'), ingest_doc.get('ingest_provider'), get_user())

            doc['task_id'] = task.id
            if task.state not in ('PROGRESS', states.SUCCESS, states.FAILURE) and not task.result:
                update_status(task.id, 0, 0)

        return [doc.get('guid') for doc in docs]
Example #10
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service('ingest_providers').find_one(source='aapmm', req=None)
        for doc in docs:
            if not doc.get('desk'):
                # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")

            archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #11
 def _move(self, archived_doc, doc):
     archive_service = get_resource_service(ARCHIVE)
     original = deepcopy(archived_doc)
     user = get_user()
     send_to(doc=archived_doc,
             desk_id=doc.get('task', {}).get('desk'),
             stage_id=doc.get('task', {}).get('stage'),
             user_id=user.get(config.ID_FIELD))
     if archived_doc[ITEM_STATE] not in {
             CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
             CONTENT_STATE.KILLED
     }:
         archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
     archived_doc[ITEM_OPERATION] = ITEM_MOVE
     # set the change in desk type when content is moved.
     self.set_change_in_desk_type(archived_doc, original)
     archived_doc.pop(SIGN_OFF, None)
     set_sign_off(archived_doc, original=original)
     convert_task_attributes_to_objectId(archived_doc)
     resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
     del archived_doc[config.ID_FIELD]
     archive_service.update(original[config.ID_FIELD], archived_doc,
                            original)
     insert_into_versions(id_=original[config.ID_FIELD])
     push_item_move_notification(original, archived_doc)
     app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
Example #12
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' %
                                                      guid_of_item_to_be_duplicated)

            current_desk_of_item = archived_doc.get('task', {}).get('desk')
            if current_desk_of_item is None or str(current_desk_of_item) != str(doc.get('desk')):
                raise SuperdeskApiError.preconditionFailedError(message='Duplicate is only allowed within the same desk.')

            if not is_workflow_state_transition_valid('duplicate', archived_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #13
def handle_item_published(sender, item, **extra):
    macros_service = get_resource_service('macros')
    archive_service = get_resource_service('archive')
    filters_service = get_resource_service('content_filters')
    destinations_service = get_resource_service(NAME)

    for dest in destinations_service.get(req=None, lookup={'is_active': True}):
        if dest.get('desk') == item.get('task').get('desk'):
            # the item's desk matches the internal destination's desk, so skip it
            continue

        if dest.get('filter'):
            content_filter = filters_service.find_one(req=None,
                                                      _id=dest['filter'])
            if not content_filter:  # referenced filter is missing; skip this destination
                continue
            if not filters_service.does_match(content_filter, item):
                continue

        new_item = deepcopy(item)
        send_to(new_item, desk_id=dest['desk'], stage_id=dest.get('stage'))

        if dest.get('macro'):
            macro = macros_service.get_macro_by_name(dest['macro'])
            try:
                macro['callback'](new_item)
            except StopDuplication:
                continue

        extra_fields = [PUBLISH_SCHEDULE, SCHEDULE_SETTINGS]
        archive_service.duplicate_content(new_item,
                                          state='routed',
                                          extra_fields=extra_fields)
Example #14
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service("ingest_providers").find_one(source="aapmm", req=None)
        if provider and "config" in provider and "username" in provider["config"]:
            self.backend.set_credentials(provider["config"]["username"], provider["config"]["password"])
        for doc in docs:
            if not doc.get("desk"):
                # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
            try:
                archived_doc = self.backend.find_one_raw(doc["guid"], doc["guid"])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc["_id"] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc["ingest_provider"] = str(provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get("desk"), stage_id=doc.get("stage"))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc["_id"]
            dest_doc[FAMILY_ID] = archived_doc["_id"]
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get("_id"))

        return new_guids
Example #15
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)

        send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

        if archived_doc[config.CONTENT_STATE] != 'published':
            archived_doc[config.CONTENT_STATE] = 'submitted'

        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc['_id']
        archive_service.update(original['_id'], archived_doc, original)

        insert_into_versions(guid=original['_id'])

        return archived_doc
Example #16
    def create(self, docs, **kwargs):
        new_guids = []
        provider = self.get_provider()
        for doc in docs:
            if not doc.get('desk'):
                # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
            try:
                archived_doc = self.fetch(doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #17
    def create(self, docs, **kwargs):
        guid_of_item_to_be_moved = request.view_args['guid']

        guid_of_moved_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_moved)
            if not archived_doc:
                raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' %
                                                      guid_of_item_to_be_moved)

            current_stage_of_item = archived_doc.get('task', {}).get('stage')
            if current_stage_of_item and str(current_stage_of_item) == str(doc.get('stage')):
                raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

            if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            original = dict(archived_doc)

            send_to(archived_doc, doc.get('desk'), doc.get('stage'))
            archived_doc[config.CONTENT_STATE] = 'submitted'
            resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

            del archived_doc['_id']
            archive_service.update(original['_id'], archived_doc, original)

            insert_into_versions(guid=original['_id'])

            guid_of_moved_items.append(archived_doc['guid'])

        return guid_of_moved_items
Example #18
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)

        send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

        if archived_doc[config.CONTENT_STATE] not in ['published', 'scheduled', 'killed']:
            archived_doc[config.CONTENT_STATE] = 'submitted'

        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc['_id']
        archive_service.update(original['_id'], archived_doc, original)

        insert_into_versions(id_=original['_id'])

        return archived_doc
Example #19
    def _create_rewrite_article(self, original, digital):
        """
        Creates a new story and sets the metadata from original and digital
        :param original: original story
        :param digital: digital version of the story
        :return: new story
        """
        rewrite = dict()
        fields = [
            'family_id', 'abstract', 'anpa_category', 'pubstatus', 'slugline',
            'urgency', 'subject', 'priority', 'byline', 'dateline', 'headline',
            'event_id', 'place', 'flags', 'genre', 'body_footer'
        ]

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        if digital:  # check if there's digital
            rewrite['rewrite_of'] = digital[config.ID_FIELD]
        else:  # if not use original's id
            rewrite['rewrite_of'] = original[config.ID_FIELD]

        send_to(doc=rewrite, desk_id=original['task']['desk'])
        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite
Example #20
def handle_item_published(sender, item, **extra):
    macros_service = get_resource_service('macros')
    archive_service = get_resource_service('archive')
    filters_service = get_resource_service('content_filters')
    destinations_service = get_resource_service(NAME)

    for dest in destinations_service.get(req=None, lookup={'is_active': True}):
        if dest.get('filter'):
            content_filter = filters_service.find_one(req=None,
                                                      _id=dest['filter'])
            if not content_filter:  # referenced filter is missing; skip this destination
                continue
            if not filters_service.does_match(content_filter, item):
                continue

        new_item = deepcopy(item)

        if dest.get('macro'):
            macro = macros_service.get_macro_by_name(dest['macro'])
            macro['callback'](new_item)

        if dest.get('desk'):
            send_to(new_item, desk_id=dest['desk'], stage_id=dest.get('stage'))

        archive_service.duplicate_content(new_item, state='routed')
Example #21
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get('_id') if id is None else id

            desk_id = doc.get('desk')
            stage_id = doc.get('stage')

            ingest_service = get_resource_service('ingest')
            ingest_doc = ingest_service.find_one(req=None,
                                                 _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError(
                    'Failed to find ingest item with _id: %s' %
                    id_of_item_to_be_fetched)

            if not is_workflow_state_transition_valid(
                    'fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            if doc.get('macro'):  # there is a macro so transform it
                ingest_doc = get_resource_service('macros').execute_macro(
                    ingest_doc, doc.get('macro'))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched,
                                 {'archived': archived})

            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc['_id'] = new_id
            dest_doc['guid'] = new_id
            dest_doc['destination_groups'] = doc.get('destination_groups')
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            send_to(dest_doc, desk_id, stage_id)
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                          doc.get('state', STATE_FETCHED),
                                          doc.get('destination_groups'))

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            doc.update(dest_doc)

        if kwargs.get('notify', True):
            push_notification('item:fetch', fetched=1)

        return id_of_fetched_items
Example #22
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = {}

            if doc.get('type') == 'archived':
                archived_service = get_resource_service('archived')
                req = ParsedRequest()
                query = {
                    'query': {
                        'filtered': {
                            'filter': {
                                'bool': {
                                    'must': [{
                                        'term': {
                                            'item_id': doc.get('item_id')
                                        }
                                    }]
                                }
                            }
                        }
                    },
                    "sort": [{
                        "_current_version": "desc"
                    }],
                    "size": 1
                }
                req.args = {'source': json.dumps(query)}
                archived_docs = archived_service.get(req=req, lookup=None)
                if archived_docs.count() > 0:
                    archived_doc = archived_docs[0]

            else:
                archived_doc = archive_service.find_one(
                    req=None, _id=guid_of_item_to_be_duplicated)

            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            # reset timestamps
            archived_doc['versioncreated'] = archived_doc[
                'firstcreated'] = utcnow()
            archived_doc['firstpublished'] = None

            send_to(doc=archived_doc,
                    desk_id=doc.get('desk'),
                    stage_id=doc.get('stage'),
                    default_stage='working_stage')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #23
    def create(self, docs, **kwargs):
        search_provider = get_resource_service('search_providers').find_one(
            search_provider=PROVIDER_NAME, req=None)

        if not search_provider or search_provider.get('is_closed', False):
            raise SuperdeskApiError.badRequestError(
                'No search provider found or the search provider is closed.')

        if 'config' in search_provider:
            self.backend.set_credentials(search_provider['config'])

        new_guids = []
        for doc in docs:
            if not doc.get('desk'):  # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError(
                    "Destination desk cannot be empty.")

            try:
                archived_doc = self.backend.find_one_raw(
                    doc['guid'], doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, search_provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc[config.ID_FIELD] = new_id
            generate_unique_id_and_name(dest_doc)

            if search_provider:
                dest_doc['ingest_provider'] = str(
                    search_provider[config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc,
                    update=None,
                    desk_id=doc.get('desk'),
                    stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc[config.ID_FIELD]
            dest_doc[FAMILY_ID] = archived_doc[config.ID_FIELD]
            dest_doc[ITEM_OPERATION] = ITEM_FETCH
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc[config.ID_FIELD])

            get_resource_service('search_providers').system_update(
                search_provider[config.ID_FIELD],
                {'last_item_update': utcnow()}, search_provider)

        return new_guids
Example #24
 def send_to_original_desk(self, updates, original):
     if (
         app.config.get("CORRECTIONS_WORKFLOW")
         and original.get("state") == "correction"
         and original.get("task", {}).get("desk_history")
     ):
         send_to(
             doc=updates,
             desk_id=(original["task"]["desk_history"][0]),
             default_stage="working_stage",
             user_id=get_user_id(),
         )
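The branch above is inert unless the corrections workflow is enabled and the item carries a desk history. A one-line settings sketch, assuming a standard Superdesk settings module:

# settings.py (assumed location): corrected items go back to the first
# desk recorded in task.desk_history.
CORRECTIONS_WORKFLOW = True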
Example #25
def handle_item_published(sender, item, **extra):
    macros_service = get_resource_service("macros")
    archive_service = get_resource_service("archive")
    filters_service = get_resource_service("content_filters")
    destinations_service = get_resource_service(NAME)

    for dest in destinations_service.get(req=None, lookup={"is_active": True}):
        if dest.get("desk") == item.get("task").get("desk"):
            # the item's desk matches the internal destination's desk, so skip it
            continue

        if dest.get("filter"):
            content_filter = filters_service.find_one(req=None,
                                                      _id=dest["filter"])
            if not content_filter:  # referenced filter is missing; skip this destination
                continue
            if not filters_service.does_match(content_filter, item):
                continue

        if dest.get("send_after_schedule",
                    False) and item.get("state") != "published":
            # when send_after_schedule is set, skip items that are not yet published
            continue
        elif item.get("state") == "published":
            item[PUBLISH_SCHEDULE] = None
            item[SCHEDULE_SETTINGS] = {}

        new_item = deepcopy(item)
        send_to(new_item, desk_id=dest["desk"], stage_id=dest.get("stage"))

        if dest.get("macro"):
            macro = macros_service.get_macro_by_name(dest["macro"])
            if not macro:
                logger.warning(
                    "macro %s not found for internal destination %s",
                    dest["macro"], dest["name"])
            else:
                try:
                    macro["callback"](
                        new_item,
                        dest_desk_id=dest.get("desk"),
                        dest_stage_id=dest.get("stage"),
                    )
                except StopDuplication:
                    continue

        extra_fields = [PUBLISH_SCHEDULE, SCHEDULE_SETTINGS]
        next_id = archive_service.duplicate_item(new_item,
                                                 state="routed",
                                                 extra_fields=extra_fields)
        next_item = archive_service.find_one(req=None, _id=next_id)
        item_routed.send(sender, item=next_item)
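The item_routed signal sent at the end gives other components a hook on the freshly routed copy. A hedged subscriber sketch; the import path is an assumption.

from superdesk.signals import item_routed  # assumed import path


def on_item_routed(sender, item, **extra):
    # React to the routed duplicate, e.g. log where it landed.
    logger.info('item %s routed to desk %s',
                item.get('_id'), item.get('task', {}).get('desk'))


item_routed.connect(on_item_routed)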
Example #26
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get('_id') if id is None else id

            desk_id = doc.get('desk')
            stage_id = doc.get('stage')

            ingest_service = get_resource_service('ingest')
            ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError('Failed to find ingest item with _id: %s' %
                                                      id_of_item_to_be_fetched)

            if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            if doc.get('macro'):  # there is a macro so transform it
                ingest_doc = get_resource_service('macros').execute_macro(ingest_doc, doc.get('macro'))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched, {'archived': archived})

            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc['_id'] = new_id
            dest_doc['guid'] = new_id
            dest_doc['destination_groups'] = doc.get('destination_groups')
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            send_to(dest_doc, desk_id, stage_id)
            dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                          doc.get('state', STATE_FETCHED),
                                          doc.get('destination_groups'))

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            doc.update(dest_doc)

        if kwargs.get('notify', True):
            push_notification('item:fetch', fetched=1)

        return id_of_fetched_items
Example #27
def handle_item_published(sender, item, **extra):
    macros_service = get_resource_service('macros')
    archive_service = get_resource_service('archive')
    filters_service = get_resource_service('content_filters')
    destinations_service = get_resource_service(NAME)

    for dest in destinations_service.get(req=None, lookup={'is_active': True}):
        if dest.get('desk') == item.get('task').get('desk'):
            # the item's desk matches the internal destination's desk, so skip it
            continue

        if dest.get('filter'):
            content_filter = filters_service.find_one(req=None,
                                                      _id=dest['filter'])
            if not content_filter:  # referenced filter is missing; skip this destination
                continue
            if not filters_service.does_match(content_filter, item):
                continue

        if dest.get('send_after_schedule',
                    False) and item.get('state') != 'published':
            # when send_after_schedule is set, skip items that are not yet published
            continue
        elif item.get('state') == 'published':
            item[PUBLISH_SCHEDULE] = None
            item[SCHEDULE_SETTINGS] = {}

        new_item = deepcopy(item)
        send_to(new_item, desk_id=dest['desk'], stage_id=dest.get('stage'))

        if dest.get('macro'):
            macro = macros_service.get_macro_by_name(dest['macro'])
            if not macro:
                logger.warning(
                    'macro %s not found for internal destination %s',
                    dest['macro'], dest['name'])
            else:
                try:
                    macro['callback'](
                        new_item,
                        dest_desk_id=dest.get('desk'),
                        dest_stage_id=dest.get('stage'),
                    )
                except StopDuplication:
                    continue

        extra_fields = [PUBLISH_SCHEDULE, SCHEDULE_SETTINGS]
        next_id = archive_service.duplicate_content(new_item,
                                                    state='routed',
                                                    extra_fields=extra_fields)
        next_item = archive_service.find_one(req=None, _id=next_id)
        item_routed.send(sender, item=next_item)
Example #28
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(
                doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk',
                                                  archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc,
                desk_id=doc.get('task', {}).get('desk'),
                stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {
                CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED
        }:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
Example #29
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get("_id") if id is None else id

            desk_id = doc.get("desk")
            stage_id = doc.get("stage")

            ingest_service = get_resource_service("ingest")
            ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError(
                    "Fail to found ingest item with _id: %s" % id_of_item_to_be_fetched
                )

            if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            if doc.get("macro"):  # there is a macro so transform it
                ingest_doc = get_resource_service("macros").execute_macro(ingest_doc, doc.get("macro"))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched, {"archived": archived})

            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc["_id"] = new_id
            dest_doc["guid"] = new_id
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc["_id"]
            dest_doc[ITEM_OPERATION] = ITEM_FETCH

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            doc.update(dest_doc)

        if kwargs.get("notify", True):
            push_notification("item:fetch", fetched=1)

        return id_of_fetched_items
Example #30
    def create(self, docs, **kwargs):
        for doc in docs:
            ingest_doc = superdesk.get_resource_service("ingest").find_one(req=None, _id=doc.get("guid"))
            if not ingest_doc:
                msg = "Fail to found ingest item with guid: %s" % doc.get("guid")
                raise SuperdeskApiError.notFoundError(msg)

            if not is_workflow_state_transition_valid("fetch_as_from_ingest", ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            archived = utcnow()
            superdesk.get_resource_service("ingest").patch(ingest_doc.get("_id"), {"archived": archived})
            doc["archived"] = archived

            archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get("guid"))
            if not archived_doc:
                dest_doc = dict(ingest_doc)
                dest_doc[config.VERSION] = 1
                send_to(dest_doc, doc.get("desk"))
                dest_doc[config.CONTENT_STATE] = STATE_FETCHED
                remove_unwanted(dest_doc)
                for ref in [
                    ref for group in dest_doc.get("groups", []) for ref in group.get("refs", []) if "residRef" in ref
                ]:
                    ref["location"] = ARCHIVE
                    ref["guid"] = ref["residRef"]

                set_original_creator(dest_doc)
                if doc.get(PACKAGE):
                    links = dest_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    dest_doc[LINKED_IN_PACKAGES] = links
                superdesk.get_resource_service(ARCHIVE).post([dest_doc])
                insert_into_versions(dest_doc.get("guid"))
                desk = doc.get("desk")
                refs = [
                    {"guid": ref.get("residRef"), "desk": desk, PACKAGE: dest_doc.get("_id")}
                    for group in dest_doc.get("groups", [])
                    for ref in group.get("refs", [])
                    if "residRef" in ref
                ]
                if refs:
                    self.create(refs)
            else:
                if doc.get(PACKAGE):
                    links = archived_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get("_id"), {LINKED_IN_PACKAGES: links})

        return [doc.get("guid") for doc in docs]
Example #31
    def _create_rewrite_article(self, original, digital, new_file=True):
        """
        Creates a new story and sets the metadata from original and digital
        :param original: original story
        :param digital: digital version of the story
        :param new_file: False if an existing file is used as update
        :return: new story
        """
        rewrite = dict()

        fields = ['family_id', 'event_id', 'flags']

        if new_file:
            if 'profile' in original:
                content_type = get_resource_service('content_types').find_one(
                    req=None, _id=original['profile'])
                extended_fields = list(content_type['schema'].keys())
                # extra fields needed.
                extended_fields.extend(['profile', 'associations'])
            else:
                extended_fields = [
                    'abstract', 'anpa_category', 'pubstatus', 'slugline',
                    'urgency', 'subject', 'priority', 'byline', 'dateline',
                    'headline', 'place', 'genre', 'body_footer',
                    'company_codes', 'keywords'
                ]

            fields.extend(extended_fields)

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        if digital:  # check if there's digital
            rewrite['rewrite_of'] = digital[config.ID_FIELD]
        else:  # if not use original's id
            rewrite['rewrite_of'] = original[config.ID_FIELD]

        if new_file:
            # send the document to the desk only if a new rewrite is created
            send_to(doc=rewrite,
                    desk_id=original['task']['desk'],
                    default_stage='working_stage')

        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite
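When the original has a content profile, the fields copied into the rewrite come from the profile schema rather than the hard-coded list. An illustrative document shape for the content_types lookup above; the profile id and schema keys are hypothetical.

content_type = {
    '_id': 'standard-article',  # hypothetical profile id
    'schema': {
        'headline': {'type': 'string'},
        'slugline': {'type': 'string'},
        'body_html': {'type': 'string'},
    },
}
fields = list(content_type['schema'].keys()) + ['profile', 'associations']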
Example #32
    def create(self, docs, **kwargs):
        for doc in docs:
            ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
            if not ingest_doc:
                msg = 'Failed to find ingest item with guid: %s' % doc.get('guid')
                raise SuperdeskApiError.notFoundError(msg)

            if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()

            archived = utcnow()
            superdesk.get_resource_service('ingest').patch(ingest_doc.get('_id'), {'archived': archived})
            doc['archived'] = archived

            archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
            if not archived_doc:
                dest_doc = dict(ingest_doc)
                dest_doc[config.VERSION] = 1
                send_to(dest_doc, doc.get('desk'))
                dest_doc[config.CONTENT_STATE] = STATE_FETCHED
                remove_unwanted(dest_doc)
                for ref in [ref for group in dest_doc.get('groups', [])
                            for ref in group.get('refs', []) if 'residRef' in ref]:
                    ref['location'] = ARCHIVE
                    ref['guid'] = ref['residRef']

                set_original_creator(dest_doc)
                if doc.get(PACKAGE):
                    links = dest_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    dest_doc[LINKED_IN_PACKAGES] = links
                superdesk.get_resource_service(ARCHIVE).post([dest_doc])
                insert_into_versions(dest_doc.get('guid'))
                desk = doc.get('desk')
                refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                        for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]
                if refs:
                    self.create(refs)
            else:
                if doc.get(PACKAGE):
                    links = archived_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get('_id'), {LINKED_IN_PACKAGES: links})

        return [doc.get('guid') for doc in docs]
Example #33
    def ingest_items_for(self, desk, no_of_stories, skip_index):
        desk_id = desk['_id']
        stage_id = desk['incoming_stage']

        bucket_size = min(100, no_of_stories)

        no_of_buckets = len(range(0, no_of_stories, bucket_size))

        for x in range(0, no_of_buckets):
            skip = x * bucket_size * skip_index
            logger.info('Page : {}, skip: {}'.format(x + 1, skip))
            cursor = get_resource_service('published').get_from_mongo(None, {})
            cursor.skip(skip)
            cursor.limit(bucket_size)
            items = list(cursor)
            logger.info('Inserting {} items'.format(len(items)))
            archive_items = []

            for item in items:
                dest_doc = dict(item)
                new_id = generate_guid(type=GUID_TAG)
                dest_doc[app.config['ID_FIELD']] = new_id
                dest_doc['guid'] = new_id
                generate_unique_id_and_name(dest_doc)

                dest_doc[app.config['VERSION']] = 1
                dest_doc[ITEM_STATE] = CONTENT_STATE.FETCHED
                user_id = desk.get('members', [{'user': None}])[0].get('user')
                dest_doc['original_creator'] = user_id
                dest_doc['version_creator'] = user_id

                from apps.tasks import send_to
                send_to(dest_doc,
                        desk_id=desk_id,
                        stage_id=stage_id,
                        user_id=user_id)
                # send_to() increments the version, so reset it back to 1
                dest_doc[app.config['VERSION']] = 1
                dest_doc[FAMILY_ID] = item['_id']

                remove_unwanted(dest_doc)
                archive_items.append(dest_doc)

            get_resource_service(ARCHIVE).post(archive_items)
            for item in archive_items:
                insert_into_versions(id_=item[app.config['ID_FIELD']])
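A hedged driver for the generator above; the desk lookup and the counts are illustrative, and command stands for an instance of the class this method belongs to.

# Hypothetical driver: populate a desk with 500 copies of published items.
desk = get_resource_service('desks').find_one(req=None, name='Sports')
if desk:
    command.ingest_items_for(desk, no_of_stories=500, skip_index=1)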
Example #34
    def create(self, docs, **kwargs):
        new_guids = []
        provider = self.get_provider()
        for doc in docs:
            if not doc.get('desk'):
                # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError(
                    _("Destination desk cannot be empty."))
            try:
                archived_doc = self.fetch(doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(
                    provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc,
                    update=None,
                    desk_id=doc.get('desk'),
                    stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            dest_doc[ITEM_OPERATION] = ITEM_FETCH
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        if new_guids:
            get_resource_service('search_providers').system_update(
                provider.get(config.ID_FIELD), {'last_item_update': utcnow()},
                provider)

        return new_guids
Example #35
    def create(self, docs, **kwargs):
        new_guids = []
        provider = get_resource_service('ingest_providers').find_one(
            source='aapmm', req=None)
        if provider and 'config' in provider and 'username' in provider['config']:
            self.backend.set_credentials(provider['config']['username'],
                                         provider['config']['password'])
        for doc in docs:
            if not doc.get('desk'):
                # no desk selected, so reject the request
                raise SuperdeskApiError.badRequestError(
                    "Destination desk cannot be empty.")
            try:
                archived_doc = self.backend.find_one_raw(
                    doc['guid'], doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            generate_unique_id_and_name(dest_doc)

            if provider:
                dest_doc['ingest_provider'] = str(
                    provider[superdesk.config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc,
                    update=None,
                    desk_id=doc.get('desk'),
                    stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc['_id']
            dest_doc[FAMILY_ID] = archived_doc['_id']
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('_id'))

        return new_guids
Example #36
    def create(self, docs, **kwargs):
        search_provider = get_resource_service('search_providers').find_one(search_provider=PROVIDER_NAME, req=None)

        if not search_provider or search_provider.get('is_closed', False):
            raise SuperdeskApiError.badRequestError('No search provider found or the search provider is closed.')

        if 'config' in search_provider:
            self.backend.set_credentials(search_provider['config'])

        new_guids = []
        for doc in docs:
            if not doc.get('desk'):  # if no desk is selected then it is a bad request
                raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")

            try:
                archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])
            except FileNotFoundError as ex:
                raise ProviderError.externalProviderError(ex, search_provider)

            dest_doc = dict(archived_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc[config.ID_FIELD] = new_id
            generate_unique_id_and_name(dest_doc)

            if search_provider:
                dest_doc['ingest_provider'] = str(search_provider[config.ID_FIELD])

            dest_doc[config.VERSION] = 1
            send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = archived_doc[config.ID_FIELD]
            dest_doc[FAMILY_ID] = archived_doc[config.ID_FIELD]
            dest_doc[ITEM_OPERATION] = ITEM_FETCH
            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)

            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc[config.ID_FIELD])

            get_resource_service('search_providers').system_update(search_provider[config.ID_FIELD],
                                                                   {'last_item_update': utcnow()}, search_provider)

        return new_guids
Example #37
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            send_to(doc=archived_doc, desk_id=doc.get('desk'), default_stage='working_stage')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
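This service is driven by a POST where the guid of the source item comes from the URL. A hypothetical invocation, assuming a Flask request context; the resource name 'duplicate' and the desk_id variable are assumptions.

duplicate_service = get_resource_service('duplicate')  # resource name assumed
new_guids = duplicate_service.create([{'desk': desk_id}])  # desk_id assumed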
Example #38
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            send_to(doc=archived_doc, desk_id=doc.get('desk'))
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #39
    def _move(self, archived_doc, doc):
        archive_service = get_resource_service(ARCHIVE)
        original = deepcopy(archived_doc)
        user = get_user()
        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))
        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE
        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)
        insert_into_versions(id_=original[config.ID_FIELD])
        push_item_move_notification(original, archived_doc)
Example #40
    def _move(self, archived_doc, doc):
        archive_service = get_resource_service(ARCHIVE)
        original = deepcopy(archived_doc)
        user = get_user()
        send_to(
            doc=archived_doc,
            desk_id=doc.get("task", {}).get("desk"),
            stage_id=doc.get("task", {}).get("stage"),
            user_id=user.get(config.ID_FIELD),
        )
        if archived_doc[ITEM_STATE] not in ({
                CONTENT_STATE.PUBLISHED,
                CONTENT_STATE.SCHEDULED,
                CONTENT_STATE.KILLED,
                CONTENT_STATE.RECALLED,
                CONTENT_STATE.CORRECTION,
        }):
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE
        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, "PATCH", original)

        del archived_doc[config.ID_FIELD]
        del archived_doc[config.ETAG]  # force etag update
        archived_doc["versioncreated"] = utcnow()

        signals.item_move.send(self, item=archived_doc, original=original)
        archive_service.update(original[config.ID_FIELD], archived_doc,
                               original)

        insert_into_versions(id_=original[config.ID_FIELD])
        push_item_move_notification(original, archived_doc)
        app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)

        # make sure `item._id` is there in signal
        moved_item = archived_doc.copy()
        moved_item[config.ID_FIELD] = original[config.ID_FIELD]
        signals.item_moved.send(self, item=moved_item, original=original)
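The item_move/item_moved signals sent above follow the blinker send(sender, **kwargs) convention, so a receiver can subscribe to them. A sketch, assuming superdesk.signals is importable like this and exposes blinker signals; the receiver itself is hypothetical.

from superdesk import signals  # assumed import path


def on_item_moved(sender, item, original, **kwargs):
    # minimal receiver sketch: log where the item went
    print('item %s moved to desk %s' % (
        original.get('_id'), item.get('task', {}).get('desk')))


signals.item_moved.connect(on_item_moved)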
Example #41
    def ingest_items_for(self, desk, no_of_stories, skip_index):
        desk_id = desk['_id']
        stage_id = desk['incoming_stage']

        bucket_size = min(100, no_of_stories)

        no_of_buckets = len(range(0, no_of_stories, bucket_size))

        for x in range(0, no_of_buckets):
            skip = x * bucket_size * skip_index
            logger.info('Page: {}, skip: {}'.format(x + 1, skip))
            cursor = get_resource_service('published').get_from_mongo(None, {})
            cursor.skip(skip)
            cursor.limit(bucket_size)
            items = list(cursor)
            logger.info('Inserting {} items'.format(len(items)))
            archive_items = []

            for item in items:
                dest_doc = dict(item)
                new_id = generate_guid(type=GUID_TAG)
                dest_doc[app.config['ID_FIELD']] = new_id
                dest_doc['guid'] = new_id
                generate_unique_id_and_name(dest_doc)

                dest_doc[app.config['VERSION']] = 1
                dest_doc[ITEM_STATE] = CONTENT_STATE.FETCHED
                user_id = (desk.get('members') or [{'user': None}])[0].get('user')
                dest_doc['original_creator'] = user_id
                dest_doc['version_creator'] = user_id

                from apps.tasks import send_to
                send_to(dest_doc, desk_id=desk_id, stage_id=stage_id, user_id=user_id)
                dest_doc[app.config['VERSION']] = 1  # the step above increments the version, so reset it
                dest_doc[FAMILY_ID] = item['_id']

                remove_unwanted(dest_doc)
                archive_items.append(dest_doc)

            get_resource_service(ARCHIVE).post(archive_items)
            for item in archive_items:
                insert_into_versions(id_=item[app.config['ID_FIELD']])
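The bucket count computed above via len(range(0, no_of_stories, bucket_size)) is just a ceiling division; an equivalent, arguably clearer form for illustration.

import math


def bucket_count(no_of_stories, bucket_size):
    # same value as len(range(0, no_of_stories, bucket_size)) used above
    return math.ceil(no_of_stories / bucket_size)


assert bucket_count(250, 100) == len(range(0, 250, 100)) == 3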
Example #42
    def move_content(self, id, doc):
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=id)

        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

        current_stage_of_item = archived_doc.get('task', {}).get('stage')
        if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        original = deepcopy(archived_doc)
        user = get_user()

        send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'), stage_id=doc.get('task', {}).get('stage'),
                user_id=user.get(config.ID_FIELD))

        if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
            archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
        archived_doc[ITEM_OPERATION] = ITEM_MOVE

        # set the change in desk type when content is moved.
        self.set_change_in_desk_type(archived_doc, original)
        archived_doc.pop(SIGN_OFF, None)
        set_sign_off(archived_doc, original=original)
        convert_task_attributes_to_objectId(archived_doc)
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc[config.ID_FIELD]
        archive_service.update(original[config.ID_FIELD], archived_doc, original)

        insert_into_versions(id_=original[config.ID_FIELD])

        push_content_notification([archived_doc, original])

        # finally apply any on stage rules/macros
        apply_onstage_rule(archived_doc, original[config.ID_FIELD])

        return archived_doc
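A hypothetical call into move_content(), assuming move_service is the registered service instance; the payload mirrors the task dict the method reads, and all variable names are assumptions.

updated = move_service.move_content(item_id, {  # move_service, item_id assumed
    'task': {
        'desk': target_desk_id,    # destination desk (assumed variable)
        'stage': target_stage_id,  # must differ from the item's current stage
    },
})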
Example #43
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args["guid"]

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)

            archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            archived_doc["versioncreated"] = utcnow()
            send_to(doc=archived_doc, desk_id=doc.get("desk"), default_stage="working_stage")
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get("notify", True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #44
    def ingest_items_for(self, desk, no_of_stories, skip_index):
        desk_id = desk['_id']
        stage_id = desk['incoming_stage']

        bucket_size = min(100, no_of_stories)

        no_of_buckets = len(range(0, no_of_stories, bucket_size))

        for x in range(0, no_of_buckets):
            skip = x * bucket_size * skip_index
            self.logger.info('Page: {}, skip: {}'.format(x + 1, skip))
            cursor = get_resource_service('text_archive').get_from_mongo(None, {})
            cursor.skip(skip)
            cursor.limit(bucket_size)
            items = list(cursor)
            self.logger.info('Inserting {} items'.format(len(items)))
            archive_items = []

            for item in items:
                dest_doc = dict(item)
                new_id = generate_guid(type=GUID_TAG)
                dest_doc['_id'] = new_id
                dest_doc['guid'] = new_id
                generate_unique_id_and_name(dest_doc)

                dest_doc[app.config['VERSION']] = 1
                dest_doc['state'] = 'fetched'
                user_id = (desk.get('members') or [{'user': None}])[0].get('user')
                dest_doc['original_creator'] = user_id
                dest_doc['version_creator'] = user_id
                send_to(dest_doc, desk_id=desk_id, stage_id=stage_id, user_id=user_id)
                dest_doc[FAMILY_ID] = item['_id']

                remove_unwanted(dest_doc)
                archive_items.append(dest_doc)

            get_resource_service(ARCHIVE).post(archive_items)
            for item in archive_items:
                insert_into_versions(id_=item['_id'])
Example #45
    def _create_rewrite_article(self, original, digital):
        """
        Creates a new story and sets the metadata from original and digital
        :param original: original story
        :param digital: digital version of the story
        :return: new story
        """
        rewrite = dict()
        fields = [
            "family_id",
            "abstract",
            "anpa_category",
            "pubstatus",
            "slugline",
            "urgency",
            "subject",
            "priority",
            "byline",
            "dateline",
            "headline",
            "event_id",
            "place",
        ]

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        if digital:
            # check if there's digital
            rewrite["rewrite_of"] = digital["_id"]
        else:
            # if not, use the original's id
            rewrite["rewrite_of"] = original["_id"]

        send_to(doc=rewrite, desk_id=original["task"]["desk"])
        rewrite["state"] = "in_progress"
        self._set_take_key(rewrite, original.get("event_id"))
        return rewrite
Example #46
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args['guid']

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = {}

            if doc.get('type') == 'archived':
                archived_service = get_resource_service('archived')
                req = ParsedRequest()
                query = {'query':
                         {'filtered':
                          {'filter':
                           {'bool':
                            {'must': [
                                {'term': {'item_id': doc.get('item_id')}}
                            ]}}}}, "sort": [{"_current_version": "desc"}], "size": 1}
                req.args = {'source': json.dumps(query)}
                archived_docs = archived_service.get(req=req, lookup=None)
                if archived_docs.count() > 0:
                    archived_doc = archived_docs[0]

            else:
                archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)

            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            archived_doc['versioncreated'] = utcnow()
            send_to(doc=archived_doc, desk_id=doc.get('desk'), default_stage='working_stage')
            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
Example #47
    def _create_rewrite_article(self, original, digital):
        """
        Creates a new story and sets the metadata from original and digital
        :param original: original story
        :param digital: digital version of the story
        :return: new story
        """
        rewrite = dict()
        fields = ['family_id', 'abstract', 'anpa_category', 'pubstatus', 'slugline', 'urgency', 'subject', 'priority',
                  'byline', 'dateline', 'headline', 'event_id', 'place', 'flags', 'genre', 'body_footer']

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        if digital:  # check if there's digital
            rewrite['rewrite_of'] = digital[config.ID_FIELD]
        else:  # if not, use the original's id
            rewrite['rewrite_of'] = original[config.ID_FIELD]

        send_to(doc=rewrite, desk_id=original['task']['desk'])
        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite
Example #48
    def _create_rewrite_article(self, original, digital, existing_item=None, desk_id=None):
        """Creates a new story and sets the metadata from original and digital.

        :param dict original: original story
        :param dict digital: digital version of the story
        :param dict existing_item: existing story that is being re-written
        :return: new story
        """
        rewrite = dict()

        fields = ['family_id', 'event_id', 'flags', 'language']

        if existing_item:
            # when associating an existing item as an update, merge the subjects
            subjects = original.get('subject', [])
            unique_subjects = {subject.get('qcode') for subject in subjects}
            rewrite['subject'] = [subject for subject in existing_item.get('subject', [])
                                  if subject.get('qcode') not in unique_subjects]
            rewrite['subject'].extend(subjects)
        else:
            if original.get('profile'):
                content_type = get_resource_service('content_types').find_one(req=None, _id=original['profile'])
                extended_fields = list(content_type['schema'].keys())
                # extra fields needed.
                extended_fields.extend(['profile', 'associations'])
            else:
                extended_fields = [
                    'abstract', 'anpa_category', 'pubstatus', 'slugline', 'urgency',
                    'subject', 'priority', 'byline', 'dateline', 'headline', 'place',
                    'genre', 'body_footer', 'company_codes', 'keywords',
                    'target_regions', 'target_types', 'target_subscribers'
                ]

            fields.extend(extended_fields)

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        # if the original was flagged for SMS the rewrite should not be.
        if rewrite.get('flags', {}).get('marked_for_sms', False):
            rewrite['flags']['marked_for_sms'] = False

        # SD-4595 - Default value for the update article to be set based on the system config.
        if config.RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES:
            # if True then reset to the default priority value.
            rewrite['priority'] = int(config.DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES)

        if digital:  # check if there's digital
            rewrite['rewrite_of'] = digital[config.ID_FIELD]
            rewrite['rewrite_sequence'] = (digital.get('rewrite_sequence') or 0) + 1
        else:  # if not use original's id
            rewrite['rewrite_of'] = original[config.ID_FIELD]
            rewrite['rewrite_sequence'] = (original.get('rewrite_sequence') or 0) + 1

        if not existing_item:
            # send the document to the desk only if a new rewrite is created
            send_to(doc=rewrite, desk_id=(desk_id or original['task']['desk']), default_stage='working_stage')

            # if we are rewriting a published item then copy the body_html
            if original.get('state', '') in (CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED):
                if digital:
                    rewrite['body_html'] = digital.get('body_html', '')
                else:
                    rewrite['body_html'] = original.get('body_html', '')

        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite
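The priority reset above is driven by two settings. A configuration sketch with the names taken from the example; the values are illustrative, not from any real deployment.

# settings.py sketch -- values are illustrative only
RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES = True
DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES = 6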
Example #49
def sync():
    lock_name = "ultrad"
    if not lock(lock_name):
        logger.info("lock taken %s", lock_name)
        return
    try:
        todo_stages = list(
            get_resource_service("stages").get(
                req=None, lookup={"name": app.config["ULTRAD_TODO_STAGE"]}
            )
        )
        if not todo_stages:
            logger.warning(
                "ultrad todo stage not found, name=%s", app.config["ULTRAD_TODO_STAGE"]
            )
            return
        for todo_stage in todo_stages:
            desk = get_resource_service("desks").find_one(
                req=None, _id=todo_stage["desk"]
            )
            if not desk:
                logger.warning(
                    "ultrad desk not found for stage desk=%s", todo_stage["desk"]
                )
                continue
            lookup = {"task.stage": todo_stage["_id"]}
            items = list(get_resource_service("archive").get(req=None, lookup=lookup))
            logger.info(
                "checking %d items on ultrad on desk %s", len(items), desk["name"]
            )
            for item in items:
                if not touch(lock_name, expire=300):
                    logger.warning("lost lock %s", lock_name)
                    break
                if item.get("lock_user") and item.get("lock_session"):
                    logger.info("skipping locked item guid=%s", item["guid"])
                    continue
                if item["state"] not in IN_PROGRESS_STATES:
                    logger.info(
                        "ignore item due to state guid=%s state=%s",
                        item["guid"],
                        item["state"],
                    )
                    continue
                try:
                    ultrad_id = item["extra"][ULTRAD_ID]
                except KeyError:
                    continue
                try:
                    ultrad_doc = get_document(ultrad_id)
                except UltradException:
                    continue
                if ultrad_doc["state"] == "revised":
                    try:
                        updated = item.copy()
                        updated["body_html"] = ultrad_doc["text"]["edited"]
                    except KeyError:
                        logger.info(
                            "no content in ultrad for item guid=%s ultrad_id=%s",
                            item["guid"],
                            ultrad_id,
                        )
                        continue
                    logger.info(
                        "updating item from ultrad guid=%s ultrad_id=%s",
                        item["guid"],
                        ultrad_id,
                    )
                    editor = Editor3Content(updated)
                    editor._create_state_from_html(updated["body_html"])
                    editor.update_item()
                    send_to(
                        updated, desk_id=desk["_id"], stage_id=desk["working_stage"]
                    )
                    updates = {
                        "task": updated["task"],
                        "body_html": updated["body_html"],
                        "fields_meta": updated["fields_meta"],
                    }
                    # don't use patch, it assumes there is a user
                    get_resource_service("archive").update(item["_id"], updates, item)
                    get_resource_service("archive").on_updated(updates, item)
                else:
                    logger.debug(
                        "skip updating item guid=%s ultrad_id=%s state=%s",
                        item["guid"],
                        ultrad_id,
                        ultrad_doc["state"],
                    )
    finally:
        unlock(lock_name)
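sync() guards itself with a distributed lock that it keeps refreshing while it works. The skeleton of that pattern, assuming the same lock/touch/unlock helpers are in scope; the task name and process_item() are hypothetical.

def locked_task(items):
    lock_name = 'my-task'  # illustrative lock name
    if not lock(lock_name):
        return  # another worker already holds the lock
    try:
        for item in items:
            if not touch(lock_name, expire=300):  # extend the lock's TTL
                break  # lost the lock, stop processing
            process_item(item)  # hypothetical per-item work
    finally:
        unlock(lock_name)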
Example #50
def set_german_and_publish(item, **kwargs):
    """
    Set item language to German and publish it
    """

    if item.get('language') != 'en':
        raise StopDuplication

    try:
        dest_desk_id = kwargs["dest_desk_id"]
        dest_stage_id = kwargs["dest_stage_id"]
    except KeyError:
        logger.warning(
            'missing "dest_desk_id" or "dest_stage_id"; is this macro ({name}) used in an Internal Destination?'
            .format(name=name))
        raise StopDuplication

    archive_service = get_resource_service('archive')
    archive_publish_service = get_resource_service('archive_publish')
    groups = item.get('groups', [])

    if item.get(ITEM_STATE) == CONTENT_STATE.PUBLISHED:
        new_id, updates = create_de_item(archive_service, item)

        if item[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            # we need to replace the original items with their duplicated German-language versions.
            # The package item is processed after its content items have been duplicated,
            # so we can retrieve them from the group refs
            for group in groups:
                if group.get('id') != 'root':
                    refs = group.setdefault('refs', [])
                    for ref in refs:
                        if ref.get(RESIDREF):
                            __, ref_item_id, __ = archive_service.packageService.get_associated_item(
                                ref)
                            new_item = archive_service.find_one(
                                req=None, original_id=ref_item_id)
                            if new_item is None:
                                logger.warning(
                                    "no duplicated item found for {ref_item_id}"
                                    .format(ref_item_id=ref_item_id))
                                continue

                            ref[RESIDREF] = ref['guid'] = new_item['guid']
                            ref["_current_version"] = new_item[
                                "_current_version"]
                            new_item_updates = {
                                LINKED_IN_PACKAGES:
                                new_item.get(LINKED_IN_PACKAGES, []),
                                PUBLISHED_IN_PACKAGE:
                                new_id,
                            }
                            new_item_updates[LINKED_IN_PACKAGES].append(
                                {PACKAGE: new_id})
                            archive_publish_service.patch(
                                id=new_item[config.ID_FIELD],
                                updates=new_item_updates)
                            insert_into_versions(id_=new_item[config.ID_FIELD])
            updates["groups"] = groups
            archive_publish_service.patch(id=new_id, updates=updates)
        elif len(item.get(LINKED_IN_PACKAGES, [])) > 0:
            # if item is in a package, we don't want to publish it now.
            # it will be published when the package item goes through this macro, at the end.
            archive_service.patch(id=new_id, updates=updates)
            insert_into_versions(id_=new_id)
        else:
            # this is a single item, not part of package
            archive_publish_service.patch(id=new_id, updates=updates)
            insert_into_versions(id_=new_id)
    elif item.get(ITEM_STATE) == CONTENT_STATE.CORRECTED:
        de_item = archive_service.find_one(
            req=None, processed_from=item[config.ID_FIELD])

        if not de_item:
            raise StopDuplication

        de_item_id = de_item[config.ID_FIELD]

        # "groups" is handled below
        fields_to_correct = ('abstract', 'annotations', 'anpa_category',
                             'anpa_take_key', 'archive_description',
                             'associations', 'attachments', 'authors',
                             'body_footer', 'body_html', 'body_text', 'byline',
                             'company_codes', 'creditline', 'dateline',
                             'description_text', 'deleted_groups', 'ednote',
                             'expiry', 'extra', 'fields_meta', 'genre',
                             'headline', 'keywords', 'more_coming', 'place',
                             'profile', 'sign_off', 'signal', 'slugline',
                             'sms_message', 'source', 'subject', 'urgency',
                             'word_count', 'priority')

        for field in fields_to_correct:
            if item.get(field):
                de_item[field] = item[field]

        if item[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
            # for a package, we need to synchronise references, in case items have been added to or removed from it
            de_item_groups = de_item['groups']

            # map of group id to the set of item ids in the original package (root is skipped)
            ori_group_refs = {}
            for group in groups:
                if group.get('id') != 'root':
                    ori_refs = ori_group_refs[group['id']] = set()
                    refs = group.get('refs', [])
                    for ref in refs:
                        if ref.get(RESIDREF):
                            __, ref_item_id, __ = archive_service.packageService.get_associated_item(
                                ref)
                            ori_refs.add(ref_item_id)

            # map of group id to a dict mapping each English item id to its duplicated German version;
            # it is used to compare missing/new items below
            dup_group_en_de_map = {}
            for group in de_item.get('groups', []):
                if group.get('id') != 'root':
                    en_de_map = dup_group_en_de_map[group['id']] = {}
                    refs = group.get('refs', [])
                    for ref in refs:
                        if ref.get(RESIDREF):
                            ref_item, ref_item_id, __ = archive_service.packageService.get_associated_item(
                                ref)
                            try:
                                en_de_map[
                                    ref_item['processed_from']] = ref_item_id
                            except KeyError:
                                logger.warning(
                                    'missing "processed_from" key in referenced item {ref_item_id} from package '
                                    '{item_id}'.format(
                                        ref_item_id=ref_item_id,
                                        item_id=ref_item[config.ID_FIELD]))
                                continue

            # we delete groups in the German package which don't exist in the English one
            groups_to_delete = []
            for deleted_group in dup_group_en_de_map.keys() - ori_group_refs.keys():
                for group in de_item_groups:  # search the German package, not the English one
                    if group['id'] == deleted_group:
                        groups_to_delete.append(group)
                        break
                else:
                    logger.error(
                        "Internal error: group {group_id} should exist in German item {item_id}"
                        .format(group_id=deleted_group, item_id=de_item_id))
            for group in groups_to_delete:
                de_item_groups.remove(group)

            # now we synchronise items group by group
            for ori_group_id, ori_refs in ori_group_refs.items():
                for de_group in de_item_groups:
                    if de_group['id'] == ori_group_id:
                        en_de_map = dup_group_en_de_map[ori_group_id]
                        break
                else:
                    # the group doesn't exist in the German package, so we create it
                    ori_group = next(g for g in item['groups']
                                     if g['id'] == ori_group_id)
                    de_group = {
                        'id': ori_group_id,
                        'role': ori_group['role'],
                        'refs': [],
                    }
                    de_item_groups.append(de_group)
                    de_group_root = next(g for g in de_item_groups
                                         if g['id'] == 'root')
                    de_group_root['refs'].append({"idRef": ori_group_id})
                    en_de_map = {}

                # we synchronise new items
                new_refs = ori_refs - en_de_map.keys()
                for new_ref in new_refs:
                    # we check if we already have a duplicated version of this item
                    dup_ref_item = archive_service.find_one(
                        req=None, original_id=new_ref, language="de")
                    if dup_ref_item is None:
                        # we have to duplicate the item, basically reproducing what
                        # would happen with the publish workflow
                        logger.info(
                            "creating German item for {item_id}".format(
                                item_id=new_ref))
                        ori_ref_item = archive_service.find_one(req=None,
                                                                guid=new_ref)
                        # we imitate an internal destination
                        ori_ref_item_cpy = deepcopy(ori_ref_item)
                        send_to(ori_ref_item_cpy,
                                desk_id=dest_desk_id,
                                stage_id=dest_stage_id)
                        new_id, updates = create_de_item(
                            archive_service, ori_ref_item_cpy)
                        dup_ref_item = archive_service.find_one(req=None,
                                                                guid=new_id)
                        updates[LINKED_IN_PACKAGES] = dup_ref_item.setdefault(
                            LINKED_IN_PACKAGES, [])
                        dup_ref_item[PUBLISHED_IN_PACKAGE] = updates[
                            PUBLISHED_IN_PACKAGE] = de_item_id
                        updates[LINKED_IN_PACKAGES].append(
                            {PACKAGE: de_item_id})

                        archive_publish_service.patch(id=new_id,
                                                      updates=updates)
                        insert_into_versions(id_=new_id)
                    else:
                        # the item already exists, let's update it
                        updates = {
                            LINKED_IN_PACKAGES:
                            dup_ref_item.setdefault(LINKED_IN_PACKAGES, []),
                            PUBLISHED_IN_PACKAGE:
                            de_item_id,
                        }
                        updates[LINKED_IN_PACKAGES].append(
                            {PACKAGE: de_item_id})
                        archive_service.system_update(id=dup_ref_item["guid"],
                                                      updates=updates,
                                                      original=dup_ref_item)

                    # we have the German item, now we create the ref to update the German package
                    new_ref = get_item_ref(dup_ref_item)
                    new_ref['guid'] = new_ref[RESIDREF]
                    de_group['refs'].append(new_ref)

                # now we remove items that were removed from the English version
                deleted_refs = en_de_map.keys() - ori_refs
                for deleted_ref in deleted_refs:
                    de_deleted_ref = en_de_map[deleted_ref]
                    for ref in de_group['refs']:
                        if ref.get('guid') == de_deleted_ref:
                            de_group['refs'].remove(ref)
                            break

        get_resource_service('archive_correct').patch(de_item[config.ID_FIELD],
                                                      de_item)
        insert_into_versions(id_=de_item[config.ID_FIELD])

    # we don't want duplication, so we stop the internal_destinations workflow here
    raise StopDuplication
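set_german_and_publish() also illustrates the general Internal Destination macro contract: read dest_desk_id/dest_stage_id from kwargs and raise StopDuplication to suppress the default duplication. A minimal skeleton of that contract; the macro body is illustrative.

def my_destination_macro(item, **kwargs):
    # skeleton of the Internal Destination macro contract used above
    try:
        dest_desk_id = kwargs['dest_desk_id']
        dest_stage_id = kwargs['dest_stage_id']
    except KeyError:
        raise StopDuplication  # not run from an Internal Destination

    new_item = deepcopy(item)
    send_to(new_item, desk_id=dest_desk_id, stage_id=dest_stage_id)
    # ... create/patch/publish new_item here, as in the example above ...

    raise StopDuplication  # the macro handled the item, skip default duplication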
Example #51
    def fetch(self, docs, id=None, **kwargs):
        id_of_fetched_items = []

        for doc in docs:
            id_of_item_to_be_fetched = doc.get(
                config.ID_FIELD) if id is None else id

            desk_id = doc.get('desk')
            stage_id = doc.get('stage')

            ingest_service = get_resource_service('ingest')
            ingest_doc = ingest_service.find_one(req=None,
                                                 _id=id_of_item_to_be_fetched)

            if not ingest_doc:
                raise SuperdeskApiError.notFoundError(
                    _('Failed to find ingest item with _id: {id}').format(
                        id=id_of_item_to_be_fetched))

            if not is_workflow_state_transition_valid('fetch_from_ingest',
                                                      ingest_doc[ITEM_STATE]):
                raise InvalidStateTransitionError()

            if doc.get('macro'):  # there is a macro so transform it
                ingest_doc = get_resource_service('macros').execute_macro(
                    ingest_doc, doc.get('macro'))

            archived = utcnow()
            ingest_service.patch(id_of_item_to_be_fetched,
                                 {'archived': archived})

            dest_doc = dict(ingest_doc)

            if doc.get('target'):
                dest_doc.update(doc.get('target'))

            new_id = generate_guid(type=GUID_TAG)
            id_of_fetched_items.append(new_id)
            dest_doc[config.ID_FIELD] = new_id
            dest_doc[GUID_FIELD] = new_id
            generate_unique_id_and_name(dest_doc)

            dest_doc[config.VERSION] = 1
            dest_doc['versioncreated'] = archived
            send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
            dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
            dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc[
                config.ID_FIELD]
            dest_doc[ITEM_OPERATION] = ITEM_FETCH

            remove_unwanted(dest_doc)
            set_original_creator(dest_doc)
            self.__fetch_items_in_package(
                dest_doc, desk_id, stage_id,
                doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

            self.__fetch_associated_items(
                dest_doc, desk_id, stage_id,
                doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

            desk = get_resource_service('desks').find_one(req=None,
                                                          _id=desk_id)
            if desk and desk.get('default_content_profile'):
                dest_doc['profile'] = desk['default_content_profile']

            if dest_doc.get('type', 'text') in MEDIA_TYPES:
                dest_doc['profile'] = None

            get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(doc=dest_doc)
            build_custom_hateoas(custom_hateoas, dest_doc)
            superdesk.item_fetched.send(self,
                                        item=dest_doc,
                                        ingest_item=ingest_doc)
            doc.update(dest_doc)

        if kwargs.get('notify', True):
            ingest_doc.update({'task': dest_doc.get('task')})
            push_item_move_notification(ingest_doc, doc, 'item:fetch')

        return id_of_fetched_items
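A hypothetical call into this fetch service; the keys mirror what fetch() reads from each doc, and all names are assumptions.

fetched_ids = fetch_service.fetch([{  # fetch_service is assumed
    '_id': ingest_item_id,      # ingest item to fetch (assumed variable)
    'desk': desk_id,            # destination desk (assumed variable)
    'stage': stage_id,          # optional destination stage
    'macro': 'clean_slugline',  # optional transform; macro name is illustrative
}])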
Example #52
    def create(self, docs, **kwargs):
        new_guids = []
        for doc in docs:
            ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
            if not ingest_doc:
                # see if it is in archive; if it is, duplicate it
                archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
                if archived_doc:
                    send_to(archived_doc, doc.get('desk'))
                    new_guid = superdesk.get_resource_service('archive').duplicate_content(archived_doc)
                    new_guids.append(new_guid)
                else:
                    msg = 'Failed to find ingest item with guid: %s' % doc.get('guid')
                    raise SuperdeskApiError.notFoundError(msg)
            else:
                # We are fetching from ingest
                if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
                    raise InvalidStateTransitionError()

                archived = utcnow()
                superdesk.get_resource_service('ingest').patch(ingest_doc.get('_id'), {'archived': archived})
                doc['archived'] = archived

                dest_doc = dict(ingest_doc)
                new_id = generate_guid(type=GUID_TAG)
                new_guids.append(new_id)
                dest_doc['_id'] = new_id
                dest_doc['guid'] = new_id
                generate_unique_id_and_name(dest_doc)

                dest_doc[config.VERSION] = 1
                send_to(dest_doc, doc.get('desk'))
                dest_doc[config.CONTENT_STATE] = STATE_FETCHED
                dest_doc[INGEST_ID] = ingest_doc['_id']
                dest_doc[FAMILY_ID] = ingest_doc['_id']
                remove_unwanted(dest_doc)
                for ref in [ref for group in dest_doc.get('groups', [])
                            for ref in group.get('refs', []) if 'residRef' in ref]:
                    ref['location'] = ARCHIVE
                    ref['guid'] = ref['residRef']

                set_original_creator(dest_doc)
                if doc.get(PACKAGE):
                    links = dest_doc.get(LINKED_IN_PACKAGES, [])
                    links.append({PACKAGE: doc.get(PACKAGE)})
                    dest_doc[LINKED_IN_PACKAGES] = links

                desk = doc.get('desk')
                refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                        for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]
                if refs:
                    new_ref_guids = self.create(refs)
                    count = 0
                    for ref in [ref for group in dest_doc.get('groups', [])
                                for ref in group.get('refs', []) if 'residRef' in ref]:
                        ref['residRef'] = ref['guid'] = new_ref_guids[count]
                        count += 1

                superdesk.get_resource_service(ARCHIVE).post([dest_doc])
                insert_into_versions(dest_doc.get('guid'))

                push_notification('item:fetch', item=str(ingest_doc.get('_id')))

        return new_guids
Example #53
    def create(self, docs, **kwargs):
        guid_of_item_to_be_duplicated = request.view_args["guid"]

        guid_of_duplicated_items = []

        for doc in docs:
            archive_service = get_resource_service(ARCHIVE)
            archived_doc = {}

            if doc.get("type") == "archived":
                archived_service = get_resource_service("archived")
                req = ParsedRequest()
                query = {
                    "query": {
                        "filtered": {
                            "filter": {
                                "bool": {
                                    "must": [{
                                        "term": {
                                            "item_id": doc.get("item_id")
                                        }
                                    }]
                                }
                            }
                        }
                    },
                    "sort": [{
                        "_current_version": "desc"
                    }],
                    "size": 1,
                }
                req.args = {"source": json.dumps(query)}
                archived_docs = archived_service.get(req=req, lookup=None)
                if archived_docs.count() > 0:
                    archived_doc = archived_docs[0]

            else:
                archived_doc = archive_service.find_one(
                    req=None, _id=guid_of_item_to_be_duplicated)

            self._validate(archived_doc, doc, guid_of_item_to_be_duplicated)

            # reset timestamps
            archived_doc["versioncreated"] = archived_doc[
                "firstcreated"] = utcnow()
            archived_doc["firstpublished"] = None

            remove_is_queued(archived_doc)

            send_to(
                doc=archived_doc,
                desk_id=doc.get("desk"),
                stage_id=doc.get("stage"),
                default_stage="working_stage",
                user_id=get_user_id(),
            )

            if not doc.get("desk"):  # item copied to personal space
                archived_doc["state"] = CONTENT_STATE.PROGRESS

            new_guid = archive_service.duplicate_content(archived_doc)
            guid_of_duplicated_items.append(new_guid)

        if kwargs.get("notify", True):
            push_content_notification([archived_doc])

        return guid_of_duplicated_items
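The archived lookup above buries a simple "latest version of item_id" query in nesting. A standalone sketch of the same lookup, assuming Eve's ParsedRequest (which the example appears to use) and an item_id variable.

import json

from eve.utils import ParsedRequest  # assumed import path

req = ParsedRequest()
req.args = {'source': json.dumps({
    'query': {'filtered': {'filter': {'bool': {'must': [
        {'term': {'item_id': item_id}},  # item_id is an assumed variable
    ]}}}},
    'sort': [{'_current_version': 'desc'}],
    'size': 1,
})}
results = get_resource_service('archived').get(req=req, lookup=None)
latest = results[0] if results.count() > 0 else None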
Example #54
    def _create_rewrite_article(self,
                                original,
                                existing_item=None,
                                desk_id=None):
        """Creates a new story and sets the metadata from original.

        :param dict original: original story
        :param dict existing_item: existing story that is being re-written
        :return: new story
        """
        rewrite = dict()

        fields = [
            "family_id",
            "event_id",
            "flags",
            "language",
            ASSOCIATIONS,
            "extra",
            "place",
            "organisation",
            "person",
        ]
        existing_item_preserve_fields = (ASSOCIATIONS, "flags")

        if app.config.get("COPY_ON_REWRITE_FIELDS"):
            fields.extend(app.config["COPY_ON_REWRITE_FIELDS"])

        if existing_item:
            # when associating an existing item as an update, merge the subjects
            subjects = original.get("subject", [])
            unique_subjects = {subject.get("qcode") for subject in subjects}
            rewrite["subject"] = [
                subject for subject in existing_item.get("subject", [])
                if subject.get("qcode") not in unique_subjects
            ]
            rewrite["subject"].extend(subjects)
            rewrite["flags"] = original.get("flags") or {}

            # preserve flags
            for key in rewrite.get("flags").keys():
                rewrite["flags"][
                    key] = original["flags"][key] or existing_item.get(
                        "flags", {}).get(key, False)

            original_associations = original.get(ASSOCIATIONS) or {}
            existing_associations = existing_item.get(ASSOCIATIONS) or {}
            rewrite[ASSOCIATIONS] = existing_associations

            # if the existing item has an association then preserve it
            for key, assoc in original_associations.items():
                if not existing_associations.get(key):
                    rewrite[ASSOCIATIONS][key] = assoc
        else:
            # ingest provider and source to be retained for new item
            fields.extend(["ingest_provider", "source"])

            if original.get("profile"):
                content_type = get_resource_service("content_types").find_one(
                    req=None, _id=original["profile"])
                extended_fields = list(content_type["schema"].keys())
                # extra fields needed.
                extended_fields.extend([
                    "profile", "keywords", "target_regions", "target_types",
                    "target_subscribers"
                ])
            else:
                extended_fields = [
                    "abstract",
                    "anpa_category",
                    "pubstatus",
                    "slugline",
                    "urgency",
                    "subject",
                    "priority",
                    "byline",
                    "dateline",
                    "headline",
                    "place",
                    "genre",
                    "body_footer",
                    "company_codes",
                    "keywords",
                    "target_regions",
                    "target_types",
                    "target_subscribers",
                ]

            fields.extend(extended_fields)

        for field in fields:
            if original.get(field):
                # don't overwrite some fields in existing items
                if existing_item and field in existing_item_preserve_fields:
                    continue

                rewrite[field] = original[field]

        # if the original was flagged for SMS the rewrite should not be.
        if not existing_item and rewrite.get("flags", {}).get(
                "marked_for_sms", False):
            rewrite["flags"]["marked_for_sms"] = False

        # SD-4595 - Default value for the update article to be set based on the system config.
        if config.RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES:
            # if True then reset to the default priority value.
            rewrite["priority"] = int(
                config.DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES)

        rewrite["rewrite_of"] = original[config.ID_FIELD]
        rewrite["rewrite_sequence"] = (original.get("rewrite_sequence")
                                       or 0) + 1
        rewrite.pop(PROCESSED_FROM, None)

        if not existing_item:
            from apps.tasks import send_to

            # send the document to the desk only if a new rewrite is created
            send_to(
                doc=rewrite,
                desk_id=(desk_id or original["task"]["desk"]),
                default_stage="working_stage",
                user_id=get_user_id(),
            )

            # if we are rewriting a published item then copy the body_html
            if original.get("state", "") in (CONTENT_STATE.PUBLISHED,
                                             CONTENT_STATE.CORRECTED,
                                             CONTENT_STATE.SCHEDULED):
                rewrite["body_html"] = original.get("body_html", "")

        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite)
        return rewrite
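COPY_ON_REWRITE_FIELDS lets a deployment extend the copied-field list without code changes. A configuration sketch; the field names are illustrative, borrowed from fields seen elsewhere in these examples.

# settings.py sketch -- extra fields to carry over into rewrites (names illustrative)
COPY_ON_REWRITE_FIELDS = ['ednote', 'sms_message']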
Example #55
    def _create_rewrite_article(self,
                                original,
                                existing_item=None,
                                desk_id=None):
        """Creates a new story and sets the metadata from original.

        :param dict original: original story
        :param dict existing_item: existing story that is being re-written
        :return: new story
        """
        rewrite = dict()

        fields = [
            'family_id', 'event_id', 'flags', 'language', ASSOCIATIONS, 'extra'
        ]
        existing_item_preserve_fields = (ASSOCIATIONS, 'flags')

        if app.config.get('COPY_ON_REWRITE_FIELDS'):
            fields.extend(app.config['COPY_ON_REWRITE_FIELDS'])

        if existing_item:
            # when associating an existing item as an update, merge the subjects
            subjects = original.get('subject', [])
            unique_subjects = {subject.get('qcode') for subject in subjects}
            rewrite['subject'] = [
                subject for subject in existing_item.get('subject', [])
                if subject.get('qcode') not in unique_subjects
            ]
            rewrite['subject'].extend(subjects)
            rewrite['flags'] = original.get('flags') or {}

            # preserve flags
            for key in rewrite.get('flags').keys():
                rewrite['flags'][
                    key] = original['flags'][key] or existing_item.get(
                        'flags', {}).get(key, False)

            original_associations = original.get(ASSOCIATIONS) or {}
            existing_associations = existing_item.get(ASSOCIATIONS) or {}
            rewrite[ASSOCIATIONS] = existing_associations

            # if the existing item has an association then preserve it
            for key, assoc in original_associations.items():
                if not existing_associations.get(key):
                    rewrite[ASSOCIATIONS][key] = assoc
        else:
            # ingest provider and source to be retained for new item
            fields.extend(['ingest_provider', 'source'])

            if original.get('profile'):
                content_type = get_resource_service('content_types').find_one(
                    req=None, _id=original['profile'])
                extended_fields = list(content_type['schema'].keys())
                # extra fields needed.
                extended_fields.extend([
                    'profile', 'keywords', 'target_regions', 'target_types',
                    'target_subscribers'
                ])
            else:
                extended_fields = [
                    'abstract', 'anpa_category', 'pubstatus', 'slugline',
                    'urgency', 'subject', 'priority', 'byline', 'dateline',
                    'headline', 'place', 'genre', 'body_footer',
                    'company_codes', 'keywords', 'target_regions',
                    'target_types', 'target_subscribers'
                ]

            fields.extend(extended_fields)

        for field in fields:
            if original.get(field):
                # don't overwrite some fields in existing items
                if existing_item and field in existing_item_preserve_fields:
                    continue

                rewrite[field] = original[field]

        # if the original was flagged for SMS the rewrite should not be.
        if not existing_item and rewrite.get('flags', {}).get(
                'marked_for_sms', False):
            rewrite['flags']['marked_for_sms'] = False

        # SD-4595 - Default value for the update article to be set based on the system config.
        if config.RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES:
            # if True then reset to the default priority value.
            rewrite['priority'] = int(
                config.DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES)

        rewrite['rewrite_of'] = original[config.ID_FIELD]
        rewrite['rewrite_sequence'] = (original.get('rewrite_sequence')
                                       or 0) + 1
        rewrite.pop(PROCESSED_FROM, None)

        if not existing_item:
            # send the document to the desk only if a new rewrite is created
            send_to(doc=rewrite,
                    desk_id=(desk_id or original['task']['desk']),
                    default_stage='working_stage')

            # if we are rewriting a published item then copy the body_html
            if original.get('state', '') in (CONTENT_STATE.PUBLISHED,
                                             CONTENT_STATE.CORRECTED):
                rewrite['body_html'] = original.get('body_html', '')

        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite)
        return rewrite
Example #56
    def _create_rewrite_article(self,
                                original,
                                digital,
                                existing_item=None,
                                desk_id=None):
        """
        Creates a new story and sets the metadata from original and digital
        :param dict original: original story
        :param dict digital: digital version of the story
        :param dict existing_item: existing story that is being re-written
        :return: new story
        """
        rewrite = dict()

        fields = ['family_id', 'event_id', 'flags']

        if existing_item:
            # when associating an existing item as an update, merge the subjects
            subjects = original.get('subject', [])
            unique_subjects = {subject.get('qcode') for subject in subjects}
            rewrite['subject'] = [
                subject for subject in existing_item.get('subject', [])
                if subject.get('qcode') not in unique_subjects
            ]
            rewrite['subject'].extend(subjects)
        else:
            if original.get('profile'):
                content_type = get_resource_service('content_types').find_one(
                    req=None, _id=original['profile'])
                extended_fields = list(content_type['schema'].keys())
                # extra fields needed.
                extended_fields.extend(['profile', 'associations'])
            else:
                extended_fields = [
                    'abstract', 'anpa_category', 'pubstatus', 'slugline',
                    'urgency', 'subject', 'priority', 'byline', 'dateline',
                    'headline', 'place', 'genre', 'body_footer',
                    'company_codes', 'keywords', 'target_regions',
                    'target_types', 'target_subscribers'
                ]

            fields.extend(extended_fields)

        for field in fields:
            if original.get(field):
                rewrite[field] = original[field]

        # SD-4595 - Default value for the update article to be set based on the system config.
        if config.RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES:
            # if True then reset to the default priority value.
            rewrite['priority'] = int(
                config.DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES)

        if digital:  # check if there's digital
            rewrite['rewrite_of'] = digital[config.ID_FIELD]
            rewrite['rewrite_sequence'] = (digital.get('rewrite_sequence')
                                           or 0) + 1
        else:  # if not, use the original's id
            rewrite['rewrite_of'] = original[config.ID_FIELD]
            rewrite['rewrite_sequence'] = (original.get('rewrite_sequence')
                                           or 0) + 1

        if not existing_item:
            # send the document to the desk only if a new rewrite is created
            send_to(doc=rewrite,
                    desk_id=(desk_id or original['task']['desk']),
                    default_stage='working_stage')

            # if we are rewriting a published item then copy the body_html
            if original.get('state', '') in (CONTENT_STATE.PUBLISHED,
                                             CONTENT_STATE.CORRECTED):
                if digital:
                    rewrite['body_html'] = digital.get('body_html', '')
                else:
                    rewrite['body_html'] = original.get('body_html', '')

        rewrite[ITEM_STATE] = CONTENT_STATE.PROGRESS
        self._set_take_key(rewrite, original.get('event_id'))
        return rewrite