def _move(self, archived_doc, doc):
    """Move ``archived_doc`` to the desk/stage named in ``doc['task']``.

    Adjusts workflow state and sign-off, resolves the next document
    version, persists the patch and pushes the item-move notification.
    ``archived_doc`` is mutated in place.
    """
    service = get_resource_service(ARCHIVE)
    snapshot = deepcopy(archived_doc)
    current_user = get_user()
    task = doc.get('task', {})
    send_to(
        doc=archived_doc,
        desk_id=task.get('desk'),
        stage_id=task.get('stage'),
        user_id=current_user.get(config.ID_FIELD),
    )
    # Published/scheduled/killed items keep their state across a move;
    # anything else is reset to "submitted" on arrival at the new desk.
    keep_state = (CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED)
    if archived_doc[ITEM_STATE] not in keep_state:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # Record the change in desk type caused by the move.
    self.set_change_in_desk_type(archived_doc, snapshot)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=snapshot)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', snapshot)
    # The patch payload must not carry the item id.
    del archived_doc[config.ID_FIELD]
    service.update(snapshot[config.ID_FIELD], archived_doc, snapshot)
    insert_into_versions(id_=snapshot[config.ID_FIELD])
    push_item_move_notification(snapshot, archived_doc)
    app.on_archive_item_updated(archived_doc, snapshot, ITEM_MOVE)
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    Each entry in ``docs`` names the ingest item to fetch (``id``
    overrides the per-doc id when given), the destination desk/stage,
    and an optional macro and target.

    :return: list of ids of the newly created archive items
    """
    fetched_ids = []
    for doc in docs:
        source_id = id if id is not None else doc.get(config.ID_FIELD)
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=source_id)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                _('Fail to found ingest item with _id: {id}').format(id=source_id))
        if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        macro = doc.get('macro')
        if macro:
            # A macro was requested - transform the ingest item before fetching.
            ingest_doc = get_resource_service('macros').execute_macro(
                ingest_doc,
                macro,
                dest_desk_id=desk_id,
                dest_stage_id=stage_id,
            )
        dest_doc = fetch_item(ingest_doc, desk_id, stage_id,
                              state=doc.get(ITEM_STATE), target=doc.get('target'))
        fetched_ids.append(dest_doc[config.ID_FIELD])
        # Stamp the ingest item as archived at the fetch timestamp.
        ingest_service.patch(source_id, {'archived': dest_doc['versioncreated']})
        dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get('version'))
        dest_doc[INGEST_VERSION] = ingest_doc.get('version')
        next_state = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id, next_state)
        self.__fetch_associated_items(dest_doc, desk_id, stage_id, next_state)
        desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
        if desk and desk.get('default_content_profile'):
            dest_doc.setdefault('profile', desk['default_content_profile'])
        # Media-type items never carry a content profile.
        if dest_doc.get('type', 'text') in MEDIA_TYPES:
            dest_doc['profile'] = None
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        superdesk.item_fetched.send(self, item=dest_doc, ingest_item=ingest_doc)
        doc.update(dest_doc)
        if kwargs.get('notify', True):
            ingest_doc.update({'task': dest_doc.get('task')})
            push_item_move_notification(ingest_doc, doc, 'item:fetch')
    return fetched_ids
def _move(self, archived_doc, doc):
    """Move an archive item onto the desk/stage specified in ``doc['task']``.

    Mutates ``archived_doc`` in place, persists the change and emits the
    item-move notification.
    """
    before = deepcopy(archived_doc)
    task_info = doc.get('task', {})
    send_to(doc=archived_doc,
            desk_id=task_info.get('desk'),
            stage_id=task_info.get('stage'),
            user_id=get_user().get(config.ID_FIELD))
    # Only content that is not published/scheduled/killed is reset to
    # "submitted" by a move.
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED,
                                        CONTENT_STATE.SCHEDULED,
                                        CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # Record the desk-type change caused by the move.
    self.set_change_in_desk_type(archived_doc, before)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=before)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', before)
    del archived_doc[config.ID_FIELD]  # patch payload must not carry _id
    get_resource_service(ARCHIVE).update(before[config.ID_FIELD], archived_doc, before)
    insert_into_versions(id_=before[config.ID_FIELD])
    push_item_move_notification(before, archived_doc)
def _move(self, archived_doc, doc):
    """Move ``archived_doc`` to the desk/stage carried in ``doc['task']``.

    Mutates ``archived_doc`` in place: resets workflow state where
    applicable, refreshes sign-off and version, persists the patch, and
    fires the ``item_move``/``item_moved`` signals around the update.

    :param archived_doc: the archive item being moved (mutated in place)
    :param doc: request payload whose ``task`` dict names the destination
        desk and stage
    """
    archive_service = get_resource_service(ARCHIVE)
    # Pristine pre-move copy used by the diff/version helpers below and
    # passed as ``original`` to signals and the update call.
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(
        doc=archived_doc,
        desk_id=doc.get("task", {}).get("desk"),
        stage_id=doc.get("task", {}).get("stage"),
        user_id=user.get(config.ID_FIELD),
    )
    # Items in these states keep their state across a move; everything
    # else is reset to "submitted" on arrival at the new desk.
    if archived_doc[ITEM_STATE] not in ({
        CONTENT_STATE.PUBLISHED,
        CONTENT_STATE.SCHEDULED,
        CONTENT_STATE.KILLED,
        CONTENT_STATE.RECALLED,
        CONTENT_STATE.CORRECTION,
    }):
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    # Drop the old sign-off before computing the new one.
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, "PATCH", original)
    # The patch payload must not carry the item id.
    del archived_doc[config.ID_FIELD]
    del archived_doc[config.ETAG]  # force etag update
    archived_doc["versioncreated"] = utcnow()
    # Pre-update hook: subscribers may adjust the item before it is saved.
    signals.item_move.send(self, item=archived_doc, original=original)
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_item_move_notification(original, archived_doc)
    app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
    # make sure `item._id` is there in signal
    moved_item = archived_doc.copy()
    moved_item[config.ID_FIELD] = original[config.ID_FIELD]
    signals.item_moved.send(self, item=moved_item, original=original)
def fetch(self, docs, id=None, **kwargs):
    """Fetch one or more ingest items into the archive.

    For every entry in ``docs`` a new archive document is created on the
    requested desk/stage (optionally transformed by a macro first).

    :return: list of ids of the newly created archive items
    """
    new_ids = []
    for doc in docs:
        fetch_id = id if id is not None else doc.get(config.ID_FIELD)
        desk_id = doc.get("desk")
        stage_id = doc.get("stage")
        ingest_service = get_resource_service("ingest")
        ingest_doc = ingest_service.find_one(req=None, _id=fetch_id)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError("Fail to found ingest item with _id: %s" % fetch_id)
        if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        macro_name = doc.get("macro")
        if macro_name:
            # A macro was requested - transform the ingest item first.
            ingest_doc = get_resource_service("macros").execute_macro(ingest_doc, macro_name)
        archived = utcnow()
        ingest_service.patch(fetch_id, {"archived": archived})
        # Build the archive copy of the ingest item.
        dest_doc = dict(ingest_doc)
        target = doc.get("target")
        if target:
            dest_doc.update(target)
        new_id = generate_guid(type=GUID_TAG)
        new_ids.append(new_id)
        dest_doc[config.ID_FIELD] = new_id
        dest_doc[GUID_FIELD] = new_id
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        dest_doc["versioncreated"] = archived
        send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
        state = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        dest_doc[ITEM_STATE] = state
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id, state)
        desk = get_resource_service("desks").find_one(req=None, _id=desk_id)
        if desk and desk.get("default_content_profile"):
            dest_doc["profile"] = desk["default_content_profile"]
        # Media-type items never carry a content profile.
        if dest_doc.get("type", "text") in MEDIA_TYPES:
            dest_doc["profile"] = None
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        doc.update(dest_doc)
        if kwargs.get("notify", True):
            ingest_doc.update({"task": dest_doc.get("task")})
            push_item_move_notification(ingest_doc, doc, "item:fetch")
    return new_ids
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    For each ``doc`` in ``docs`` the referenced ingest item (``id``
    overrides the per-doc id when given) is copied onto the requested
    desk/stage, optionally transformed by a macro first. A macro may
    change the item state or target, which then takes precedence over
    the values in ``doc``.

    :param docs: list of dicts carrying the ingest item id plus
        ``desk``/``stage`` and optional ``macro``/``target``/state
    :param id: optional ingest item id applied to every doc
    :param kwargs: supports ``macro_kwargs`` (extra args for the macro)
        and ``notify`` (default ``True``) to push the fetch notification
    :return: list of ids of the fetched archive items
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get(config.ID_FIELD) if id is None else id
        desk_id = doc.get("desk")
        stage_id = doc.get("stage")
        ingest_service = get_resource_service("ingest")
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                _("Fail to found ingest item with _id: {id}").format(id=id_of_item_to_be_fetched)
            )
        if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get("macro"):  # there is a macro so transform it
            macro_kwargs = kwargs.get("macro_kwargs") or {}
            ingest_doc = get_resource_service("macros").execute_macro(
                ingest_doc,
                doc.get("macro"),
                dest_desk_id=desk_id,
                dest_stage_id=stage_id,
                **macro_kwargs,
            )
        dest_doc = fetch_item(
            ingest_doc,
            desk_id,
            stage_id,
            # we might want to change state or target from the macro
            state=ingest_doc[ITEM_STATE]
            if ingest_doc.get(ITEM_STATE) and ingest_doc[ITEM_STATE] != CONTENT_STATE.INGESTED
            else doc.get(ITEM_STATE),
            target=ingest_doc.get("target", doc.get("target")),
        )
        id_of_fetched_items.append(dest_doc[config.ID_FIELD])
        # Stamp the ingest item as archived at the fetch timestamp.
        ingest_service.patch(id_of_item_to_be_fetched, {"archived": dest_doc["versioncreated"]})
        dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get("version"))
        dest_doc[INGEST_VERSION] = ingest_doc.get("version")
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        self.__fetch_associated_items(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        # Apply the desk default content profile unless one is set already.
        desk = get_resource_service("desks").find_one(req=None, _id=desk_id)
        if desk and desk.get("default_content_profile"):
            dest_doc.setdefault("profile", desk["default_content_profile"])
        # Media-type items never carry a content profile.
        if dest_doc.get("type", "text") in MEDIA_TYPES:
            dest_doc["profile"] = None
        update_refs(dest_doc, {})
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        superdesk.item_fetched.send(self, item=dest_doc, ingest_item=ingest_doc)
        doc.update(dest_doc)
        if kwargs.get("notify", True):
            ingest_doc.update({"task": dest_doc.get("task")})
            push_item_move_notification(ingest_doc, doc, "item:fetch")
    return id_of_fetched_items
def fetch(self, docs, id=None, **kwargs):
    """Create archive copies of ingest items and return their ids.

    Each ``doc`` names the ingest item (overridden by ``id`` when set),
    the destination desk/stage and an optional macro/target.
    """
    created_ids = []
    for doc in docs:
        ingest_id = doc.get(config.ID_FIELD) if id is None else id
        desk_id, stage_id = doc.get('desk'), doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=ingest_id)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found ingest item with _id: %s' % ingest_id)
        if not is_workflow_state_transition_valid('fetch_from_ingest',
                                                  ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get('macro'):
            # Transform the ingest item with the requested macro first.
            ingest_doc = get_resource_service('macros').execute_macro(
                ingest_doc, doc.get('macro'))
        fetched_at = utcnow()
        ingest_service.patch(ingest_id, {'archived': fetched_at})
        # The archive document starts as a copy of the ingest item.
        dest_doc = dict(ingest_doc)
        if doc.get('target'):
            dest_doc.update(doc.get('target'))
        guid = generate_guid(type=GUID_TAG)
        created_ids.append(guid)
        dest_doc[config.ID_FIELD] = guid
        dest_doc[GUID_FIELD] = guid
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        dest_doc['versioncreated'] = fetched_at
        send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
        if desk and desk.get('default_content_profile'):
            dest_doc['profile'] = desk['default_content_profile']
        # Media-type items never carry a content profile.
        if dest_doc.get('type', 'text') in MEDIA_TYPES:
            dest_doc['profile'] = None
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        doc.update(dest_doc)
        if kwargs.get('notify', True):
            ingest_doc.update({'task': dest_doc.get('task')})
            push_item_move_notification(ingest_doc, doc, 'item:fetch')
    return created_ids