def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """Check that an archived item may still be duplicated.

    Verifies, in order, that the item exists in the archive collection,
    that the 'duplicate' workflow transition is allowed from its current
    state, and that it is not locked by a different user.

    :param doc_in_archive: the item as stored in the archive collection
    :type doc_in_archive: dict
    :param doc: the document received as part of the request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item being duplicated
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises InvalidStateTransitionError: when the transition is invalid
    :raises SuperdeskApiError.forbiddenError: when locked by another user
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    locked_by = doc_in_archive.get('lock_user', None)
    unlock_forced = doc_in_archive.get('force_unlock', False)
    current_user = get_user()
    current_user_id = str(current_user.get(config.ID_FIELD)) if current_user else None
    if locked_by and str(locked_by) != current_user_id and not unlock_forced:
        raise SuperdeskApiError.forbiddenError(
            'The item was locked by another user')
def move_content(self, id, doc):
    """Move an archived item to the desk/stage given in ``doc['task']``.

    Validates that the item exists, that the destination stage differs
    from its current one, and that the 'submit_to_desk' transition is
    allowed, then re-routes the item, bumps its version and persists it.

    :param id: id of the item to move
    :param doc: request payload; destination is under ``doc['task']``
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises SuperdeskApiError.preconditionFailedError: same-stage move
    :raises InvalidStateTransitionError: when the transition is invalid
    :return: the updated item
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)

    # BUG FIX: the destination desk id lives under the 'desk' key of the
    # task dict (see the task schema used elsewhere in this file);
    # 'desc' was a typo that always passed None as the desk.
    send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

    if archived_doc[config.CONTENT_STATE] != 'published':
        archived_doc[config.CONTENT_STATE] = 'submitted'

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    del archived_doc['_id']
    archive_service.update(original['_id'], archived_doc, original)
    insert_into_versions(guid=original['_id'])
    return archived_doc
def update(self, id, updates, original):
    """Spike an archived item.

    Validates the 'spike' transition, stamps the expiry and the state to
    revert to on unspike, clears any rewrite link, persists the change,
    notifies clients and removes spiked refs from packages.
    """
    if not is_workflow_state_transition_valid('spike', original[ITEM_STATE]):
        raise InvalidStateTransitionError()

    package_service = PackageService()
    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

    # Desk items may override the global spike expiry.
    minutes_to_expiry = app.settings['SPIKE_EXPIRY_MINUTES']
    if is_assigned_to_a_desk(item):
        desk = get_resource_service('desks').find_one(_id=item['task']['desk'], req=None)
        minutes_to_expiry = desk.get('spike_expiry', minutes_to_expiry)

    updates[EXPIRY] = get_expiry_date(minutes_to_expiry)
    updates[REVERT_STATE] = item.get(ITEM_STATE, None)
    if original.get('rewrite_of'):
        updates['rewrite_of'] = None

    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('item:spike', item=str(item.get('_id')), user=str(user))
    package_service.remove_spiked_refs_from_package(id)
    return item
def create(self, docs, **kwargs):
    """Duplicate the item identified by the request GUID, once per doc.

    Each duplicate must target the same desk the item currently lives
    on; the 'duplicate' workflow transition must be allowed.

    :return: list of GUIDs of the newly created duplicates
    """
    source_guid = request.view_args['guid']
    new_guids = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % source_guid)

        item_desk = archived_doc.get('task', {}).get('desk')
        if item_desk is None or str(item_desk) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Duplicate is allowed within the same desk.')

        if not is_workflow_state_transition_valid('duplicate', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        send_to(doc=archived_doc, desk_id=doc.get('desk'))
        new_guids.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get('notify', True):
        push_content_notification([archived_doc])

    return new_guids
def update(self, id, updates, original):
    """Spike an archived item.

    Validates the 'spike' transition, stamps expiry and revert state,
    clears any rewrite link, persists the change and notifies clients.

    :raises InvalidStateTransitionError: when 'spike' is not allowed
    :return: the updated item
    """
    original_state = original[config.CONTENT_STATE]
    if not is_workflow_state_transition_valid("spike", original_state):
        raise InvalidStateTransitionError()

    package_service = PackageService()
    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

    expiry_minutes = app.settings["SPIKE_EXPIRY_MINUTES"]
    # check if item is in a desk. If it's then use the desks spike_expiry
    if is_assigned_to_a_desk(item):
        desk = get_resource_service("desks").find_one(_id=item["task"]["desk"], req=None)
        expiry_minutes = desk.get("spike_expiry", expiry_minutes)

    updates[EXPIRY] = get_expiry_date(expiry_minutes)
    # CONSISTENCY FIX: the state field name is config.CONTENT_STATE
    # (used for the same field above); app.config["CONTENT_STATE"] was
    # an inconsistent lookup of the same setting via the Flask config.
    updates[REVERT_STATE] = item.get(config.CONTENT_STATE, None)

    if original.get("rewrite_of"):
        updates["rewrite_of"] = None

    item = self.backend.update(self.datasource, id, updates, original)
    push_notification("item:spike", item=str(item.get("_id")), user=str(user))
    package_service.remove_spiked_refs_from_package(id)
    return item
def create(self, docs, **kwargs):
    """Create a translated copy of each requested archive item.

    Runs the translation macro, switches the item language and
    duplicates the content.

    :return: list of GUIDs of the translated copies
    """
    translated_guids = []
    for doc in docs:
        source_guid = doc.get('guid')
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % source_guid)

        if not is_workflow_state_transition_valid('translate', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        get_resource_service('macros').execute_translation_macro(
            archived_doc, archived_doc.get('language', None), doc.get('language'))
        archived_doc['language'] = doc.get('language')
        translated_guids.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get('notify', True):
        push_content_notification([archived_doc])

    return translated_guids
def create(self, docs, **kwargs):
    """Move the item identified by the request GUID to a new desk/stage.

    Rejects moves to the item's current stage and invalid workflow
    transitions; otherwise re-routes the item, marks it submitted,
    bumps its version and persists it.

    :return: list of GUIDs of the moved items
    """
    source_guid = request.view_args['guid']
    moved_guids = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % source_guid)

        item_stage = archived_doc.get('task', {}).get('stage')
        if item_stage and str(item_stage) == str(doc.get('stage')):
            raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        original = dict(archived_doc)
        send_to(archived_doc, doc.get('desk'), doc.get('stage'))
        archived_doc[config.CONTENT_STATE] = 'submitted'
        resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

        del archived_doc['_id']
        archive_service.update(original['_id'], archived_doc, original)
        insert_into_versions(guid=original['_id'])
        moved_guids.append(archived_doc['guid'])

    return moved_guids
def move_content(self, id, doc):
    """Move an archived item to the desk/stage given in ``doc['task']``.

    Validates existence, destination stage and the 'submit_to_desk'
    transition; re-routes the item, updates state/operation/sign-off,
    bumps its version and persists it.

    :param id: id of the item to move
    :param doc: request payload; destination is under ``doc['task']``
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises SuperdeskApiError.preconditionFailedError: same-stage move
    :raises InvalidStateTransitionError: when the transition is invalid
    :return: the updated item
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)
    user = get_user()

    # BUG FIX: the destination desk id lives under the 'desk' key of the
    # task dict ('desc' was a typo that always passed None as the desk).
    send_to(doc=archived_doc,
            desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))

    # Published/scheduled/killed items keep their state on move.
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE

    set_sign_off(archived_doc, original=original)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    return archived_doc
def raise_if_invalid_state_transition(self, original):
    """Raise when the publish transition is not allowed for *original*.

    The error text differs for plain text items versus packages.

    :raises InvalidStateTransitionError: when the transition is invalid
    """
    if is_workflow_state_transition_valid(self.publish_type, original[ITEM_STATE]):
        return
    if original[ITEM_TYPE] == CONTENT_TYPE.TEXT:
        error_message = "Can't {} as item state is {}"
    else:
        error_message = "Can't {} as either package state or one of the items state is {}"
    raise InvalidStateTransitionError(error_message.format(self.publish_type, original[ITEM_STATE]))
def _translate_item(self, guid, language, task=None, service=None, state=None, **kwargs):
    """Translate one item and return the GUID of the translated copy.

    Recurses into package refs, runs the translation macro, stamps
    translation metadata, duplicates the item and links the copy back
    to the source in both archive and published collections.

    :param guid: GUID of the item to translate
    :param language: target language code
    :param task: optional task (desk/stage) for the translated copy
    :param service: resource to read the item from (defaults to archive)
    :param state: optional workflow state for the translated copy
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises InvalidStateTransitionError: when 'translate' is not allowed
    """
    if not service:
        service = ARCHIVE
    archive_service = get_resource_service(service)
    macros_service = get_resource_service("macros")
    published_service = get_resource_service("published")

    item = archive_service.find_one(req=None, guid=guid)
    if not item:
        raise SuperdeskApiError.notFoundError(_("Fail to found item with guid: {guid}").format(guid=guid))
    if not is_workflow_state_transition_valid("translate", item[ITEM_STATE]):
        raise InvalidStateTransitionError()
    if item.get("language") == language:
        # Already in the target language; nothing to do.
        return guid

    if package_service.is_package(item):
        # Translate every referenced item first and rewire the refs.
        for ref in package_service.get_item_refs(item):
            ref[RESIDREF] = self._translate_item(ref[RESIDREF], language, service=ref.get("location"), task=task)

    if not item.get("translation_id"):
        item["translation_id"] = item["guid"]

    macros_service.execute_translation_macro(item, item.get("language", None), language)
    item["language"] = language
    item["translated_from"] = guid
    item["versioncreated"] = utcnow()
    item["firstcreated"] = utcnow()
    if task:
        item["task"] = task

    extra_fields = ["translation_id", "translated_from"]

    # Optional project hook to post-process translation metadata.
    metadata_macro_name = app.config.get("UPDATE_TRANSLATION_METADATA_MACRO")
    if metadata_macro_name and macros_service.get_macro_by_name(metadata_macro_name):
        macros_service.execute_macro(item, metadata_macro_name)

    translation_guid = archive_service.duplicate_item(
        item, extra_fields=extra_fields, state=state, operation="translate"
    )

    item.setdefault("translations", []).append(translation_guid)
    updates = {
        "translation_id": item["translation_id"],
        "translations": item["translations"],
    }
    archive_service.system_update(item["_id"], updates, item)
    published_service.update_published_items(item["_id"], "translation_id", item["_id"])
    published_service.update_published_items(item["_id"], "translations", item["translations"])

    if kwargs.get("notify", True):
        push_content_notification([item])

    return translation_guid
def _validate_rewrite(self, original, update):
    """Validate the article to be rewritten.

    :param original: article to be rewritten
    :param update: article to associate as the rewrite (may be falsy)
    :raises SuperdeskApiError: when a precondition fails
    :raises InvalidStateTransitionError: when the workflow forbids it
    """
    if not original:
        raise SuperdeskApiError.notFoundError(message='Cannot find the article')
    if original.get(EMBARGO):
        raise SuperdeskApiError.badRequestError("Rewrite of an Item having embargo isn't possible")
    if not original.get('event_id'):
        raise SuperdeskApiError.notFoundError(message='Event id does not exist')
    if original.get('rewritten_by'):
        raise SuperdeskApiError.badRequestError(message='Article has been rewritten before !')
    if not is_workflow_state_transition_valid('rewrite', original[ITEM_STATE]):
        raise InvalidStateTransitionError()
    if not TakesPackageService().is_last_takes_package_item(original):
        raise SuperdeskApiError.badRequestError(message="Only last take of the package can be rewritten.")
    if original.get('rewrite_of') and not (original.get(ITEM_STATE) in PUBLISH_STATES):
        raise SuperdeskApiError.badRequestError(message="Rewrite is not published. Cannot rewrite the story again.")

    if not update:
        return

    # in case of associate as update
    if update.get('rewrite_of'):
        raise SuperdeskApiError.badRequestError("Rewrite story has been used as update before !")

    blocked_states = [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED,
                      CONTENT_STATE.KILLED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.SPIKED]
    if update.get(ITEM_STATE) in blocked_states:
        raise InvalidStateTransitionError()

    if update.get(ITEM_TYPE) not in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
        raise SuperdeskApiError.badRequestError("Rewrite story can only be text or pre-formatted !")

    if update.get('genre') and \
            any(genre.get('value', '').lower() == BROADCAST_GENRE.lower() for genre in update.get('genre')):
        raise SuperdeskApiError.badRequestError("Broadcast cannot be a update story !")
def create(self, docs, **kwargs):
    """Create a translated duplicate of each requested archive item.

    :return: list of GUIDs of the translated copies
    """
    translated = []
    for doc in docs:
        guid = doc.get('guid')
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % guid)

        if not is_workflow_state_transition_valid('translate', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        # Transform the content, then switch the language.
        get_resource_service('macros').execute_translation_macro(
            archived_doc, archived_doc.get('language', None), doc.get('language'))
        archived_doc['language'] = doc.get('language')

        translated.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get('notify', True):
        push_content_notification([archived_doc])

    return translated
def create(self, docs, **kwargs):
    """Duplicate the item identified by the request GUID, once per doc.

    The duplicate must stay on the item's current desk, and the
    'duplicate' workflow transition must be allowed.

    :return: list of GUIDs of the duplicates
    """
    source_guid = request.view_args['guid']
    duplicated = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % source_guid)

        current_desk = archived_doc.get('task', {}).get('desk')
        if current_desk is None or str(current_desk) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(message='Duplicate is allowed within the same desk.')

        if not is_workflow_state_transition_valid('duplicate', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        send_to(doc=archived_doc, desk_id=doc.get('desk'))
        duplicated.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get('notify', True):
        push_content_notification([archived_doc])

    return duplicated
def _validate_rewrite(self, original):
    """Validate that an article may be rewritten.

    :param original: article to be rewritten
    :raises SuperdeskApiError: when a precondition fails
    :raises InvalidStateTransitionError: when 'rewrite' is not allowed
    """
    if not original:
        raise SuperdeskApiError.notFoundError(message='Cannot find the article')

    if original.get(EMBARGO):
        raise SuperdeskApiError.badRequestError("Rewrite of an Item having embargo isn't possible")

    if not original.get('event_id'):
        raise SuperdeskApiError.notFoundError(message='Event id does not exist')

    if get_resource_service('published').is_rewritten_before(original['_id']):
        raise SuperdeskApiError.badRequestError(message='Article has been rewritten before !')

    if not is_workflow_state_transition_valid('rewrite', original[ITEM_STATE]):
        raise InvalidStateTransitionError()

    if not TakesPackageService().is_last_takes_package_item(original):
        raise SuperdeskApiError.badRequestError(message="Only last take of the package can be rewritten.")
def create(self, docs, **kwargs):
    """Copy the item identified by the request GUID, once per doc.

    Desk items are only copyable to personal when the
    WORKFLOW_ALLOW_COPY_TO_PERSONAL setting permits it; the copy is
    then detached from the desk and owned by the current user.

    :return: list of GUIDs of the copies
    """
    source_guid = request.view_args["guid"]
    copied_guids = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                _("Fail to found item with guid: {guid}").format(guid=source_guid)
            )

        item_desk = archived_doc.get("task", {}).get("desk")
        if item_desk and not app.config["WORKFLOW_ALLOW_COPY_TO_PERSONAL"]:
            raise SuperdeskApiError.preconditionFailedError(message=_("Copy is not allowed on items in a desk."))
        elif item_desk:
            # Detach from the desk and hand ownership to the copier.
            archived_doc["task"] = {}
            archived_doc["original_creator"] = get_user_id()

        if not is_workflow_state_transition_valid("copy", archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        copied_guids.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get("notify", True):
        user = get_user()
        push_notification("item:copy", copied=1, user=str(user.get(config.ID_FIELD, "")))

    return copied_guids
def move_content(self, id, doc):
    """Move an archived item to the desk/stage given in ``doc['task']``.

    Validates existence, destination stage and the 'submit_to_desk'
    transition; re-routes the item, marks it submitted (unless already
    published/scheduled/killed), bumps its version and persists it.

    :param id: id of the item to move
    :param doc: request payload; destination is under ``doc['task']``
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises SuperdeskApiError.preconditionFailedError: same-stage move
    :raises InvalidStateTransitionError: when the transition is invalid
    :return: the updated item
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)

    # BUG FIX: the destination desk id lives under the 'desk' key of the
    # task dict ('desc' was a typo that always passed None as the desk).
    send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

    if archived_doc[config.CONTENT_STATE] not in ['published', 'scheduled', 'killed']:
        archived_doc[config.CONTENT_STATE] = 'submitted'

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    del archived_doc['_id']
    archive_service.update(original['_id'], archived_doc, original)
    insert_into_versions(id_=original['_id'])
    return archived_doc
def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """Validate that an archived item is still eligible for duplication.

    Checks that the item exists in the archive collection, that the
    'duplicate' workflow transition is valid from its state, and that
    no other user holds a lock on it.

    :param doc_in_archive: the item as stored in the archive collection
    :type doc_in_archive: dict
    :param doc: the document received as part of the request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item being duplicated
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises InvalidStateTransitionError: when the transition is invalid
    :raises SuperdeskApiError.forbiddenError: when locked by another user
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_holder = doc_in_archive.get('lock_user', None)
    is_force_unlock = doc_in_archive.get('force_unlock', False)
    requester = get_user()
    requester_id = str(requester.get(config.ID_FIELD)) if requester else None
    if lock_holder and str(lock_holder) != requester_id and not is_force_unlock:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')
def update(self, id, updates, original):
    """Spike an archived item.

    Validates the 'spike' transition, stamps expiry (per-desk override
    if assigned) and the revert state, clears any rewrite link,
    persists the change and notifies clients.
    """
    original_state = original[ITEM_STATE]
    if not is_workflow_state_transition_valid('spike', original_state):
        raise InvalidStateTransitionError()

    package_service = PackageService()
    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)

    expiry_minutes = app.settings['SPIKE_EXPIRY_MINUTES']
    # Items assigned to a desk use that desk's spike expiry if set.
    if is_assigned_to_a_desk(item):
        desks_service = get_resource_service('desks')
        desk = desks_service.find_one(_id=item['task']['desk'], req=None)
        expiry_minutes = desk.get('spike_expiry', expiry_minutes)

    updates[EXPIRY] = get_expiry_date(expiry_minutes)
    updates[REVERT_STATE] = item.get(ITEM_STATE, None)

    if original.get('rewrite_of'):
        updates['rewrite_of'] = None

    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('item:spike', item=str(item.get('_id')), user=str(user))
    package_service.remove_spiked_refs_from_package(id)
    return item
def create(self, docs, **kwargs):
    """Copy the item identified by the request GUID, once per doc.

    Copy is only allowed for items not assigned to a desk.

    :return: list of GUIDs of the copies
    """
    source_guid = request.view_args['guid']
    copied = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % source_guid)

        if archived_doc.get('task', {}).get('desk'):
            raise SuperdeskApiError.preconditionFailedError(message='Copy is not allowed on items in a desk.')

        if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        copied.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get('notify', True):
        push_notification('item:copy', copied=1)

    return copied
def create(self, docs, **kwargs):
    """Fetch ingest items into the archive (legacy task-based variant).

    Marks the ingest item archived, creates the archive record if it
    does not already exist, then kicks off the async archive task and
    seeds its progress status.

    :return: list of GUIDs of the processed docs
    """
    for doc in docs:
        ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
        if not ingest_doc:
            msg = 'Fail to found ingest item with guid: %s' % doc.get('guid')
            raise SuperdeskError(payload=msg)

        if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        mark_ingest_as_archived(ingest_doc=ingest_doc)

        archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
        if not archived_doc:
            # First fetch: build and store the archive record.
            create_from_ingest_doc(doc, ingest_doc)
            send_to(doc, doc.get('desk'))
            superdesk.get_resource_service(ARCHIVE).post([doc])

        task = archive_item.delay(doc.get('guid'), ingest_doc.get('ingest_provider'), get_user())
        doc['task_id'] = task.id
        if task.state not in ('PROGRESS', states.SUCCESS, states.FAILURE) and not task.result:
            update_status(task.id, 0, 0)

    return [doc.get('guid') for doc in docs]
def create(self, docs, **kwargs):
    """Copy the item identified by the request GUID, once per doc.

    Copy is only allowed for items not assigned to a desk.

    :return: list of GUIDs of the copies
    """
    source_guid = request.view_args['guid']
    copied = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=source_guid)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(_(
                'Fail to found item with guid: {guid}').format(guid=source_guid))

        if archived_doc.get('task', {}).get('desk'):
            raise SuperdeskApiError.preconditionFailedError(message=_('Copy is not allowed on items in a desk.'))

        if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        copied.append(archive_service.duplicate_content(archived_doc))

    if kwargs.get('notify', True):
        user = get_user()
        push_notification('item:copy', copied=1, user=str(user.get(config.ID_FIELD, '')))

    return copied
def update(self, id, updates, original):
    """Spike an archived item.

    Stamps expiry and revert state, severs rewrite/broadcast relations,
    clears locks, empties packages (preserving the old group layout for
    unspike), persists the change, records history and notifies.
    """
    if not is_workflow_state_transition_valid(ITEM_SPIKE, original[ITEM_STATE]):
        raise InvalidStateTransitionError()

    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    task = item.get('task', {})

    updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get('desk'), stage_id=task.get('stage'))
    updates[REVERT_STATE] = item.get(ITEM_STATE, None)

    # Sever every relation with linked items.
    for relation in ('rewrite_of', 'rewritten_by', 'broadcast', 'rewrite_sequence'):
        if original.get(relation):
            updates[relation] = None
    updates[ITEM_EVENT_ID] = generate_guid(type=GUID_TAG)

    # Drop any leftover lock.
    updates.update({
        'lock_user': None,
        'lock_session': None,
    })

    if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
        # Unlink each contained item from this package.
        package_service = PackageService()
        for ref in package_service.get_item_refs(original):
            package_item = get_resource_service(ARCHIVE).find_one(req=None, _id=ref[GUID_FIELD])
            if package_item:
                remaining_links = [linked for linked in package_item.get(LINKED_IN_PACKAGES, [])
                                   if linked.get(PACKAGE) != original.get(config.ID_FIELD)]
                super().system_update(package_item[config.ID_FIELD],
                                      {LINKED_IN_PACKAGES: remaining_links},
                                      package_item)

        # keep the structure of old group in order to be able to unspike the package
        updates[DELETED_GROUPS] = original[GROUPS]
        # and remove all the items from the package
        updates['groups'] = []

    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('item:spike', item=str(id), user=str(user.get(config.ID_FIELD)))

    history_updates = dict(updates)
    if original.get('task'):
        history_updates['task'] = original.get('task')
    app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)

    self._removed_refs_from_package(id)
    return item
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    For each doc: validates the ingest item and the 'fetch_from_ingest'
    transition, optionally runs a macro, creates the archive copy
    (including packaged and associated items), applies the desk's
    default content profile and emits fetch notifications.

    :param docs: request docs carrying desk/stage/macro/state/target
    :param id: optional explicit ingest id overriding each doc's id
    :return: list of ids of the fetched archive items
    """
    fetched_ids = []
    for doc in docs:
        ingest_id = doc.get(config.ID_FIELD) if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')

        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=ingest_id)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                _('Fail to found ingest item with _id: {id}').format(id=ingest_id))

        if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()

        if doc.get('macro'):  # there is a macro so transform it
            ingest_doc = get_resource_service('macros').execute_macro(
                ingest_doc,
                doc.get('macro'),
                dest_desk_id=desk_id,
                dest_stage_id=stage_id,
            )

        dest_doc = fetch_item(ingest_doc, desk_id, stage_id, state=doc.get(ITEM_STATE), target=doc.get('target'))
        fetched_ids.append(dest_doc[config.ID_FIELD])
        ingest_service.patch(ingest_id, {'archived': dest_doc['versioncreated']})

        dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
        dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get('version'))
        dest_doc[INGEST_VERSION] = ingest_doc.get('version')

        self.__fetch_items_in_package(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        self.__fetch_associated_items(dest_doc, desk_id, stage_id, doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))

        desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
        if desk and desk.get('default_content_profile'):
            dest_doc.setdefault('profile', desk['default_content_profile'])

        # Media items never carry a content profile.
        if dest_doc.get('type', 'text') in MEDIA_TYPES:
            dest_doc['profile'] = None

        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        superdesk.item_fetched.send(self, item=dest_doc, ingest_item=ingest_doc)
        doc.update(dest_doc)

        if kwargs.get('notify', True):
            ingest_doc.update({'task': dest_doc.get('task')})
            push_item_move_notification(ingest_doc, doc, 'item:fetch')

    return fetched_ids
def update(self, id, updates, original):
    """Spike an archived item.

    Stamps expiry and revert state, severs rewrite/broadcast relations,
    empties packages, persists the change, records history and notifies.
    """
    if not is_workflow_state_transition_valid(ITEM_SPIKE, original[ITEM_STATE]):
        raise InvalidStateTransitionError()

    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    task = item.get('task', {})

    updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get('desk'), stage_id=task.get('stage'))
    updates[REVERT_STATE] = item.get(ITEM_STATE, None)

    # Sever every relation with linked items.
    for relation in ('rewrite_of', 'rewritten_by', 'broadcast', 'rewrite_sequence'):
        if original.get(relation):
            updates[relation] = None
    updates[ITEM_EVENT_ID] = generate_guid(type=GUID_TAG)

    if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
        # Unlink each contained item from this package.
        package_service = PackageService()
        for ref in package_service.get_item_refs(original):
            package_item = get_resource_service(ARCHIVE).find_one(req=None, _id=ref[GUID_FIELD])
            if package_item:
                remaining_links = [
                    linked for linked in package_item.get(LINKED_IN_PACKAGES, [])
                    if linked.get(PACKAGE) != original.get(config.ID_FIELD)
                ]
                super().system_update(package_item[config.ID_FIELD],
                                      {LINKED_IN_PACKAGES: remaining_links},
                                      package_item)

        # and remove all the items from the package
        updates['groups'] = []

    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('item:spike', item=str(id), user=str(user.get(config.ID_FIELD)))

    history_updates = dict(updates)
    if original.get('task'):
        history_updates['task'] = original.get('task')
    app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)

    self._removed_refs_from_package(id)
    return item
def raise_if_invalid_state_transition(self, original):
    """Raise when the publish transition is not allowed for *original*.

    The error text differs for plain text items versus packages.

    :raises InvalidStateTransitionError: when the transition is invalid
    """
    state = original[ITEM_STATE]
    if is_workflow_state_transition_valid(self.publish_type, state):
        return
    is_text_item = original[ITEM_TYPE] == CONTENT_TYPE.TEXT
    template = ("Can't {} as item state is {}" if is_text_item
                else "Can't {} as either package state or one of the items state is {}")
    raise InvalidStateTransitionError(template.format(self.publish_type, state))
def _translate_item(self, guid, language, task=None, service=None, **kwargs):
    """Translate one item and return the id of the translated copy.

    Recurses into package refs, assigns a translation_id on first
    translation, runs the translation macro, stamps metadata and
    duplicates the item.

    :param guid: id of the item to translate
    :param language: target language code
    :param task: optional task (desk/stage) for the translated copy
    :param service: resource to read the item from (defaults to archive)
    :raises SuperdeskApiError.notFoundError: when the item is missing
    :raises InvalidStateTransitionError: when 'translate' is not allowed
    """
    archive_service = get_resource_service(service or ARCHIVE)
    macros_service = get_resource_service('macros')
    published_service = get_resource_service('published')

    item = archive_service.find_one(req=None, _id=guid)
    if not item:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % guid)
    if not is_workflow_state_transition_valid('translate', item[ITEM_STATE]):
        raise InvalidStateTransitionError()
    if item.get('language') == language:
        # Already in the target language; nothing to do.
        return guid

    if package_service.is_package(item):
        # Translate every referenced item first and rewire the refs.
        for ref in package_service.get_item_refs(item):
            ref[RESIDREF] = self._translate_item(
                ref[RESIDREF], language, service=ref.get('location'), task=task)

    if not item.get('translation_id'):
        # First translation of this item: it becomes its own group root.
        archive_service.system_update(item['_id'], {'translation_id': item['_id']}, item)
        item['translation_id'] = item['_id']
        published_service.update_published_items(item['_id'], 'translation_id', item['_id'])

    macros_service.execute_translation_macro(item, item.get('language', None), language)
    item['language'] = language
    item['translated_from'] = guid
    item['versioncreated'] = utcnow()
    item['firstcreated'] = utcnow()
    if task:
        item['task'] = task

    translated_id = archive_service.duplicate_item(item, operation='translate')

    if kwargs.get('notify', True):
        push_content_notification([item])

    return translated_id
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive (legacy variant).

    For each doc: validates the ingest item and the 'fetch_from_ingest'
    transition, optionally runs a macro, clones the ingest item into a
    fresh archive document (new guid, version 1, routed to desk/stage)
    and persists it with version history.

    :param docs: request docs carrying desk/stage/macro/state
    :param id: optional explicit ingest id overriding each doc's _id
    :return: list of ids of the fetched archive items
    """
    fetched_ids = []
    for doc in docs:
        ingest_id = doc.get('_id') if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')

        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=ingest_id)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found ingest item with _id: %s' % ingest_id)

        if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        if doc.get('macro'):  # there is a macro so transform it
            ingest_doc = get_resource_service('macros').execute_macro(ingest_doc, doc.get('macro'))

        archived = utcnow()
        ingest_service.patch(ingest_id, {'archived': archived})

        # Clone into a brand-new archive document.
        dest_doc = dict(ingest_doc)
        new_id = generate_guid(type=GUID_TAG)
        fetched_ids.append(new_id)
        dest_doc['_id'] = new_id
        dest_doc['guid'] = new_id
        dest_doc['destination_groups'] = doc.get('destination_groups')
        generate_unique_id_and_name(dest_doc)

        dest_doc[config.VERSION] = 1
        send_to(dest_doc, desk_id, stage_id)
        dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)

        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get('state', STATE_FETCHED),
                                      doc.get('destination_groups'))

        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        doc.update(dest_doc)

    if kwargs.get('notify', True):
        push_notification('item:fetch', fetched=1)

    return fetched_ids
def __update_state(self, updates, original):
    """Set the workflow state on *updates* when the item is assigned to a new desk.

    Raises InvalidStateTransitionError when a 'move' is not a legal
    transition from the item's current state.
    """
    if not self.__is_content_assigned_to_new_desk(original, updates):
        return
    if not is_workflow_state_transition_valid('move', original[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()
    # leaving a desk demotes the item to draft, arriving at one marks it submitted
    if self.__is_content_moved_from_desk(updates):
        updates[config.CONTENT_STATE] = 'draft'
    else:
        updates[config.CONTENT_STATE] = 'submitted'
    resolve_document_version(updates, ARCHIVE, 'PATCH', original)
def __update_state(self, updates, original):
    """Set the workflow state on *updates* when the item is assigned to a new desk.

    Raises InvalidStateTransitionError when 'move' is not a legal
    transition from the original item's state.
    """
    if not self.__is_content_assigned_to_new_desk(original, updates):
        return
    if not is_workflow_state_transition_valid('move', original[ITEM_STATE]):
        raise InvalidStateTransitionError()
    # leaving a desk demotes the item to draft, arriving at one marks it submitted
    if self.__is_content_moved_from_desk(updates):
        updates[ITEM_STATE] = CONTENT_STATE.DRAFT
    else:
        updates[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    resolve_document_version(updates, ARCHIVE, 'PATCH', original)
def __update_state(self, updates, original):
    """Update the workflow state when the content moves to a new desk."""
    moved = self.__is_content_assigned_to_new_desk(original, updates)
    if not moved:
        return
    previous_state = original[ITEM_STATE]
    if not is_workflow_state_transition_valid('move', previous_state):
        raise InvalidStateTransitionError()
    # moved off a desk -> draft; moved onto a desk -> submitted
    new_state = (CONTENT_STATE.DRAFT
                 if self.__is_content_moved_from_desk(updates)
                 else CONTENT_STATE.SUBMITTED)
    updates[ITEM_STATE] = new_state
    resolve_document_version(updates, ARCHIVE, 'PATCH', original)
def _validate(self, archived_doc, doc):
    """Validate that the item can be moved.

    :param dict archived_doc: item to be moved
    :param dict doc: new location details
    :raises SuperdeskApiError.preconditionFailedError: destination stage equals current stage
    :raises InvalidStateTransitionError: 'submit_to_desk' not valid from the item's state
    """
    source_stage = archived_doc.get('task', {}).get('stage')
    target_stage = doc.get('task', {}).get('stage')
    # stage ids may be ObjectId vs str, so compare their string forms
    if source_stage and str(source_stage) == str(target_stage):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
def _validate(self, archived_doc, doc):
    """Check that moving *archived_doc* to the location in *doc* is allowed.

    :param dict archived_doc: item to be moved
    :param dict doc: new location details
    """
    current_stage = archived_doc.get('task', {}).get('stage')
    requested_stage = doc.get('task', {}).get('stage')
    same_stage = current_stage and str(current_stage) == str(requested_stage)
    if same_stage:
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')
    state_ok = is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE])
    if not state_ok:
        raise InvalidStateTransitionError()
def _validate_rewrite(self, original, update):
    """Validates the article to be rewritten.

    :param original: article to be rewritten
    :param update: article as the rewrite
    :raises: SuperdeskApiError
    """
    if not original:
        raise SuperdeskApiError.notFoundError(message=_('Cannot find the article'))
    if original.get(EMBARGO):
        raise SuperdeskApiError.badRequestError(_("Rewrite of an Item having embargo isn't possible"))
    if not original.get('event_id'):
        raise SuperdeskApiError.notFoundError(message=_('Event id does not exist'))
    if original.get('rewritten_by'):
        raise SuperdeskApiError.badRequestError(message=_('Article has been rewritten before !'))
    if (not is_workflow_state_transition_valid('rewrite', original[ITEM_STATE])
            and not config.ALLOW_UPDATING_SCHEDULED_ITEMS):
        raise InvalidStateTransitionError()
    if (
        original.get('rewrite_of')
        and not (original.get(ITEM_STATE) in PUBLISH_STATES)
        and not app.config['WORKFLOW_ALLOW_MULTIPLE_UPDATES']
    ):
        # BUG FIX: the message was a single string literal broken across a
        # raw newline (invalid syntax); rejoined into one sentence pair.
        raise SuperdeskApiError.badRequestError(
            message=_("Rewrite is not published. Cannot rewrite the story again."))
    if update:
        # in case of associate as update
        if update.get('rewrite_of'):
            raise SuperdeskApiError.badRequestError(_("Rewrite story has been used as update before !"))
        if update.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED,
                                      CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED,
                                      CONTENT_STATE.SCHEDULED, CONTENT_STATE.SPIKED]:
            raise InvalidStateTransitionError()
        if update.get(ITEM_TYPE) not in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            raise SuperdeskApiError.badRequestError(_("Rewrite story can only be text or pre-formatted !"))
        if update.get('genre') and \
                any(genre.get('value', '').lower() == BROADCAST_GENRE.lower()
                    for genre in update.get('genre')):
            raise SuperdeskApiError.badRequestError(_("Broadcast cannot be a update story !"))
        if original.get('profile') and original.get('profile') != update.get('profile'):
            raise SuperdeskApiError.badRequestError(_("Rewrite item content profile does "
                                                      "not match with Original item."))
def __update_state(self, updates, original):
    """When the content is assigned to a new desk, validate and set its state."""
    if not self.__is_content_assigned_to_new_desk(original, updates):
        return
    # preconditions: the 'move' transition must be legal from the current state
    state_before = original[config.CONTENT_STATE]
    if not is_workflow_state_transition_valid('move', state_before):
        raise InvalidStateTransitionError()
    if self.__is_content_moved_from_desk(updates):
        updates[config.CONTENT_STATE] = 'draft'
    else:
        updates[config.CONTENT_STATE] = 'submitted'
    resolve_document_version(updates, ARCHIVE, 'PATCH', original)
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    Copies each referenced ingest item into ARCHIVE under a new guid,
    routes it to the requested desk/stage and records ingest lineage.

    :param docs: list of dicts with the ingest '_id', 'desk', 'stage'
        and an optional 'macro' to run on the item
    :param id: when given, overrides every doc's '_id'
    :return: list of the newly generated archive ids
    :raises SuperdeskApiError.notFoundError: if an ingest item is missing
    :raises InvalidStateTransitionError: if fetching is not a valid
        transition from the ingest item's state
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get('_id') if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError('Fail to found ingest item with _id: %s' % id_of_item_to_be_fetched)
        if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        if doc.get('macro'):  # there is a macro so transform it
            ingest_doc = get_resource_service('macros').execute_macro(ingest_doc, doc.get('macro'))
        archived = utcnow()
        # stamp the ingest item as archived before cloning it
        ingest_service.patch(id_of_item_to_be_fetched, {'archived': archived})
        dest_doc = dict(ingest_doc)
        new_id = generate_guid(type=GUID_TAG)
        id_of_fetched_items.append(new_id)
        dest_doc['_id'] = new_id
        dest_doc['guid'] = new_id
        dest_doc['destination_groups'] = doc.get('destination_groups')
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        send_to(dest_doc, desk_id, stage_id)
        dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
        # keep lineage back to the originating ingest item
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        # packages: fetch all referenced items as well
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get('state', STATE_FETCHED),
                                      doc.get('destination_groups'))
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        doc.update(dest_doc)
    if kwargs.get('notify', True):
        push_notification('item:fetch', fetched=1)
    return id_of_fetched_items
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    Clones each referenced ingest item into ARCHIVE under a new guid,
    routes it to the requested desk/stage, and records the ingest
    lineage and the fetch operation.

    :param docs: list of dicts with the ingest "_id", "desk", "stage"
        and an optional "macro" to run on the item
    :param id: when given, overrides every doc's "_id"
    :return: list of the newly generated archive ids
    :raises SuperdeskApiError.notFoundError: if an ingest item is missing
    :raises InvalidStateTransitionError: if fetching is not a valid
        transition from the ingest item's state
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get("_id") if id is None else id
        desk_id = doc.get("desk")
        stage_id = doc.get("stage")
        ingest_service = get_resource_service("ingest")
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                "Fail to found ingest item with _id: %s" % id_of_item_to_be_fetched
            )
        if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get("macro"):  # there is a macro so transform it
            ingest_doc = get_resource_service("macros").execute_macro(ingest_doc, doc.get("macro"))
        archived = utcnow()
        # stamp the ingest item as archived before cloning it
        ingest_service.patch(id_of_item_to_be_fetched, {"archived": archived})
        dest_doc = dict(ingest_doc)
        new_id = generate_guid(type=GUID_TAG)
        id_of_fetched_items.append(new_id)
        dest_doc["_id"] = new_id
        dest_doc["guid"] = new_id
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep lineage back to the originating ingest item
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc["_id"]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        # packages: fetch all referenced items as well
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        doc.update(dest_doc)
    if kwargs.get("notify", True):
        push_notification("item:fetch", fetched=1)
    return id_of_fetched_items
def move_content(self, id, doc):
    """Move the archive item *id* to the desk/stage given in *doc*.

    Validates the move, re-routes the item, bumps its version, persists
    the change, notifies clients and applies incoming-stage macros.

    :param id: id of the item to move
    :param doc: dict carrying the destination under doc['task']
    :return: the updated item
    :raises SuperdeskApiError.notFoundError: if the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when the
        destination stage equals the current stage
    :raises InvalidStateTransitionError: if 'submit_to_desk' is not a
        valid transition from the item's state
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)
    if not archived_doc:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % id)
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    # stage ids may differ in type (ObjectId vs str), compare as strings
    if current_stage_of_item and str(current_stage_of_item) == str(
            doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(
            message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
    # keep a pristine copy for versioning/diffing before mutating the doc
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state when moved
    if archived_doc[ITEM_STATE] not in {
            CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED
    }:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # _id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])
    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])
    return archived_doc
def on_update(self, updates, original):
    """Validate the item before it is published.

    Rejects items marked not-for-publication, items whose state does not
    allow this publish type, and items failing schema validation.
    """
    if original.get('marked_for_not_publication', False):
        raise SuperdeskApiError.badRequestError(
            message='Cannot publish an item which is marked as Not for Publication')
    if not is_workflow_state_transition_valid(self.publish_type, original[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()
    # run the configured content validators against the updated fields
    payload = {'act': self.publish_type, 'type': original['type'], 'validate': updates}
    errors = get_resource_service('validate').post([payload])
    if errors[0]:
        raise ValidationError(errors)
def update_state(self, original, updates):
    """Assign a workflow state to *updates* on save.

    Items already 'ingested' or 'in_progress' are left alone. Otherwise
    the 'save' transition is validated, then workspace content becomes
    'draft' and desk content becomes 'in_progress'.
    """
    state = original.get(config.CONTENT_STATE)
    if state in ('ingested', 'in_progress'):
        return
    if not is_workflow_state_transition_valid('save', state):
        raise InvalidStateTransitionError()
    if not self._is_req_for_save(updates):
        return
    if original.get('task', {}).get('desk', None) is None:
        # content is on workspace
        if state != 'draft':
            updates[config.CONTENT_STATE] = 'draft'
    else:
        # content is on a desk
        updates[config.CONTENT_STATE] = 'in_progress'
def on_update(self, updates, original):
    """Validate the item before publishing.

    Checks the not-for-publication flag, the workflow transition, that
    the item was not already published, and the content validators.
    """
    if original.get('marked_for_not_publication', False):
        raise SuperdeskApiError.badRequestError(
            message='Cannot publish an item which is marked as Not for Publication')
    if not is_workflow_state_transition_valid(self.publish_type, original[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()
    published_service = get_resource_service('published')
    if original.get('item_id') and published_service.is_published_before(original['item_id']):
        raise PublishQueueError.post_publish_exists_error(
            Exception('Story with id:{}'.format(original['_id'])))
    # run the configured content validators against the updated fields
    errors = get_resource_service('validate').post(
        [{'act': self.publish_type, 'validate': updates}])
    if errors[0]:
        raise ValidationError(errors)
def update(self, id, updates, original):
    """Unspike the item *id*, restoring its pre-spike state.

    :return: the refreshed item after the update
    :raises InvalidStateTransitionError: if 'unspike' is not valid from
        the item's current state
    """
    if not is_workflow_state_transition_valid("unspike", original[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()
    user = get_user(required=True)
    archive_service = get_resource_service(ARCHIVE)
    item = archive_service.find_one(req=None, _id=id)
    updates.update(self.get_unspike_updates(item))
    self.backend.update(self.datasource, id, updates, original)
    # re-read so callers get the persisted document
    item = archive_service.find_one(req=None, _id=id)
    push_notification("item:unspike", item=str(id), user=str(user))
    return item
def update(self, id, updates, original):
    """Unspike the item *id*, restoring its pre-spike state.

    :return: the refreshed item after the update
    :raises InvalidStateTransitionError: if 'unspike' is not valid from
        the item's current state
    """
    if not is_workflow_state_transition_valid('unspike', original[ITEM_STATE]):
        raise InvalidStateTransitionError()
    user = get_user(required=True)
    archive_service = get_resource_service(ARCHIVE)
    item = archive_service.find_one(req=None, _id=id)
    self.set_unspike_updates(item, updates)
    self.backend.update(self.datasource, id, updates, original)
    # re-read so callers get the persisted document
    item = archive_service.find_one(req=None, _id=id)
    push_notification('item:unspike', item=str(id),
                      user=str(user.get(config.ID_FIELD)))
    return item
def _validate_correction(self, original):
    """Validates the article to be corrected.

    :param original: article to be corrected.
    :raises: SuperdeskApiError
    """
    if not original:
        raise SuperdeskApiError.notFoundError(message=_("Cannot find the article"))
    transition_ok = is_workflow_state_transition_valid("correction", original[ITEM_STATE])
    # scheduled items may still be corrected when the config flag allows it
    if not transition_ok and not config.ALLOW_UPDATING_SCHEDULED_ITEMS:
        raise InvalidStateTransitionError()
def create(self, docs, **kwargs):
    """Fetch each doc's ingest item into the archive.

    When the item is not yet in the archive it is copied there, routed
    to the requested desk and, for packages, its referenced items are
    fetched recursively. When it already exists only package links are
    updated.

    :param docs: list of dicts with the ingest "guid", a "desk" and an
        optional PACKAGE linking the item into a package
    :return: list of the processed guids
    :raises SuperdeskApiError.notFoundError: if an ingest item is missing
    :raises InvalidStateTransitionError: if the fetch transition is not
        valid from the ingest item's state
    """
    for doc in docs:
        ingest_doc = superdesk.get_resource_service("ingest").find_one(req=None, _id=doc.get("guid"))
        if not ingest_doc:
            msg = "Fail to found ingest item with guid: %s" % doc.get("guid")
            raise SuperdeskApiError.notFoundError(msg)
        if not is_workflow_state_transition_valid("fetch_as_from_ingest", ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        archived = utcnow()
        # stamp the ingest item (and the request doc) as archived
        superdesk.get_resource_service("ingest").patch(ingest_doc.get("_id"), {"archived": archived})
        doc["archived"] = archived
        archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get("guid"))
        if not archived_doc:
            dest_doc = dict(ingest_doc)
            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get("desk"))
            dest_doc[config.CONTENT_STATE] = STATE_FETCHED
            remove_unwanted(dest_doc)
            # package refs must now point at the archive copies
            for ref in [
                ref
                for group in dest_doc.get("groups", [])
                for ref in group.get("refs", [])
                if "residRef" in ref
            ]:
                ref["location"] = ARCHIVE
                ref["guid"] = ref["residRef"]
            set_original_creator(dest_doc)
            if doc.get(PACKAGE):
                links = dest_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                dest_doc[LINKED_IN_PACKAGES] = links
            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get("guid"))
            desk = doc.get("desk")
            # recursively fetch every item the package references
            refs = [
                {"guid": ref.get("residRef"), "desk": desk, PACKAGE: dest_doc.get("_id")}
                for group in dest_doc.get("groups", [])
                for ref in group.get("refs", [])
                if "residRef" in ref
            ]
            if refs:
                self.create(refs)
        else:
            # already fetched: only record the package membership
            if doc.get(PACKAGE):
                links = archived_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get("_id"), {LINKED_IN_PACKAGES: links})
    return [doc.get("guid") for doc in docs]
def update(self, id, updates, original):
    """Unspike the item *id* and notify clients.

    :return: the refreshed item after the update
    """
    state_before = original[ITEM_STATE]
    if not is_workflow_state_transition_valid('unspike', state_before):
        raise InvalidStateTransitionError()
    user = get_user(required=True)
    archive_service = get_resource_service(ARCHIVE)
    item = archive_service.find_one(req=None, _id=id)
    updates.update(self.get_unspike_updates(item))
    self.backend.update(self.datasource, id, updates, original)
    # re-read so callers get the persisted document
    item = archive_service.find_one(req=None, _id=id)
    push_notification('item:unspike', item=str(id), user=str(user))
    return item
def update_state(original, updates):
    """Assign a valid workflow state to *updates* on save.

    Items already 'ingested' or 'in_progress' are left untouched.
    Otherwise desk content becomes 'in_progress' and workspace content
    becomes 'draft'; an illegal 'save' transition raises.
    """
    state = original.get(config.CONTENT_STATE)
    if state in ('ingested', 'in_progress'):
        return
    if not is_workflow_state_transition_valid('save', state):
        raise superdesk.InvalidStateTransitionError()
    if is_assigned_to_a_desk(original):
        updates[config.CONTENT_STATE] = 'in_progress'
    else:
        updates[config.CONTENT_STATE] = 'draft'
def update_state(original, updates):
    """Set a valid workflow state in *updates* when the item is saved.

    No change for items that are 'ingested' or 'in_progress'. Desk
    content moves to 'in_progress', workspace content to 'draft'; an
    invalid 'save' transition raises.
    """
    current = original.get(config.CONTENT_STATE)
    if current not in ('ingested', 'in_progress'):
        if not is_workflow_state_transition_valid('save', current):
            raise superdesk.InvalidStateTransitionError()
        on_desk = is_assigned_to_a_desk(original)
        updates[config.CONTENT_STATE] = 'in_progress' if on_desk else 'draft'
def _validate_rewrite(self, original, update):
    """Validates the article to be rewritten.

    :param original: article to be rewritten
    :param update: article as the rewrite
    :raises: SuperdeskApiError
    """
    if not original:
        raise SuperdeskApiError.notFoundError(message='Cannot find the article')
    if original.get(EMBARGO):
        raise SuperdeskApiError.badRequestError("Rewrite of an Item having embargo isn't possible")
    if not original.get('event_id'):
        raise SuperdeskApiError.notFoundError(message='Event id does not exist')
    # only one rewrite per article is allowed
    if original.get('rewritten_by'):
        raise SuperdeskApiError.badRequestError(message='Article has been rewritten before !')
    if not is_workflow_state_transition_valid('rewrite', original[ITEM_STATE]):
        raise InvalidStateTransitionError()
    # an update chain may only continue once the previous update is published
    if original.get('rewrite_of') and not (original.get(ITEM_STATE) in PUBLISH_STATES):
        raise SuperdeskApiError.badRequestError(message="Rewrite is not published. Cannot rewrite the story again.")
    if update:
        # in case of associate as update
        if update.get('rewrite_of'):
            raise SuperdeskApiError.badRequestError("Rewrite story has been used as update before !")
        if update.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED,
                                      CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED,
                                      CONTENT_STATE.SCHEDULED, CONTENT_STATE.SPIKED]:
            raise InvalidStateTransitionError()
        if update.get(ITEM_TYPE) not in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            raise SuperdeskApiError.badRequestError("Rewrite story can only be text or pre-formatted !")
        if update.get('genre') and \
                any(genre.get('value', '').lower() == BROADCAST_GENRE.lower()
                    for genre in update.get('genre')):
            raise SuperdeskApiError.badRequestError("Broadcast cannot be a update story !")
        if original.get('profile') and original.get('profile') != update.get('profile'):
            raise SuperdeskApiError.badRequestError("Rewrite item content profile does "
                                                    "not match with Original item.")
def update_state(original, updates):
    """Set a valid workflow state in *updates* when the item is saved.

    Items that are ingested, in progress or scheduled are left alone.
    Desk content becomes in-progress, workspace content becomes draft;
    an invalid 'save' transition raises.
    """
    state = original.get(ITEM_STATE)
    if state in {CONTENT_STATE.INGESTED, CONTENT_STATE.PROGRESS, CONTENT_STATE.SCHEDULED}:
        return
    if not is_workflow_state_transition_valid('save', state):
        raise superdesk.errors.InvalidStateTransitionError()
    if is_assigned_to_a_desk(original):
        updates[ITEM_STATE] = CONTENT_STATE.PROGRESS
    else:
        updates[ITEM_STATE] = CONTENT_STATE.DRAFT
def update(self, id, updates, original):
    """Unspike the item *id*, notify clients and record the operation.

    :return: the refreshed item after the update
    """
    if not is_workflow_state_transition_valid(ITEM_UNSPIKE, original[ITEM_STATE]):
        raise InvalidStateTransitionError()
    user = get_user(required=True)
    archive_service = get_resource_service(ARCHIVE)
    item = archive_service.find_one(req=None, _id=id)
    self.set_unspike_updates(item, updates)
    self.backend.update(self.datasource, id, updates, original)
    # re-read so callers get the persisted document
    item = archive_service.find_one(req=None, _id=id)
    push_notification("item:unspike", item=str(id),
                      user=str(user.get(config.ID_FIELD)))
    # record the unspike in the item history
    app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)
    return item
def update_state(original, updates):
    """Assign a workflow state in *updates* when the item is saved.

    Ingested, in-progress and scheduled items keep their state. Desk
    content becomes in-progress, workspace content becomes draft; an
    invalid 'save' transition raises.
    """
    skip_states = {CONTENT_STATE.INGESTED, CONTENT_STATE.PROGRESS, CONTENT_STATE.SCHEDULED}
    current = original.get(ITEM_STATE)
    if current in skip_states:
        return
    if not is_workflow_state_transition_valid('save', current):
        raise superdesk.errors.InvalidStateTransitionError()
    on_desk = is_assigned_to_a_desk(original)
    updates[ITEM_STATE] = CONTENT_STATE.PROGRESS if on_desk else CONTENT_STATE.DRAFT
def create(self, docs, **kwargs):
    """Fetch each doc's ingest item into the archive.

    When the item is not yet in the archive it is copied there, routed
    to the requested desk and, for packages, its referenced items are
    fetched recursively. When it already exists only package links are
    updated.

    :param docs: list of dicts with the ingest 'guid', a 'desk' and an
        optional PACKAGE linking the item into a package
    :return: list of the processed guids
    :raises SuperdeskApiError.notFoundError: if an ingest item is missing
    :raises InvalidStateTransitionError: if the fetch transition is not
        valid from the ingest item's state
    """
    for doc in docs:
        ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
        if not ingest_doc:
            msg = 'Fail to found ingest item with guid: %s' % doc.get('guid')
            raise SuperdeskApiError.notFoundError(msg)
        if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        archived = utcnow()
        # stamp the ingest item (and the request doc) as archived
        superdesk.get_resource_service('ingest').patch(ingest_doc.get('_id'), {'archived': archived})
        doc['archived'] = archived
        archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
        if not archived_doc:
            dest_doc = dict(ingest_doc)
            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get('desk'))
            dest_doc[config.CONTENT_STATE] = STATE_FETCHED
            remove_unwanted(dest_doc)
            # package refs must now point at the archive copies
            for ref in [ref for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]:
                ref['location'] = ARCHIVE
                ref['guid'] = ref['residRef']
            set_original_creator(dest_doc)
            if doc.get(PACKAGE):
                links = dest_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                dest_doc[LINKED_IN_PACKAGES] = links
            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('guid'))
            desk = doc.get('desk')
            # recursively fetch every item the package references
            refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                    for group in dest_doc.get('groups', [])
                    for ref in group.get('refs', []) if 'residRef' in ref]
            if refs:
                self.create(refs)
        else:
            # already fetched: only record the package membership
            if doc.get(PACKAGE):
                links = archived_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get('_id'), {LINKED_IN_PACKAGES: links})
    return [doc.get('guid') for doc in docs]
def update(self, id, updates, original):
    """Spike the item *id*: expire it, remember its state, and unlink it.

    Clears rewrite/broadcast relations; for packages, removes the
    back-links from every member item and empties the package groups.

    :return: the updated item
    :raises InvalidStateTransitionError: if 'spike' is not a valid
        transition from the item's current state
    """
    original_state = original[ITEM_STATE]
    if not is_workflow_state_transition_valid('spike', original_state):
        raise InvalidStateTransitionError()
    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    task = item.get('task', {})
    # spiked content expires based on its desk/stage settings
    updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get('desk'), stage_id=task.get('stage'))
    # remember the state so unspike can restore it
    updates[REVERT_STATE] = item.get(ITEM_STATE, None)
    # drop rewrite/broadcast relations on spike
    if original.get('rewrite_of'):
        updates['rewrite_of'] = None
    if original.get('rewritten_by'):
        updates['rewritten_by'] = None
    if original.get('broadcast'):
        updates['broadcast'] = None
    if original.get('rewrite_sequence'):
        updates['rewrite_sequence'] = None
    if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
        # remove links from items in the package
        package_service = PackageService()
        items = package_service.get_item_refs(original)
        for item in items:
            package_item = get_resource_service(ARCHIVE).find_one(req=None, _id=item['guid'])
            if package_item:
                linked_in_packages = [linked for linked in package_item.get(LINKED_IN_PACKAGES, [])
                                      if linked.get(PACKAGE) != original.get(config.ID_FIELD)]
                super().system_update(package_item[config.ID_FIELD],
                                      {LINKED_IN_PACKAGES: linked_in_packages},
                                      package_item)
        # and remove all the items from the package
        updates['groups'] = []
    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('item:spike', item=str(id), user=str(user.get(config.ID_FIELD)))
    self._removed_refs_from_package(id)
    return item
def update_state(original, updates):
    """Assign a valid workflow state to *updates* on save.

    Ingested, in-progress and scheduled items are left alone, as are
    takes packages (no validation, stage unchanged). Desk content
    becomes in-progress, workspace content becomes draft.
    """
    state = original.get(ITEM_STATE)
    if state in {CONTENT_STATE.INGESTED, CONTENT_STATE.PROGRESS, CONTENT_STATE.SCHEDULED}:
        return
    if original.get(PACKAGE_TYPE) == TAKES_PACKAGE:
        # skip any state transition validation for takes packages
        # also don't change the stage of the package
        return
    if not is_workflow_state_transition_valid('save', state):
        raise superdesk.InvalidStateTransitionError()
    if is_assigned_to_a_desk(original):
        updates[ITEM_STATE] = CONTENT_STATE.PROGRESS
    else:
        updates[ITEM_STATE] = CONTENT_STATE.DRAFT
def update_state(original, updates):
    """Assign a valid workflow state to *updates* on save.

    Ingested, in-progress and scheduled items are left alone, as are
    takes packages (no validation, stage unchanged). Desk content
    becomes 'in_progress', workspace content becomes 'draft'.
    """
    state = original.get(config.CONTENT_STATE)
    if state in ('ingested', 'in_progress', 'scheduled'):
        return
    if original.get(PACKAGE_TYPE) == TAKES_PACKAGE:
        # skip any state transition validation for takes packages
        # also don't change the stage of the package
        return
    if not is_workflow_state_transition_valid('save', state):
        raise superdesk.InvalidStateTransitionError()
    if is_assigned_to_a_desk(original):
        updates[config.CONTENT_STATE] = 'in_progress'
    else:
        updates[config.CONTENT_STATE] = 'draft'
def move_content(self, id, doc):
    """Move the archive item *id* to the desk/stage given in *doc*.

    Validates the move, re-routes the item, bumps its version, persists
    the change, notifies clients and applies incoming-stage macros.

    :param id: id of the item to move
    :param doc: dict carrying the destination under doc['task']
    :return: the updated item
    :raises SuperdeskApiError.notFoundError: if the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when the
        destination stage equals the current stage
    :raises InvalidStateTransitionError: if 'submit_to_desk' is not a
        valid transition from the item's state
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)
    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    # stage ids may differ in type (ObjectId vs str), compare as strings
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
    # keep a pristine copy for versioning/diffing before mutating the doc
    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))
    # published/scheduled/killed items keep their state when moved
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)
    # _id must not be part of the update payload
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)
    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])
    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])
    return archived_doc
def update_state(original, updates):
    """Set a valid workflow state in *updates* when the item is saved.

    Ingested, in-progress and scheduled items keep their state; takes
    packages are skipped entirely (no validation, stage unchanged).
    Otherwise desk content becomes in-progress, workspace content draft.
    """
    keep_states = {CONTENT_STATE.INGESTED, CONTENT_STATE.PROGRESS, CONTENT_STATE.SCHEDULED}
    current = original.get(ITEM_STATE)
    if current in keep_states:
        return
    if original.get(PACKAGE_TYPE) == TAKES_PACKAGE:
        # skip any state transition validation for takes packages
        # also don't change the stage of the package
        return
    if not is_workflow_state_transition_valid('save', current):
        raise superdesk.InvalidStateTransitionError()
    on_desk = is_assigned_to_a_desk(original)
    updates[ITEM_STATE] = CONTENT_STATE.PROGRESS if on_desk else CONTENT_STATE.DRAFT
def _validate_rewrite(self, original, update):
    """Validates the article to be rewritten.

    :param original: article to be rewritten
    :param update: article as the rewrite
    :raises: SuperdeskApiError
    """
    if not original:
        raise SuperdeskApiError.notFoundError(
            message=_("Cannot find the article"))
    # an embargo only blocks the rewrite while it is still in the future
    embargo = original.get(SCHEDULE_SETTINGS, {}).get(
        "utc_{}".format(EMBARGO)) if original.get(EMBARGO) else None
    if embargo is not None and embargo > utcnow():
        raise SuperdeskApiError.badRequestError(
            _("Rewrite of an Item having embargo isn't possible"))
    if not original.get("event_id"):
        raise SuperdeskApiError.notFoundError(
            message=_("Event id does not exist"))
    if original.get("rewritten_by"):
        raise SuperdeskApiError.badRequestError(
            message=_("Article has been rewritten before !"))
    if (not is_workflow_state_transition_valid("rewrite", original[ITEM_STATE])
            and not config.ALLOW_UPDATING_SCHEDULED_ITEMS):
        raise InvalidStateTransitionError()
    if (original.get("rewrite_of")
            and not (original.get(ITEM_STATE) in PUBLISH_STATES)
            and not app.config["WORKFLOW_ALLOW_MULTIPLE_UPDATES"]):
        # BUG FIX: the message was a single string literal broken across a
        # raw newline (invalid syntax); rejoined into one sentence pair.
        raise SuperdeskApiError.badRequestError(message=_(
            "Rewrite is not published. Cannot rewrite the story again."))
    if update:
        # in case of associate as update
        if update.get("rewrite_of"):
            raise SuperdeskApiError.badRequestError(
                _("Rewrite story has been used as update before !"))
        if update.get(ITEM_STATE) in [
            CONTENT_STATE.PUBLISHED,
            CONTENT_STATE.CORRECTED,
            CONTENT_STATE.KILLED,
            CONTENT_STATE.RECALLED,
            CONTENT_STATE.SCHEDULED,
            CONTENT_STATE.SPIKED,
        ]:
            raise InvalidStateTransitionError()
        if update.get(ITEM_TYPE) not in [
            CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED
        ]:
            raise SuperdeskApiError.badRequestError(
                _("Rewrite story can only be text or pre-formatted !"))
        if update.get("genre") and any(
                genre.get("value", "").lower() == BROADCAST_GENRE.lower()
                for genre in update.get("genre")):
            raise SuperdeskApiError.badRequestError(
                _("Broadcast cannot be a update story !"))
        if original.get("profile") and str(original.get("profile")) != str(
                update.get("profile")):
            raise SuperdeskApiError.badRequestError(
                _("Rewrite item content profile does "
                  "not match with Original item."))
def on_update(self, updates, original):
    """Reject the publish when the workflow transition is not allowed."""
    state_field = app.config['CONTENT_STATE']
    if not is_workflow_state_transition_valid('publish', original[state_field]):
        raise InvalidStateTransitionError()
def update(self, id, updates, original):
    """Spike the archive item identified by ``id``.

    Validates the workflow transition, stamps spike expiry and revert
    state on ``updates``, severs rewrite/translation/package links,
    clears locks and user marks, persists the update, and emits a
    notification plus history entry.

    :param id: item id in the archive collection
    :param updates: dict of changes to apply (mutated in place)
    :param original: the item as currently stored
    :return: the updated item as returned by the backend
    :raises InvalidStateTransitionError: if spiking is not a valid
        transition from the item's current state
    """
    original_state = original[ITEM_STATE]
    if not is_workflow_state_transition_valid(ITEM_SPIKE, original_state):
        raise InvalidStateTransitionError()

    archive_service = get_resource_service(ARCHIVE)
    published_service = get_resource_service("published")

    user = get_user(required=True)
    item = archive_service.find_one(req=None, _id=id)
    task = item.get("task", {})

    updates[EXPIRY] = self._get_spike_expiry(desk_id=task.get("desk"),
                                             stage_id=task.get("stage"))
    # Remember the pre-spike state so the item can be unspiked later.
    updates[REVERT_STATE] = item.get(ITEM_STATE, None)

    # Spiking breaks all rewrite/broadcast relationships.
    if original.get("rewrite_of"):
        updates["rewrite_of"] = None

    if original.get("rewritten_by"):
        updates["rewritten_by"] = None

    if original.get("broadcast"):
        updates["broadcast"] = None

    if original.get("rewrite_sequence"):
        updates["rewrite_sequence"] = None

    if original.get("marked_for_user"):
        # remove marked_for_user on spike and keep it as previous_marked_user for history
        updates["previous_marked_user"] = original["marked_for_user"]
        updates["marked_for_user"] = None
        updates["marked_for_sign_off"] = None

    if original.get("translation_id") and original.get("translated_from"):
        # remove translations info from the translated item on spike
        updates["translated_from"] = None
        updates["translation_id"] = None

        id_to_remove = original.get(config.ID_FIELD)

        # Remove the translated item from the list of translations in the original item
        # where original item can be in archive or in both archive and published resource as well
        translated_from = archive_service.find_one(
            req=None, _id=original.get("translated_from"))
        # Guard: the source of the translation may have been deleted,
        # in which case find_one returns None and there is nothing to fix up.
        if translated_from:
            translated_from_id = translated_from.get(config.ID_FIELD)
            self._remove_translations(archive_service, translated_from,
                                      id_to_remove)

            if translated_from.get("state") in PUBLISH_STATES:
                published_items = list(published_service.get_from_mongo(
                    req=None, lookup={"item_id": translated_from_id}))
                # NOTE: loop variable renamed from `item` to avoid
                # shadowing the archive item fetched above.
                for published_item in published_items:
                    self._remove_translations(published_service,
                                              published_item, id_to_remove)

    # remove any relation with linked items
    updates[ITEM_EVENT_ID] = generate_guid(type=GUID_TAG)

    # remove lock
    updates.update({
        "lock_user": None,
        "lock_session": None,
    })

    if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
        # remove links from items in the package
        package_service = PackageService()
        items = package_service.get_item_refs(original)
        for ref in items:
            package_item = archive_service.find_one(req=None,
                                                    _id=ref[GUID_FIELD])
            if package_item:
                linked_in_packages = [
                    linked
                    for linked in package_item.get(LINKED_IN_PACKAGES, [])
                    if linked.get(PACKAGE) != original.get(config.ID_FIELD)
                ]
                super().system_update(
                    package_item[config.ID_FIELD],
                    {LINKED_IN_PACKAGES: linked_in_packages},
                    package_item)

        # keep the structure of old group in order to be able to unspike the package
        updates[DELETED_GROUPS] = original[GROUPS]
        # and remove all the items from the package
        updates["groups"] = []

    item = self.backend.update(self.datasource, id, updates, original)
    push_notification("item:spike", item=str(id),
                      user=str(user.get(config.ID_FIELD)))

    history_updates = dict(updates)
    if original.get("task"):
        history_updates["task"] = original.get("task")
    app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)

    self._removed_refs_from_package(id)
    return item