def on_delete(self, doc):
    """Validate that deleting the stage does not violate data integrity.

    Raises a precondition-failed error when:
    1. The stage is a working stage or an incoming stage of an existing desk
    2. The stage has documents (spiked or unspiked) or is referenced by
       archived versions of documents
    3. The stage is referred to by an ingest routing rule

    :param doc: stage document being deleted
    :raises SuperdeskApiError.preconditionFailedError: on any violation
    """
    # The two "protected stage" checks were duplicated; dedupe them.
    # Using .get() so a stage document missing either flag no longer raises
    # a KeyError.  Messages are kept byte-identical for API compatibility
    # (including the original "a Incoming" grammar).
    protected_flags = (
        ('working_stage', 'Cannot delete a Working Stage.'),
        ('default_incoming', 'Cannot delete a Incoming Stage.'),
    )
    desk_id = doc.get('desk', None)
    for flag, message in protected_flags:
        if doc.get(flag) is True:
            # Only block while the owning desk still exists.
            if desk_id and superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id):
                raise SuperdeskApiError.preconditionFailedError(message=message)

    # Reject deletion while any article (or archived version of one) still
    # points at this stage.
    archive_versions_query = {'task.stage': str(doc[config.ID_FIELD])}
    items = superdesk.get_resource_service('archive_versions').get(req=None, lookup=archive_versions_query)
    if items and items.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete stage as it has article(s) or referenced by versions of the article(s).')

    # check if the stage is referred to in a ingest routing rule
    rules = self._stage_in_rule(doc[config.ID_FIELD])
    if rules.count() > 0:
        rule_names = ', '.join(rule.get('name') for rule in rules)
        raise SuperdeskApiError.preconditionFailedError(
            message='Stage is referred by Ingest Routing Schemes : {}'.format(rule_names))
def on_delete(self, doc):
    """Prevent deletion of a stage that would violate data integrity.

    1/ Can't delete the default incoming stage
    2/ The stage must have no documents (spiked or unspiked)
    3/ The stage can not be referred to by a ingest routing rule

    :param doc: stage document being deleted
    :return:
    """
    if doc['default_incoming'] is True:
        # A default incoming stage is only protected while its desk exists.
        desk_id = doc.get('desk', None)
        desk_exists = desk_id and superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id)
        if desk_exists:
            raise SuperdeskApiError.preconditionFailedError(message='Cannot delete a default stage.')

    # Any archived version still pointing at the stage blocks deletion.
    stage_ref_lookup = {'task.stage': str(doc[config.ID_FIELD])}
    versions = superdesk.get_resource_service('archive_versions').get(req=None, lookup=stage_ref_lookup)
    if versions and versions.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete stage as it has article(s) or referenced by versions of the article(s).')

    # check if the stage is referred to in a ingest routing rule
    referring_rules = self._stage_in_rule(doc[config.ID_FIELD])
    if referring_rules.count() > 0:
        names = ', '.join(rule.get('name') for rule in referring_rules)
        raise SuperdeskApiError.preconditionFailedError(
            message='Stage is referred by Ingest Routing Schemes : {}'.format(names))
def on_delete(self, desk):
    """Runs on desk delete.

    Overriding to prevent deletion of a desk if the desk meets one of the below conditions:

    1. The desk isn't assigned as a default desk to user(s)
    2. The desk has no content
    3. The desk is associated with routing rule(s)
    """
    # A desk that is any user's default desk must not be removed.
    as_default_desk = superdesk.get_resource_service("users").get(
        req=None, lookup={"desk": desk[config.ID_FIELD]})
    if as_default_desk and as_default_desk.count():
        raise SuperdeskApiError.preconditionFailedError(message=_(
            "Cannot delete desk as it is assigned as default desk to user(s)."
        ))
    # Routing schemes may fetch or publish into the desk; either reference
    # blocks deletion.
    routing_rules_query = {
        "$or": [
            {"rules.actions.fetch.desk": desk[config.ID_FIELD]},
            {"rules.actions.publish.desk": desk[config.ID_FIELD]},
        ]
    }
    routing_rules = superdesk.get_resource_service("routing_schemes").get(
        req=None, lookup=routing_rules_query)
    if routing_rules and routing_rules.count():
        raise SuperdeskApiError.preconditionFailedError(message=_(
            "Cannot delete desk as routing scheme(s) are associated with the desk"
        ))
    # Archived versions reference desks by string id, including the desks an
    # item last authored/produced on.
    archive_versions_query = {
        "$or": [
            {"task.desk": str(desk[config.ID_FIELD])},
            {"task.last_authoring_desk": str(desk[config.ID_FIELD])},
            {"task.last_production_desk": str(desk[config.ID_FIELD])},
        ]
    }
    items = superdesk.get_resource_service("archive_versions").get(
        req=None, lookup=archive_versions_query)
    if items and items.count():
        raise SuperdeskApiError.preconditionFailedError(message=_(
            "Cannot delete desk as it has article(s) or referenced by versions of the article(s)."
        ))
def move_content(self, id, doc):
    """Move an archive item to the desk/stage given in ``doc['task']``.

    :param id: _id/guid of the item to move
    :param doc: document carrying the target ``task`` (desk and stage)
    :return: the updated archive document
    :raises SuperdeskApiError.notFoundError: when the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when moving within the same stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)
    user = get_user()

    # BUG FIX: the destination desk was read from task['desc'] (a typo), so
    # send_to() always received desk_id=None; read the intended task['desk'].
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'), user_id=user.get(config.ID_FIELD))

    # Published/scheduled/killed items keep their state; anything else is
    # submitted to the new desk.
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE

    set_sign_off(archived_doc, original=original)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    # update() takes the id separately; it must not appear in the payload.
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)

    insert_into_versions(id_=original[config.ID_FIELD])

    return archived_doc
def create(self, docs, **kwargs):
    """Duplicate the item identified by the request ``guid`` once per entry in ``docs``.

    Each duplicate must stay on the item's current desk.

    :return: guids of the newly created duplicates
    """
    source_guid = request.view_args['guid']
    new_guids = []
    archive_service = get_resource_service(ARCHIVE)

    for doc in docs:
        item = archive_service.find_one(req=None, _id=source_guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % source_guid)

        # The requested desk must match the item's current desk.
        item_desk = item.get('task', {}).get('desk')
        if item_desk is None or str(item_desk) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Duplicate is allowed within the same desk.')

        if not is_workflow_state_transition_valid('duplicate', item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        send_to(doc=item, desk_id=doc.get('desk'))
        new_guids.append(archive_service.duplicate_content(item))

        if kwargs.get('notify', True):
            push_content_notification([item])

    return new_guids
def move_content(self, id, doc):
    """Move an archive item to the desk/stage given in ``doc['task']`` and version it.

    :param id: _id/guid of the item to move
    :param doc: document carrying the target ``task`` (desk and stage)
    :return: the updated archive document
    :raises SuperdeskApiError.notFoundError: when the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when moving within the same stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)
    # BUG FIX: the desk id was read from task['desc'] (a typo) and therefore
    # always None; read the intended task['desk'] key.
    send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

    # Published/scheduled/killed items keep their state; anything else is
    # submitted to the new desk.
    if archived_doc[config.CONTENT_STATE] not in ['published', 'scheduled', 'killed']:
        archived_doc[config.CONTENT_STATE] = 'submitted'

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    # update() takes the id separately; it must not appear in the payload.
    del archived_doc['_id']
    archive_service.update(original['_id'], archived_doc, original)
    insert_into_versions(id_=original['_id'])

    return archived_doc
def move_content(self, id, doc):
    """Move an archive item to the desk/stage given in ``doc['task']`` and version it.

    :param id: _id/guid of the item to move
    :param doc: document carrying the target ``task`` (desk and stage)
    :return: the updated archive document
    :raises SuperdeskApiError.notFoundError: when the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when moving within the same stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)
    # BUG FIX: the desk id was read from task['desc'] (a typo) and therefore
    # always None; read the intended task['desk'] key.
    send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

    # Only published items keep their state; anything else is submitted.
    if archived_doc[config.CONTENT_STATE] != 'published':
        archived_doc[config.CONTENT_STATE] = 'submitted'

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    # update() takes the id separately; it must not appear in the payload.
    del archived_doc['_id']
    archive_service.update(original['_id'], archived_doc, original)
    insert_into_versions(guid=original['_id'])

    return archived_doc
def create(self, docs, **kwargs):
    """Move the item identified by the request ``guid`` to the desk/stage in each doc.

    :return: guids of the moved items
    """
    source_guid = request.view_args['guid']
    moved_guids = []
    archive_service = get_resource_service(ARCHIVE)

    for doc in docs:
        item = archive_service.find_one(req=None, _id=source_guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % source_guid)

        # A move must actually change the stage.
        item_stage = item.get('task', {}).get('stage')
        if item_stage and str(item_stage) == str(doc.get('stage')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Move is not allowed within the same stage.')

        if not is_workflow_state_transition_valid('submit_to_desk', item[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()

        original = dict(item)
        send_to(item, doc.get('desk'), doc.get('stage'))
        item[config.CONTENT_STATE] = 'submitted'
        resolve_document_version(item, ARCHIVE, 'PATCH', original)

        # update() takes the id separately; drop it from the payload.
        del item['_id']
        archive_service.update(original['_id'], item, original)
        insert_into_versions(guid=original['_id'])

        moved_guids.append(item['guid'])

    return moved_guids
def create(self, docs, **kwargs):
    """Duplicate the item identified by the request ``guid`` once per doc, on the same desk.

    :return: guids of the duplicates
    """
    source_guid = request.view_args['guid']
    duplicated_guids = []
    archive_service = get_resource_service(ARCHIVE)

    for doc in docs:
        item = archive_service.find_one(req=None, _id=source_guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % source_guid)

        # The requested desk must match the item's current desk.
        item_desk = item.get('task', {}).get('desk')
        if item_desk is None or str(item_desk) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Duplicate is allowed within the same desk.')

        send_to(doc=item, desk_id=doc.get('desk'))
        new_guid = archive_service.duplicate_content(item)
        duplicated_guids.append(new_guid)

        if kwargs.get('notify', True):
            task = item.get('task', {})
            push_notification(
                'content:update',
                duplicated=1,
                item=str(new_guid),
                desk=str(task.get('desk', '')),
                stage=str(task.get('stage', ''))
            )

    return duplicated_guids
def on_delete(self, doc):
    """Block deletion of a subscriber while any output channel lists it as a destination."""
    referencing_channels = get_resource_service('output_channels').get(
        req=None, lookup={'destinations': str(doc.get('_id'))})
    if referencing_channels and referencing_channels.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Subscriber is associated with Output Channel.')
def create(self, docs, **kwargs):
    """Copy the personal item identified by the request ``guid`` once per doc.

    :return: guids of the copies
    """
    source_guid = request.view_args['guid']
    copied_guids = []
    archive_service = get_resource_service(ARCHIVE)

    for _doc in docs:
        item = archive_service.find_one(req=None, _id=source_guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % source_guid)

        # Only personal (desk-less) content may be copied.
        if item.get('task', {}).get('desk'):
            raise SuperdeskApiError.preconditionFailedError(
                message='Copy is not allowed on items in a desk.')

        if not is_workflow_state_transition_valid('copy', item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        copied_guids.append(archive_service.duplicate_content(item))

        if kwargs.get('notify', True):
            push_notification('item:copy', copied=1)

    return copied_guids
def create(self, docs, **kwargs):
    """Duplicate the item identified by the request ``guid`` once per doc.

    The duplicate stays on the same desk; workflow must allow duplication.

    :return: guids of the duplicates
    """
    guid = request.view_args['guid']
    duplicates = []

    for doc in docs:
        service = get_resource_service(ARCHIVE)
        item = service.find_one(req=None, _id=guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % guid)

        # The requested desk must match the item's current desk.
        desk_of_item = item.get('task', {}).get('desk')
        if desk_of_item is None or str(desk_of_item) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Duplicate is allowed within the same desk.')

        if not is_workflow_state_transition_valid('duplicate', item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        send_to(doc=item, desk_id=doc.get('desk'))
        duplicates.append(service.duplicate_content(item))

        if kwargs.get('notify', True):
            push_content_notification([item])

    return duplicates
def restore_version(self, id, doc):
    """Replace the current item with one of its archived versions.

    :param id: item id
    :param doc: carries ``old_version`` (version to restore) and
                ``last_version`` (expected current version)
    :return: result of the replace, or None when a required value is missing
    """
    item_id = id
    version_to_restore = int(doc.get('old_version', 0))
    expected_version = int(doc.get('last_version', 0))
    if not all([item_id, version_to_restore, expected_version]):
        return None

    restored = get_resource_service('archive_versions').find_one(
        req=None, _id_document=item_id, _version=version_to_restore)
    if restored is None:
        raise SuperdeskApiError.notFoundError('Invalid version %s' % version_to_restore)

    current = get_resource_service(SOURCE).find_one(req=None, _id=item_id)
    if current is None:
        raise SuperdeskApiError.notFoundError('Invalid item id %s' % item_id)

    # Optimistic-concurrency guard: the item must not have changed since
    # the client read it.
    if current[config.VERSION] != expected_version:
        raise SuperdeskApiError.preconditionFailedError('Invalid last version %s' % expected_version)

    # Turn the version record back into a current document.
    restored['_id'] = restored['_id_document']
    restored['_updated'] = restored['versioncreated'] = utcnow()
    set_item_expiry(restored, doc)
    del restored['_id_document']
    resolve_document_version(restored, 'archive', 'PATCH', current)
    remove_unwanted(restored)

    result = super().replace(id=item_id, document=restored)
    del doc['old_version']
    del doc['last_version']
    doc.update(restored)
    return result
def create(self, docs, **kwargs):
    """Copy the personal item identified by the request ``guid`` once per doc.

    :return: guids of the copies
    """
    source_guid = request.view_args['guid']
    copied_guids = []
    archive_service = get_resource_service(ARCHIVE)

    for _doc in docs:
        item = archive_service.find_one(req=None, _id=source_guid)
        if not item:
            raise SuperdeskApiError.notFoundError(_(
                'Fail to found item with guid: {guid}').format(guid=source_guid))

        # Only personal (desk-less) content may be copied.
        if item.get('task', {}).get('desk'):
            raise SuperdeskApiError.preconditionFailedError(message=_('Copy is not allowed on items in a desk.'))

        if not is_workflow_state_transition_valid('copy', item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        copied_guids.append(archive_service.duplicate_content(item))

        if kwargs.get('notify', True):
            user = get_user()
            push_notification('item:copy', copied=1, user=str(user.get(config.ID_FIELD, '')))

    return copied_guids
def restore_version(self, id, doc, original):
    """Restore an older version of an item as the current document.

    :param id: item id
    :param doc: carries "old_version" and "last_version"
    :param original: part of the service signature; not used here
    :return: the item id, or None when a required value is missing
    """
    item_id = id
    version_to_restore = int(doc.get("old_version", 0))
    expected_version = int(doc.get("last_version", 0))
    if not all([item_id, version_to_restore, expected_version]):
        return None

    restored = get_resource_service("archive_versions").find_one(
        req=None, _id_document=item_id, _current_version=version_to_restore
    )
    if restored is None:
        raise SuperdeskApiError.notFoundError("Invalid version %s" % version_to_restore)

    current = get_resource_service(SOURCE).find_one(req=None, _id=item_id)
    if current is None:
        raise SuperdeskApiError.notFoundError("Invalid item id %s" % item_id)

    # Optimistic-concurrency guard: reject when the item changed since the
    # client read it.
    if current[config.VERSION] != expected_version:
        raise SuperdeskApiError.preconditionFailedError("Invalid last version %s" % expected_version)

    # Turn the version record back into a current document.
    restored["_id"] = restored["_id_document"]
    restored["_updated"] = restored["versioncreated"] = utcnow()
    set_item_expiry(restored, doc)
    del restored["_id_document"]
    resolve_document_version(restored, "archive", "PATCH", current)
    remove_unwanted(restored)

    super().replace(id=item_id, document=restored, original=current)
    del doc["old_version"]
    del doc["last_version"]
    doc.update(restored)
    return item_id
def restore_version(self, id, doc, original):
    """Restore an older version of an item as the current document.

    :param id: item id
    :param doc: carries 'old_version' and 'last_version'
    :param original: part of the service signature; not used here
    :return: the item id, or None when a required value is missing
    """
    item_id = id
    version_to_restore = int(doc.get('old_version', 0))
    expected_version = int(doc.get('last_version', 0))
    if not all([item_id, version_to_restore, expected_version]):
        return None

    restored = get_resource_service('archive_versions').find_one(
        req=None, _id_document=item_id, _version=version_to_restore)
    if restored is None:
        raise SuperdeskApiError.notFoundError('Invalid version %s' % version_to_restore)

    current = get_resource_service(SOURCE).find_one(req=None, _id=item_id)
    if current is None:
        raise SuperdeskApiError.notFoundError('Invalid item id %s' % item_id)

    # Optimistic-concurrency guard: reject when the item changed since the
    # client read it.
    if current[config.VERSION] != expected_version:
        raise SuperdeskApiError.preconditionFailedError(
            'Invalid last version %s' % expected_version)

    # Turn the version record back into a current document.
    restored['_id'] = restored['_id_document']
    restored['_updated'] = restored['versioncreated'] = utcnow()
    set_item_expiry(restored, doc)
    del restored['_id_document']
    resolve_document_version(restored, 'archive', 'PATCH', current)
    remove_unwanted(restored)

    super().replace(id=item_id, document=restored, original=current)
    del doc['old_version']
    del doc['last_version']
    doc.update(restored)
    return item_id
def create(self, docs, **kwargs):
    """Copy the item identified by the request "guid" once per doc.

    Desk items may be copied only when WORKFLOW_ALLOW_COPY_TO_PERSONAL is
    enabled; the copy is then detached from the desk and attributed to the
    current user.

    :return: guids of the copies
    """
    source_guid = request.view_args["guid"]
    copied_guids = []
    archive_service = get_resource_service(ARCHIVE)

    for _doc in docs:
        item = archive_service.find_one(req=None, _id=source_guid)
        if not item:
            raise SuperdeskApiError.notFoundError(
                _("Fail to found item with guid: {guid}").format(guid=source_guid)
            )

        desk_of_item = item.get("task", {}).get("desk")
        if desk_of_item and not app.config["WORKFLOW_ALLOW_COPY_TO_PERSONAL"]:
            raise SuperdeskApiError.preconditionFailedError(message=_("Copy is not allowed on items in a desk."))
        elif desk_of_item:
            # Copying to personal space: detach from the desk and make the
            # current user the creator.
            item["task"] = {}
            item["original_creator"] = get_user_id()

        if not is_workflow_state_transition_valid("copy", item[ITEM_STATE]):
            raise InvalidStateTransitionError()

        copied_guids.append(archive_service.duplicate_content(item))

        if kwargs.get("notify", True):
            user = get_user()
            push_notification("item:copy", copied=1, user=str(user.get(config.ID_FIELD, "")))

    return copied_guids
def on_delete(self, doc):
    """Block deletion of an output channel while a destination group references it."""
    referencing_groups = get_resource_service('destination_groups').get(
        req=None, lookup={'output_channels.channel': str(doc.get('_id'))})
    if referencing_groups and referencing_groups.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Output Channel is associated with Destination Groups.'
        )
def create(self, docs, **kwargs):
    """Autosave the posted item for the current user.

    :param docs: list whose first entry is the item to autosave (must carry _id)
    :return: list with the autosaved item's _id
    :raises SuperdeskApiError.notFoundError: when no content was posted
    :raises SuperdeskApiError.preconditionFailedError: on etag mismatch
    :raises SuperdeskApiError.badRequestError: when the item has no _id
    """
    if not docs:
        raise SuperdeskApiError.notFoundError('Content is missing')
    req = parse_request(self.datasource)
    try:
        get_component(ItemAutosave).autosave(docs[0]['_id'], docs[0], get_user(required=True), req.if_match)
    except InvalidEtag:
        raise SuperdeskApiError.preconditionFailedError('Client and server etags don\'t match')
    except KeyError:
        # Robustness fix, consistent with the newer autosave service: an item
        # posted without _id is a client error, not an unhandled KeyError.
        raise SuperdeskApiError.badRequestError('Request for Auto-save must have _id')
    return [docs[0]['_id']]
def validate_template_name(self, doc_template_name):
    """Ensure no existing template already uses this name (case-insensitive).

    :param doc_template_name: proposed template name
    :raises SuperdeskApiError.preconditionFailedError: when the name is taken
    """
    # BUG FIX: escape the name before embedding it in the regex; otherwise a
    # name containing metacharacters (e.g. "A+B" or "x (draft)") either fails
    # to match an exact duplicate or raises re.error on compile.
    query = {'template_name': re.compile('^{}$'.format(re.escape(doc_template_name)), re.IGNORECASE)}
    if self.find_one(req=None, **query):
        msg = 'Template name must be unique'
        raise SuperdeskApiError.preconditionFailedError(message=msg, payload=msg)
def create(self, docs, **kwargs):
    """Autosave the first posted doc for the current user.

    :return: list with the autosaved item's _id
    """
    if not docs:
        raise SuperdeskApiError.notFoundError("Content is missing")
    req = parse_request(self.datasource)
    item = docs[0]
    try:
        get_component(ItemAutosave).autosave(item["_id"], item, get_user(required=True), req.if_match)
    except InvalidEtag:
        raise SuperdeskApiError.preconditionFailedError("Client and server etags don't match")
    except KeyError:
        # An item posted without _id cannot be autosaved.
        raise SuperdeskApiError.badRequestError("Request for Auto-save must have _id")
    return [item["_id"]]
def _validate(self, archived_doc, doc):
    """Validate that the item can be move.

    :param dict archived_doc: item to be moved
    :param dict doc: new location details
    """
    source_stage = archived_doc.get('task', {}).get('stage')
    target_stage = doc.get('task', {}).get('stage')
    # A move must actually change the stage.
    if source_stage and str(source_stage) == str(target_stage):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')
    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()
def on_delete(self, doc):
    """Block deletion of a destination group that is still referenced.

    Checks archive items, other destination groups, and routing schemes.
    """
    doc_id = doc.get(superdesk.config.ID_FIELD)

    # Any archive item targeting the group blocks deletion.
    es_query = {
        "query": {
            "filtered": {
                "filter": {
                    "term": {
                        "destination_groups": str(doc_id)
                    }
                }
            }
        }
    }
    req = ParsedRequest()
    req.args = {'source': json.dumps(es_query)}
    referencing_items = get_resource_service('archive').get(req=req, lookup=None)
    if referencing_items and referencing_items.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Destination Group is referenced by items.')

    # Other destination groups may nest this one.
    nesting_groups = self.get(req=None, lookup={'destination_groups': doc_id})
    if nesting_groups and nesting_groups.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Destination Group is referenced by other Destination Group/s.')

    # Routing schemes may fetch or publish into the group.
    schemes = get_resource_service('routing_schemes').get(req=None, lookup={'$or': [
        {'rules.actions.fetch.destination_groups': doc_id},
        {'rules.actions.publish.destination_groups': doc_id}
    ]})
    if schemes and schemes.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Destination Group is referenced by Routing Scheme/s.')
def move_content(self, id, doc):
    """Move an archive item to the desk/stage carried in ``doc['task']``.

    Validates the move, updates workflow state/sign-off, bumps the document
    version, persists the change, pushes a content notification and finally
    applies any on-stage macros.

    :param id: _id/guid of the item to move
    :param doc: document carrying the target ``task`` (desk and stage)
    :return: the updated archive document
    :raises SuperdeskApiError.notFoundError: when the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when moving within the same stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % id)

    # A move must actually change the stage.
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(
            doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(
            message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk',
                                              archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()

    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc,
            desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))

    # Published/scheduled/killed items keep their state; everything else is
    # submitted to the new desk.
    if archived_doc[ITEM_STATE] not in {
            CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED,
            CONTENT_STATE.KILLED
    }:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE

    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    # Drop the stale sign-off and recompute it for the moving user.
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    # update() takes the id separately; it must not appear in the payload.
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)

    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])

    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])

    return archived_doc
def on_delete(self, desk):
    """
    Overriding to prevent deletion of a desk if the desk meets one of the below conditions:
        1. The desk isn't assigned as a default desk to user(s)
        2. The desk has no content
        3. The desk is associated with routing rule(s)
    """
    desk_id = desk[config.ID_FIELD]

    # A desk that is any user's default desk must not be removed.
    users_with_default = superdesk.get_resource_service("users").get(req=None, lookup={"desk": desk_id})
    if users_with_default and users_with_default.count():
        raise SuperdeskApiError.preconditionFailedError(
            message="Cannot delete desk as it is assigned as default desk to user(s)."
        )

    # Routing schemes may fetch or publish into the desk.
    scheme_lookup = {
        "$or": [
            {"rules.actions.fetch.desk": desk_id},
            {"rules.actions.publish.desk": desk_id},
        ]
    }
    schemes = superdesk.get_resource_service("routing_schemes").get(req=None, lookup=scheme_lookup)
    if schemes and schemes.count():
        raise SuperdeskApiError.preconditionFailedError(
            message="Cannot delete desk as routing scheme(s) are associated with the desk"
        )

    # Archived versions reference desks by string id, including the desks an
    # item last authored/produced on.
    desk_id_str = str(desk_id)
    version_lookup = {
        "$or": [
            {"task.desk": desk_id_str},
            {"task.last_authoring_desk": desk_id_str},
            {"task.last_production_desk": desk_id_str},
        ]
    }
    versions = superdesk.get_resource_service("archive_versions").get(req=None, lookup=version_lookup)
    if versions and versions.count():
        raise SuperdeskApiError.preconditionFailedError(
            message="Cannot delete desk as it has article(s) or referenced by versions of the article(s)."
        )
def on_delete(self, doc):
    """Block deletion of a destination group while anything still references it."""
    doc_id = doc.get(superdesk.config.ID_FIELD)

    # Archive items targeting the group block deletion.
    term_query = {
        "query": {
            "filtered": {
                "filter": {
                    "term": {
                        "destination_groups": str(doc_id)
                    }
                }
            }
        }
    }
    parsed = ParsedRequest()
    parsed.args = {'source': json.dumps(term_query)}
    items = get_resource_service('archive').get(req=parsed, lookup=None)
    if items and items.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Destination Group is referenced by items.')

    # Other destination groups may nest this one.
    groups = self.get(req=None, lookup={'destination_groups': doc_id})
    if groups and groups.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Destination Group is referenced by other Destination Group/s.')

    # Routing schemes may fetch or publish into the group.
    schemes = get_resource_service('routing_schemes').get(req=None, lookup={'$or': [
        {'rules.actions.fetch.destination_groups': doc_id},
        {'rules.actions.publish.destination_groups': doc_id}
    ]})
    if schemes and schemes.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Destination Group is referenced by Routing Scheme/s.')
def on_delete(self, desk):
    """
    Overriding to prevent deletion of a desk if the desk meets one of the below conditions:
        1. The desk isn't assigned as a default desk to user(s)
        2. The desk has no content
        3. The desk is associated with routing rule(s)
    """
    # A desk that is any user's default desk must not be removed.
    default_desk_users = superdesk.get_resource_service('users').get(req=None, lookup={'desk': desk['_id']})
    if default_desk_users and default_desk_users.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete desk as it is assigned as default desk to user(s).')

    # Routing schemes may fetch or publish into the desk.
    scheme_lookup = {'$or': [{'rules.actions.fetch.desk': desk['_id']},
                             {'rules.actions.publish.desk': desk['_id']}]}
    schemes = superdesk.get_resource_service('routing_schemes').get(req=None, lookup=scheme_lookup)
    if schemes and schemes.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete desk as routing scheme(s) are associated with the desk')

    # Archive items reference desks by string id.
    desk_items = superdesk.get_resource_service('archive').get(req=None, lookup={'task.desk': str(desk['_id'])})
    if desk_items and desk_items.count():
        raise SuperdeskApiError.preconditionFailedError(message='Cannot delete desk as it has article(s).')
def on_delete(self, desk):
    """
    Overriding to prevent deletion of a desk if the desk meets one of the below conditions:
        1. The desk isn't assigned as a default desk to user(s)
        2. The desk has no content
        3. The desk is associated with routing rule(s)
    """
    desk_id = desk['_id']

    # A desk that is any user's default desk must not be removed.
    users = superdesk.get_resource_service('users').get(req=None, lookup={'desk': desk_id})
    if users and users.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete desk as it is assigned as default desk to user(s).')

    # Routing schemes may fetch or publish into the desk.
    rule_lookup = {'$or': [{'rules.actions.fetch.desk': desk_id},
                           {'rules.actions.publish.desk': desk_id}]}
    rules = superdesk.get_resource_service('routing_schemes').get(req=None, lookup=rule_lookup)
    if rules and rules.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete desk as routing scheme(s) are associated with the desk')

    # Archive items reference desks by string id.
    articles = superdesk.get_resource_service('archive').get(req=None, lookup={'task.desk': str(desk_id)})
    if articles and articles.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete desk as it has article(s).')
def restore_version(self, id, doc, original):
    """Restore an older version of an item as the current document.

    :param id: item id
    :param doc: carries ``old_version`` (version to restore) and
                ``last_version`` (expected current version)
    :param original: part of the service signature; not used here
    :return: the item id, or None when a required value is missing
    :raises SuperdeskApiError.notFoundError: unknown version or item id
    :raises SuperdeskApiError.preconditionFailedError: when the item changed
        since the client read it
    """
    item_id = id
    old_version = int(doc.get('old_version', 0))
    last_version = int(doc.get('last_version', 0))
    if (not all([item_id, old_version, last_version])):
        return None

    old = get_resource_service('archive_versions').find_one(
        req=None, _id_document=item_id, _current_version=old_version)
    if old is None:
        raise SuperdeskApiError.notFoundError(
            _('Invalid version {old_version}').format(old_version=old_version))

    curr = get_resource_service(SOURCE).find_one(req=None, _id=item_id)
    if curr is None:
        raise SuperdeskApiError.notFoundError(
            _('Invalid item id {item_id}').format(item_id=item_id))

    # Optimistic-concurrency guard: reject when the item changed since the
    # client read it.
    if curr[config.VERSION] != last_version:
        raise SuperdeskApiError.preconditionFailedError(
            _('Invalid last version {last_version}').format(
                last_version=last_version))

    # Turn the version record back into a current document: restore its id,
    # refresh timestamps/expiry and recompute the sign-off.
    old['_id'] = old['_id_document']
    old['_updated'] = old['versioncreated'] = utcnow()
    set_item_expiry(old, doc)
    old.pop('_id_document', None)
    old.pop(SIGN_OFF, None)
    old[ITEM_OPERATION] = ITEM_RESTORE

    resolve_document_version(old, SOURCE, 'PATCH', curr)
    remove_unwanted(old)
    set_sign_off(updates=old, original=curr)

    super().replace(id=item_id, document=old, original=curr)

    old.pop('old_version', None)
    old.pop('last_version', None)
    doc.update(old)
    return item_id
def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """
    Validates if the given archived_doc is still eligible to be duplicated. Rules:
        1. Is the item requested found in archive collection?
        2. Is item still in the same desk?
        3. Is workflow transition valid?
        4. Is item locked by another user?

    :param doc_in_archive: object representing the doc in archive collection
    :type doc_in_archive: dict
    :param doc: object received as part of request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises
        SuperdeskApiError.notFoundError: If doc_in_archive is None
        SuperdeskApiError.preconditionFailedError: if item is moved to a different desk
        SuperdeskApiError.forbiddenError: if item is locked
        InvalidStateTransitionError: if workflow transition is invalid
    """
    # Rule 1: the item must exist in the archive.
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % guid_to_duplicate)

    # Rule 2: the request's desk must match the item's current desk.
    current_desk_of_item = doc_in_archive.get('task', {}).get('desk')
    if current_desk_of_item is None or str(current_desk_of_item) != str(
            doc.get('desk')):
        raise SuperdeskApiError.preconditionFailedError(
            message='Duplicate is allowed within the same desk.')

    # Rule 3: the workflow must permit duplication from the item's state.
    if not is_workflow_state_transition_valid('duplicate',
                                              doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    # Rule 4: a lock held by another user blocks duplication unless forced.
    lock_user = doc_in_archive.get('lock_user', None)
    force_unlock = doc_in_archive.get('force_unlock', False)
    user = get_user()
    str_user_id = str(user.get(config.ID_FIELD)) if user else None
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError(
            'The item was locked by another user')
def move_content(self, id, doc):
    """Move an archive item to the desk/stage carried in ``doc['task']``.

    Validates the move, updates workflow state/sign-off, versions and
    persists the change, pushes a content notification, then applies any
    on-stage macros.

    :param id: _id/guid of the item to move
    :param doc: document carrying the target ``task`` (desk and stage)
    :return: the updated archive document
    :raises SuperdeskApiError.notFoundError: when the item does not exist
    :raises SuperdeskApiError.preconditionFailedError: when moving within the same stage
    :raises InvalidStateTransitionError: when the workflow forbids the move
    """
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % id)

    # A move must actually change the stage.
    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()

    original = deepcopy(archived_doc)
    user = get_user()
    send_to(doc=archived_doc, desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'), user_id=user.get(config.ID_FIELD))

    # Published/scheduled/killed items keep their state; everything else is
    # submitted to the new desk.
    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
    archived_doc[ITEM_OPERATION] = ITEM_MOVE

    # set the change in desk type when content is moved.
    self.set_change_in_desk_type(archived_doc, original)
    # Drop the stale sign-off and recompute it for the moving user.
    archived_doc.pop(SIGN_OFF, None)
    set_sign_off(archived_doc, original=original)
    convert_task_attributes_to_objectId(archived_doc)

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    # update() takes the id separately; it must not appear in the payload.
    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)

    insert_into_versions(id_=original[config.ID_FIELD])
    push_content_notification([archived_doc, original])

    # finally apply any on stage rules/macros
    apply_onstage_rule(archived_doc, original[config.ID_FIELD])

    return archived_doc
def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """Ensure the archived item is still eligible to be duplicated.

    Checks, in order: the item exists, it is still on the requested desk,
    the 'duplicate' workflow transition is valid, and it is not locked by
    another user (unless unlock is forced).

    :param doc_in_archive: document as stored in the archive collection
    :type doc_in_archive: dict
    :param doc: document received as part of the request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: if doc_in_archive is None
    :raises SuperdeskApiError.preconditionFailedError: if the item moved desks
    :raises InvalidStateTransitionError: if the workflow transition is invalid
    :raises SuperdeskApiError.forbiddenError: if the item is locked by someone else
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % guid_to_duplicate)

    archived_desk = doc_in_archive.get('task', {}).get('desk')
    if archived_desk is None or str(archived_desk) != str(doc.get('desk')):
        raise SuperdeskApiError.preconditionFailedError(message='Duplicate is allowed within the same desk.')

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    holder = doc_in_archive.get('lock_user', None)
    forced = doc_in_archive.get('force_unlock', False)
    requester = get_user()
    requester_id = str(requester.get(config.ID_FIELD)) if requester else None
    if holder and str(holder) != requester_id and not forced:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')
def restore_version(self, id, doc, original):
    """Restore an older version of an archived item as its current content.

    :param id: _id of the item to restore
    :param doc: request payload carrying ``old_version`` (version to restore)
        and ``last_version`` (expected current version, optimistic-lock check);
        updated in place with the restored document on success
    :param original: unused here; kept for the service-hook signature
    :return: the item id on success, ``None`` if any required argument is falsy
    :raises SuperdeskApiError.notFoundError: if the requested version or item
        does not exist
    :raises SuperdeskApiError.preconditionFailedError: if ``last_version`` does
        not match the item's current version
    """
    item_id = id
    old_version = int(doc.get('old_version', 0))
    last_version = int(doc.get('last_version', 0))
    # All three must be truthy; a 0/empty value means the request is incomplete.
    if (not all([item_id, old_version, last_version])):
        return None

    old = get_resource_service('archive_versions').find_one(req=None, _id_document=item_id, _current_version=old_version)
    if old is None:
        raise SuperdeskApiError.notFoundError('Invalid version %s' % old_version)

    curr = get_resource_service(SOURCE).find_one(req=None, _id=item_id)
    if curr is None:
        raise SuperdeskApiError.notFoundError('Invalid item id %s' % item_id)

    # Optimistic lock: the caller must have seen the latest version.
    if curr[config.VERSION] != last_version:
        raise SuperdeskApiError.preconditionFailedError('Invalid last version %s' % last_version)

    # Turn the version record back into an archive document: restore its
    # original _id, refresh timestamps, and drop version-collection fields.
    old['_id'] = old['_id_document']
    old['_updated'] = old['versioncreated'] = utcnow()
    set_item_expiry(old, doc)
    old.pop('_id_document', None)
    # Sign-off is recomputed below rather than carried over from the old version.
    old.pop(SIGN_OFF, None)
    old[ITEM_OPERATION] = ITEM_RESTORE

    resolve_document_version(old, SOURCE, 'PATCH', curr)
    remove_unwanted(old)
    set_sign_off(updates=old, original=curr)

    super().replace(id=item_id, document=old, original=curr)

    # Strip request-only fields before reflecting the result back to the caller.
    old.pop('old_version', None)
    old.pop('last_version', None)
    doc.update(old)
    return item_id
def create(self, docs, **kwargs):
    """Copy the personal (desk-less) item named by the request's ``guid``
    once per entry in ``docs``.

    :param docs: list of request payloads; one copy is made per entry
    :param kwargs: ``notify`` (default True) controls the push notification
    :return: list of GUIDs of the newly created copies
    :raises SuperdeskApiError.notFoundError: if the source item does not exist
    :raises SuperdeskApiError.preconditionFailedError: if the source item is
        on a desk (copy is only allowed for personal items)
    """
    guid_of_item_to_be_copied = request.view_args['guid']
    guid_of_copied_items = []

    # The service lookup is loop-invariant; fetch it once instead of per doc.
    archive_service = get_resource_service(ARCHIVE)

    for doc in docs:
        # Re-fetch per iteration so each copy starts from the current state.
        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found item with guid: %s' % guid_of_item_to_be_copied)

        current_desk_of_item = archived_doc.get('task', {}).get('desk')
        if current_desk_of_item:
            raise SuperdeskApiError.preconditionFailedError(
                message='Copy is not allowed on items in a desk.')

        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_copied_items.append(new_guid)

    if kwargs.get('notify', True):
        push_notification('item:copy', copied=1)

    return guid_of_copied_items
def validate_template_name(self, doc_template_name):
    """Raise if another template already uses this name (case-insensitive).

    :param doc_template_name: proposed template name
    :raises SuperdeskApiError.preconditionFailedError: if a template with the
        same name (ignoring case) already exists
    """
    # re.escape the name so regex metacharacters in it (e.g. '+', '(')
    # are matched literally instead of raising re.error or widening the match.
    query = {'template_name': re.compile('^{}$'.format(re.escape(doc_template_name)), re.IGNORECASE)}
    if self.find_one(req=None, **query):
        msg = 'Template name must be unique'
        raise SuperdeskApiError.preconditionFailedError(message=msg, payload=msg)
def on_delete(self, doc):
    """Block deletion of a subscriber destination that is still in use.

    :param doc: the destination document about to be deleted
    :raises SuperdeskApiError.preconditionFailedError: if any output channel
        references this destination
    """
    referencing = get_resource_service('output_channels').get(
        req=None, lookup={'destinations': str(doc.get('_id'))})
    if referencing and referencing.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Subscriber is associated with Output Channel.')
def on_delete(self, doc):
    """Block deletion of an output channel that is still in use.

    :param doc: the output-channel document about to be deleted
    :raises SuperdeskApiError.preconditionFailedError: if any destination
        group references this channel
    """
    referencing = get_resource_service('destination_groups').get(
        req=None, lookup={'output_channels.channel': str(doc.get('_id'))})
    if referencing and referencing.count() > 0:
        raise SuperdeskApiError.preconditionFailedError(
            message='Output Channel is associated with Destination Groups.')