def unlock(self, item_filter, user_id, session_id, etag):
    item_model = get_model(ItemModel)
    item = item_model.find_one(item_filter)

    if not item:
        raise SuperdeskApiError.notFoundError()

    if not item.get(LOCK_USER):
        raise SuperdeskApiError.badRequestError(message="Item is not locked.")

    can_user_unlock, error_message = self.can_unlock(item, user_id)

    if can_user_unlock:
        self.app.on_item_unlock(item, user_id)

        # delete the item if nothing is saved so far
        # version 0 created on lock item
        if item.get(config.VERSION, 0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
            superdesk.get_resource_service('archive').delete_action(lookup={'_id': item['_id']})
            push_content_notification([item])
        else:
            updates = {LOCK_USER: None, LOCK_SESSION: None, 'lock_time': None, 'force_unlock': True}
            item_model.update(item_filter, updates)
            self.app.on_item_unlocked(item, user_id)

        push_notification('item:unlock',
                          item=str(item_filter.get(config.ID_FIELD)),
                          item_version=str(item.get(config.VERSION)),
                          state=item.get(ITEM_STATE),
                          user=str(user_id),
                          lock_session=str(session_id))
    else:
        raise SuperdeskApiError.forbiddenError(message=error_message)

    item = item_model.find_one(item_filter)
    return item
def _validate_routing_scheme(self, routing_scheme):
    """Validates a routing scheme against the following rules:

    1. A routing scheme must have at least one rule.
    2. Every rule in the routing scheme must have a name, a filter and at least one action.

    Raises BadRequestError if any of the conditions fail.

    :param routing_scheme:
    """
    routing_rules = routing_scheme.get('rules', [])
    if len(routing_rules) == 0:
        raise SuperdeskApiError.badRequestError(message="A Routing Scheme must have at least one Rule")

    for routing_rule in routing_rules:
        invalid_fields = [field for field in routing_rule.keys()
                          if field not in ('name', 'filter', 'actions', 'schedule')]

        if invalid_fields:
            raise SuperdeskApiError.badRequestError(
                message="A routing rule has invalid fields {}".format(invalid_fields))

        schedule = routing_rule.get('schedule')
        actions = routing_rule.get('actions')

        if routing_rule.get('name') is None:
            raise SuperdeskApiError.badRequestError(message="A routing rule must have a name")
        elif actions is None or len(actions) == 0 or (actions.get('fetch') is None
                                                      and actions.get('publish') is None
                                                      and actions.get('exit') is None):
            raise SuperdeskApiError.badRequestError(message="A routing rule must have actions")
        else:
            self._validate_schedule(schedule)
def __validate_schedule(self, schedule):
    if schedule is not None \
            and (len(schedule) == 0
                 or schedule.get('day_of_week') is None
                 or len(schedule.get('day_of_week', [])) == 0):
        raise SuperdeskApiError.badRequestError(message="Schedule when defined can't be empty.")

    if schedule:
        day_of_week = [str(week_day).upper() for week_day in schedule.get('day_of_week', [])]
        if not (len(set(day_of_week) & set(self.day_of_week)) == len(day_of_week)):
            raise SuperdeskApiError.badRequestError(message="Invalid values for day of week.")

        if schedule.get('hour_of_day_from') or schedule.get('hour_of_day_to'):
            try:
                from_time = datetime.strptime(schedule.get('hour_of_day_from'), '%H%M')
            except (TypeError, ValueError):
                raise SuperdeskApiError.badRequestError(message="Invalid value for from time.")

            try:
                to_time = datetime.strptime(schedule.get('hour_of_day_to'), '%H%M')
            except (TypeError, ValueError):
                raise SuperdeskApiError.badRequestError(message="Invalid value for to time.")

            if from_time > to_time:
                raise SuperdeskApiError.badRequestError(message="From time should be less than to time.")
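# Illustrative only: the shape of a routing scheme that passes the two
# validators above. Field names come from the checks themselves; the
# concrete values are hypothetical.
example_routing_scheme = {
    'name': 'Sports routing',
    'rules': [{
        'name': 'Sports rule',
        'filter': 'content-filter-id',              # content filter reference
        'actions': {'fetch': [], 'publish': [], 'exit': False},
        'schedule': {
            'day_of_week': ['MON', 'TUE', 'WED'],   # must match self.day_of_week values
            'hour_of_day_from': '0800',             # parsed with '%H%M'
            'hour_of_day_to': '1700',               # must not be earlier than the from time
        },
    }],
}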
def restore_version(self, id, doc):
    item_id = id
    old_version = int(doc.get('old_version', 0))
    last_version = int(doc.get('last_version', 0))
    if not all([item_id, old_version, last_version]):
        return None

    old = get_resource_service('archive_versions').find_one(req=None, _id_document=item_id,
                                                            _version=old_version)
    if old is None:
        raise SuperdeskApiError.notFoundError('Invalid version %s' % old_version)

    curr = get_resource_service(SOURCE).find_one(req=None, _id=item_id)
    if curr is None:
        raise SuperdeskApiError.notFoundError('Invalid item id %s' % item_id)

    if curr[config.VERSION] != last_version:
        raise SuperdeskApiError.preconditionFailedError('Invalid last version %s' % last_version)

    old['_id'] = old['_id_document']
    old['_updated'] = old['versioncreated'] = utcnow()
    set_item_expiry(old, doc)
    del old['_id_document']
    resolve_document_version(old, 'archive', 'PATCH', curr)

    remove_unwanted(old)
    res = super().replace(id=item_id, document=old)

    del doc['old_version']
    del doc['last_version']
    doc.update(old)
    return res
def lock(self, item_filter, user_id, session_id, etag):
    item_model = get_model(ItemModel)
    item = item_model.find_one(item_filter)

    if not item:
        raise SuperdeskApiError.notFoundError()

    can_user_lock, error_message = self.can_lock(item, user_id, session_id)

    if can_user_lock:
        self.app.on_item_lock(item, user_id)
        updates = {LOCK_USER: user_id, LOCK_SESSION: session_id, 'lock_time': utcnow()}
        item_model.update(item_filter, updates)

        if item.get(TASK):
            item[TASK]['user'] = user_id
        else:
            item[TASK] = {'user': user_id}

        superdesk.get_resource_service('tasks').assign_user(item[config.ID_FIELD], item[TASK])
        self.app.on_item_locked(item, user_id)
        push_notification('item:lock',
                          item=str(item.get(config.ID_FIELD)),
                          item_version=str(item.get(config.VERSION)),
                          user=str(user_id),
                          lock_time=updates['lock_time'],
                          lock_session=str(session_id))
    else:
        raise SuperdeskApiError.forbiddenError(message=error_message)

    item = item_model.find_one(item_filter)
    return item
def create(self, docs, **kwargs):
    guid_of_item_to_be_duplicated = request.view_args['guid']
    guid_of_duplicated_items = []

    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)

        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                'Failed to find item with guid: %s' % guid_of_item_to_be_duplicated)

        current_desk_of_item = archived_doc.get('task', {}).get('desk')
        if current_desk_of_item is None or str(current_desk_of_item) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(
                message='Duplicate is allowed within the same desk.')

        send_to(doc=archived_doc, desk_id=doc.get('desk'))
        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_duplicated_items.append(new_guid)

        if kwargs.get('notify', True):
            task = archived_doc.get('task', {})
            push_notification(
                'content:update',
                duplicated=1,
                item=str(new_guid),
                desk=str(task.get('desk', '')),
                stage=str(task.get('stage', ''))
            )

    return guid_of_duplicated_items
def on_update(self, updates, original):
    user = get_user()

    if 'unique_name' in updates and not is_admin(user) \
            and (user['active_privileges'].get('metadata_uniquename', 0) == 0):
        raise SuperdeskApiError.forbiddenError("Unauthorized to modify Unique Name")

    remove_unwanted(updates)

    if self.__is_req_for_save(updates):
        update_state(original, updates)

    lock_user = original.get('lock_user', None)
    force_unlock = updates.get('force_unlock', False)

    original_creator = updates.get('original_creator', None)
    if not original_creator:
        # use .get() so an original without a creator doesn't raise KeyError
        updates['original_creator'] = original.get('original_creator')

    str_user_id = str(user.get('_id'))
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')

    updates['versioncreated'] = utcnow()
    set_item_expiry(updates, original)
    updates['version_creator'] = str_user_id
    update_word_count(updates)

    if force_unlock:
        del updates['force_unlock']
def get_expiry(desk_id=None, stage_id=None, desk_or_stage_doc=None):
    """Calculates the expiry for content by fetching the expiry duration from one of the below:

    1. desk identified by desk_id
    2. stage identified by stage_id. This will ignore desk_id if specified
    3. desk doc or stage doc identified by desk_or_stage_doc. This will ignore desk_id and stage_id if specified

    :param desk_id: desk identifier
    :param stage_id: stage identifier
    :param desk_or_stage_doc: doc from either the desks collection or the stages collection
    :return: when the doc will expire
    """
    stage = None

    if desk_or_stage_doc is None and desk_id:
        desk = superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id)

        if not desk:
            raise SuperdeskApiError.notFoundError('Invalid desk identifier %s' % desk_id)

        if not stage_id:
            stage = get_resource_service('stages').find_one(req=None, _id=desk['incoming_stage'])

            if not stage:
                raise SuperdeskApiError.notFoundError(
                    'Invalid stage identifier %s' % desk['incoming_stage'])

    if desk_or_stage_doc is None and stage_id:
        stage = get_resource_service('stages').find_one(req=None, _id=stage_id)

        if not stage:
            raise SuperdeskApiError.notFoundError('Invalid stage identifier %s' % stage_id)

    return get_item_expiry(app=app, stage=desk_or_stage_doc or stage)
def get_expiry(desk_id, stage_id, offset=None):
    """Calculates the expiry for content by fetching the expiry duration from one of the below:

    1. desk identified by desk_id
    2. stage identified by stage_id

    :param desk_id: desk identifier
    :param stage_id: stage identifier
    :param offset: optional offset passed through to get_item_expiry
    :return: when the doc will expire
    """
    stage = None
    desk = None

    if desk_id:
        desk = superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id)

        if not desk:
            raise SuperdeskApiError.notFoundError('Invalid desk identifier %s' % desk_id)

    if stage_id:
        stage = get_resource_service('stages').find_one(req=None, _id=stage_id)

        if not stage:
            raise SuperdeskApiError.notFoundError('Invalid stage identifier %s' % stage_id)

    return get_item_expiry(desk, stage, offset)
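# Illustrative only: a caller resolving an item's expiry from its task, much as
# send_to() does further below. Requires a running app context with the desks
# and stages services; the ids and item dict here are hypothetical.
item = {'task': {'desk': 'desk-object-id', 'stage': 'stage-object-id'}}
item['expiry'] = get_expiry(item['task']['desk'], item['task']['stage'])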
def on_delete(self, doc):
    """Checks that deleting the stage would not violate data integrity, and raises an exception if it would:

    1/ Can't delete the default incoming stage.
    2/ The stage must have no documents (spiked or unspiked).
    3/ The stage can not be referred to by an ingest routing rule.

    :param doc:
    :return:
    """
    if doc['default_incoming'] is True:
        desk_id = doc.get('desk', None)
        if desk_id and superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id):
            raise SuperdeskApiError.preconditionFailedError(message='Cannot delete a default stage.')

    archive_versions_query = {'task.stage': str(doc[config.ID_FIELD])}
    items = superdesk.get_resource_service('archive_versions').get(req=None, lookup=archive_versions_query)
    if items and items.count():
        raise SuperdeskApiError.preconditionFailedError(
            message='Cannot delete stage as it has article(s) or is referenced by versions of article(s).')

    # check if the stage is referred to in an ingest routing rule
    rules = self._stage_in_rule(doc[config.ID_FIELD])
    if rules.count() > 0:
        rule_names = ', '.join(rule.get('name') for rule in rules)
        raise SuperdeskApiError.preconditionFailedError(
            message='Stage is referred to by Ingest Routing Schemes: {}'.format(rule_names))
def _validate_disable(self, updates, original):
    """Checks the templates and desks that reference the given content profile if the profile is being disabled."""
    if 'enabled' in updates and updates.get('enabled') is False and original.get('enabled') is True:
        templates = list(superdesk.get_resource_service('content_templates').
                         get_templates_by_profile_id(original.get('_id')))

        if len(templates) > 0:
            template_names = ', '.join([t.get('template_name') for t in templates])
            raise SuperdeskApiError.badRequestError(
                message='Cannot disable content profile as the following templates reference it: {}'.
                format(template_names))

        req = ParsedRequest()
        all_desks = list(superdesk.get_resource_service('desks').get(req=req, lookup={}))
        profile_desks = [desk for desk in all_desks
                         if desk.get('default_content_profile') == str(original.get('_id'))]

        if len(profile_desks) > 0:
            profile_desk_names = ', '.join([d.get('name') for d in profile_desks])
            raise SuperdeskApiError.badRequestError(
                message='Cannot disable content profile as the following desks reference it: {}'.
                format(profile_desk_names))
def validate_crop(self, original, updates, crop_name):
    """
    :param dict original: original item
    :param dict updates: updated renditions
    :param str crop_name: name of the crop
    :raises SuperdeskApiError.badRequestError: for the following conditions:
        1) if type != picture
        2) if renditions are missing in the original image
        3) if the original rendition is missing
        4) if the crop name is invalid
    """
    # Check if type is picture
    if original[ITEM_TYPE] != CONTENT_TYPE.PICTURE:
        raise SuperdeskApiError.badRequestError(message='Only images can be cropped!')

    # Check if the renditions exist
    if not original.get('renditions'):
        raise SuperdeskApiError.badRequestError(message='Missing renditions!')

    # Check if the original rendition exists
    if not original.get('renditions').get('original'):
        raise SuperdeskApiError.badRequestError(message='Missing original rendition!')

    # Check if the crop name is valid
    crop = self.get_crop_by_name(crop_name)
    crop_data = updates.get('renditions', {}).get(crop_name, {})
    if not crop and 'CropLeft' in crop_data:
        raise SuperdeskApiError.badRequestError(message='Unknown crop name! (name=%s)' % crop_name)

    self._validate_values(crop_data)
    self._validate_poi(original, updates, crop_name)
    self._validate_aspect_ratio(crop, crop_data)
def delete(self, lookup):
    filter_id = lookup.get('_id')

    # check if the filter is referenced by any subscribers...
    subscribers = self._get_referencing_subscribers(filter_id)
    if subscribers.count() > 0:
        references = ','.join(s['name'] for s in subscribers)
        raise SuperdeskApiError.badRequestError(
            'Content filter has been referenced by subscriber(s) {}'.format(references))

    # check if the filter is referenced by any routing schemes...
    schemes = self._get_referencing_routing_schemes(filter_id)
    if schemes.count() > 0:
        references = ','.join(s['name'] for s in schemes)
        raise SuperdeskApiError.badRequestError(
            'Content filter has been referenced by routing scheme(s) {}'.format(references))

    # check if the filter is referenced by any other content filters...
    referenced_filters = self._get_content_filters_by_content_filter(filter_id)
    if referenced_filters.count() > 0:
        references = ','.join([pf['name'] for pf in referenced_filters])
        raise SuperdeskApiError.badRequestError(
            'Content filter has been referenced in {}'.format(references))

    return super().delete(lookup)
def validate_embargo(self, item):
    """Validates the embargo of the item. The following are checked:

    1. The item can't be a package or a re-write of another story.
    2. Publish Schedule and Embargo are mutually exclusive.
    3. Embargo is always a future date, except in the case of corrected and killed items.

    :raises: SuperdeskApiError.badRequestError() if the validation fails
    """
    if item[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
        if EMBARGO in item:
            embargo = item.get(SCHEDULE_SETTINGS, {}).get('utc_{}'.format(EMBARGO))
            if embargo:
                if item.get(PUBLISH_SCHEDULE) or item[ITEM_STATE] == CONTENT_STATE.SCHEDULED:
                    raise SuperdeskApiError.badRequestError(
                        "An item can't have both Publish Schedule and Embargo")

                if (item[ITEM_STATE] not in {CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED,
                                             CONTENT_STATE.SCHEDULED}) and embargo <= utcnow():
                    raise SuperdeskApiError.badRequestError("Embargo cannot be earlier than now")

                if item.get('rewrite_of'):
                    raise SuperdeskApiError.badRequestError("Rewrites don't support Embargo")

                if not isinstance(embargo, datetime.date) or not embargo.time():
                    raise SuperdeskApiError.badRequestError("Invalid Embargo")

    elif is_normal_package(item):
        if item.get(EMBARGO):
            raise SuperdeskApiError.badRequestError("A Package doesn't support Embargo")

        self.packageService.check_if_any_item_in_package_has_embargo(item)
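# Illustrative only: the fields validate_embargo() inspects on a text item.
# Key names mirror the constants used above (EMBARGO, SCHEDULE_SETTINGS, ...);
# the values are hypothetical.
from datetime import datetime, timedelta, timezone

embargoed_item = {
    'type': 'text',                      # ITEM_TYPE: anything but composite
    'state': 'in_progress',              # ITEM_STATE: not killed/recalled/scheduled
    'embargo': datetime.now(timezone.utc) + timedelta(hours=2),
    'schedule_settings': {
        'utc_embargo': datetime.now(timezone.utc) + timedelta(hours=2),
    },
    # 'publish_schedule' must be absent: embargo and schedule are exclusive
}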
def _validate_aspect_ratio(self, crop, doc):
    """Checks that the aspect ratio is consistent with the one defined in the spec.

    :param crop: spec parameters
    :param doc: posted parameters
    :raises SuperdeskApiError.badRequestError:
    """
    if 'CropLeft' not in doc:
        return

    width = doc['CropRight'] - doc['CropLeft']
    height = doc['CropBottom'] - doc['CropTop']

    if not (crop.get('width') or crop.get('height') or crop.get('ratio')):
        raise SuperdeskApiError.badRequestError(
            message='Crop data are missing. width, height or ratio need to be defined')

    if crop.get('width') and crop.get('height'):
        expected_crop_width = int(crop['width'])
        expected_crop_height = int(crop['height'])
        if width < expected_crop_width or height < expected_crop_height:
            raise SuperdeskApiError.badRequestError(
                message='Wrong crop size. Minimum crop size is {}x{}.'.format(crop['width'], crop['height']))

        doc_ratio = round(width / height, 1)
        spec_ratio = round(expected_crop_width / expected_crop_height, 1)
        if doc_ratio != spec_ratio:
            raise SuperdeskApiError.badRequestError(message='Wrong aspect ratio!')
    elif crop.get('ratio'):
        ratio = crop.get('ratio')
        if not isinstance(ratio, (int, float)):
            ratio = ratio.split(':')
            ratio = int(ratio[0]) / int(ratio[1])
        if abs((width / height) - ratio) > 0.01:
            raise SuperdeskApiError.badRequestError(
                message='Ratio %s is not respected. We got %f' % (crop.get('ratio'), abs(width / height)))
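# Illustrative only: the arithmetic the validator above applies to a posted
# crop. The spec and coordinates are hypothetical.
spec = {'width': 800, 'height': 600}                 # expected 4:3 rendition
posted = {'CropLeft': 0, 'CropRight': 1200, 'CropTop': 0, 'CropBottom': 900}

crop_width = posted['CropRight'] - posted['CropLeft']      # 1200
crop_height = posted['CropBottom'] - posted['CropTop']     # 900
# both ratios round to 1.3, so this crop passes the aspect-ratio check
assert round(crop_width / crop_height, 1) == round(spec['width'] / spec['height'], 1)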
def on_delete(self, docs):
    if docs.get('is_default'):
        raise SuperdeskApiError.forbiddenError('Cannot delete the default role')

    # check if there are any users in the role
    user = get_resource_service('users').find_one(req=None, role=docs.get('_id'))
    if user:
        raise SuperdeskApiError.forbiddenError('Cannot delete the role, it still has users in it!')
def update(self, id, updates, original):
    archived_item = super().find_one(req=None, _id=id)

    try:
        if archived_item['type'] == 'composite':
            self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])

        # document is saved to keep the initial changes
        self.backend.update(self.datasource, id, updates, original)
        original.update(updates)

        if archived_item['type'] != 'composite':
            # queue only text items
            self.queue_transmission(original)
            task = self.__send_to_publish_stage(original)
            if task:
                updates['task'] = task

        # document is saved to change the status
        updates[config.CONTENT_STATE] = 'published'
        item = self.backend.update(self.datasource, id, updates, original)
        original.update(updates)
        user = get_user()
        push_notification('item:publish', item=str(item.get('_id')), user=str(user))
        original.update(super().find_one(req=None, _id=id))
    except KeyError as e:
        raise SuperdeskApiError.badRequestError(
            message="Key is missing on article to be published: {}".format(str(e)))
    except Exception as e:
        logger.exception("Something bad happened while publishing %s", id)
        raise SuperdeskApiError.internalError(
            message="Failed to publish the item: {}".format(str(e)))
def _validate_seq_num_settings(self, subscriber):
    """Validates the 'sequence_num_settings' property if present in the subscriber. Below are the validation rules:

    1. If the min value is present then it should be greater than 0.
    2. If min is present and the max value isn't available then it defaults to MAX_VALUE_OF_PUBLISH_SEQUENCE.

    :return: True if validation succeeds, otherwise raises SuperdeskApiError.badRequestError.
    """
    if subscriber.get('sequence_num_settings'):
        min_seq = subscriber.get('sequence_num_settings').get('min', 1)
        max_seq = subscriber.get('sequence_num_settings').get('max', app.config['MAX_VALUE_OF_PUBLISH_SEQUENCE'])

        if min_seq <= 0:
            raise SuperdeskApiError.badRequestError(
                payload={"sequence_num_settings.min": 1},
                message="Value of Minimum in Sequence Number Settings should be greater than 0")

        if min_seq >= max_seq:
            raise SuperdeskApiError.badRequestError(
                payload={"sequence_num_settings.min": 1},
                message="Value of Minimum in Sequence Number Settings should be less than the value of Maximum")

        subscriber['sequence_num_settings'] = {"min": min_seq, "max": max_seq}

    return True
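# Illustrative only: the before/after shape _validate_seq_num_settings()
# produces; extra keys are dropped and defaults filled in. The values and the
# assumed config value are hypothetical.
subscriber = {'sequence_num_settings': {'min': 100, 'stray_key': True}}
# after validation (assuming MAX_VALUE_OF_PUBLISH_SEQUENCE == 9999):
# subscriber == {'sequence_num_settings': {'min': 100, 'max': 9999}}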
def send_to(doc, desk_id=None, stage_id=None):
    """Send item to given desk and stage.

    :param doc: item to be sent
    :param desk_id: id of the desk where the item should be sent
    :param stage_id: optional stage within the desk
    """
    task = doc.get('task', {})
    task.setdefault('desk', desk_id)
    task.setdefault('stage', stage_id)
    calculate_expiry_from = None

    if desk_id and not stage_id:
        desk = superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id)
        if not desk:
            raise SuperdeskApiError.notFoundError('Invalid desk identifier %s' % desk_id)

        calculate_expiry_from = desk
        task['desk'] = desk_id
        task['stage'] = desk.get('incoming_stage')

    if stage_id:
        stage = get_resource_service('stages').find_one(req=None, _id=stage_id)
        if not stage:
            raise SuperdeskApiError.notFoundError('Invalid stage identifier %s' % stage_id)

        calculate_expiry_from = stage
        task['desk'] = stage['desk']
        task['stage'] = stage_id
        if stage.get('task_status'):
            task['status'] = stage['task_status']

    doc['task'] = task
    doc['expiry'] = get_expiry(desk_or_stage_doc=calculate_expiry_from)
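# Illustrative only: moving an item to a desk's incoming stage with send_to().
# Requires a running app context; the item dict and id are hypothetical.
item = {'_id': 'item-id', 'task': {}}
send_to(item, desk_id='desk-object-id')
# item['task'] now carries the desk and its incoming stage,
# and item['expiry'] is derived from the desk's expiry settings.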
def unlock(self, item_filter, user_id, session_id, etag):
    item_model = get_model(ItemModel)
    item = item_model.find_one(item_filter)

    if not item:
        raise SuperdeskApiError.notFoundError()

    if not item.get(LOCK_USER):
        raise SuperdeskApiError.badRequestError(message="Item is not locked.")

    can_user_unlock, error_message = self.can_unlock(item, user_id)

    if can_user_unlock:
        self.app.on_item_unlock(item, user_id)

        # delete the item if nothing is saved so far
        if item['_version'] == 1 and item['state'] == 'draft':
            superdesk.get_resource_service('archive').delete(lookup={'_id': item['_id']})
            return

        updates = {LOCK_USER: None, LOCK_SESSION: None, 'lock_time': None, 'force_unlock': True}
        item_model.update(item_filter, updates)
        self.app.on_item_unlocked(item, user_id)
        push_notification('item:unlock',
                          item=str(item_filter.get(config.ID_FIELD)),
                          user=str(user_id),
                          lock_session=str(session_id))
    else:
        raise SuperdeskApiError.forbiddenError(message=error_message)

    item = item_model.find_one(item_filter)
    return item
def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """Validates if the given archived_doc is still eligible to be duplicated. Rules:

    1. Is the item requested found in the archive collection?
    2. Is the workflow transition valid?
    3. Is the item locked by another user?

    :param doc_in_archive: object representing the doc in the archive collection
    :type doc_in_archive: dict
    :param doc: object received as part of the request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises
        SuperdeskApiError.notFoundError: if doc_in_archive is None
        SuperdeskApiError.forbiddenError: if the item is locked
        InvalidStateTransitionError: if the workflow transition is invalid
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_user = doc_in_archive.get('lock_user', None)
    force_unlock = doc_in_archive.get('force_unlock', False)
    user = get_user()
    str_user_id = str(user.get(config.ID_FIELD)) if user else None
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')
def create_crop(self, original_image, crop_name, crop_data):
    """Create a new crop based on the crop co-ordinates.

    :param original_image: image rendition to crop
    :param crop_name: name of the crop
    :param crop_data: crop details
    :raises SuperdeskApiError.badRequestError
    :return dict: rendition
    """
    original_file = superdesk.app.media.fetch_rendition(original_image)
    if not original_file:
        raise SuperdeskApiError.badRequestError('Original file couldn\'t be found')

    try:
        cropped, out = crop_image(original_file, crop_name, crop_data)
        crop = self.get_crop_by_name(crop_name)
        if not cropped:
            raise SuperdeskApiError.badRequestError('Saving crop failed.')

        # resize if needed
        if crop.get('width') or crop.get('height'):
            out, width, height = _resize_image(out,
                                               size=(crop.get('width'), crop.get('height')),
                                               keepProportions=crop.get('keep_proportions', True))
            crop['width'] = width
            crop['height'] = height
            out.seek(0)

        return self._save_cropped_image(out, original_image, crop_data)
    except SuperdeskApiError:
        raise
    except Exception as ex:
        raise SuperdeskApiError.badRequestError('Generating crop failed: {}'.format(str(ex)))
def create(self, docs, **kwargs):
    for doc in docs:
        if doc.get('group_by') and doc.get('desk'):
            raise SuperdeskApiError.badRequestError('The desk must not be defined when group by is defined.')
        if not doc.get('group_by', False) and not doc.get('desk'):
            raise SuperdeskApiError.badRequestError('The desk is required when group by desk is false.')

    return super().create(docs, **kwargs)
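# Illustrative only: payloads accepted by the create() checks above; the
# desk id is hypothetical.
valid_grouped = {'group_by': True}                        # no desk when grouping by desk
valid_per_desk = {'group_by': False, 'desk': 'desk-id'}   # desk required otherwise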
def __validate_seq_num_settings(self, output_channel):
    """Validates the 'sequence_num_settings' property if present in the output_channel. Below are the validation rules:

    1. If the min value is present then it should be greater than 0.
    2. If min is present and the max value isn't available then it defaults to MAX_VALUE_OF_PUBLISH_SEQUENCE.
    3. If start_from is present then the value should be between min and max. Otherwise, it defaults to the value of min.

    :return: True if validation succeeds, otherwise raises SuperdeskApiError.badRequestError.
    """
    if output_channel.get('sequence_num_settings'):
        min_seq = output_channel.get('sequence_num_settings').get('min', 1)
        max_seq = output_channel.get('sequence_num_settings').get('max', MAX_VALUE_OF_PUBLISH_SEQUENCE)
        start_from = output_channel.get('sequence_num_settings').get('start_from', min_seq)

        if min_seq <= 0:
            raise SuperdeskApiError.badRequestError(
                payload={"sequence_num_settings.min": 1},
                message="Value of Minimum in Sequence Number Settings should be greater than 0")

        if min_seq >= max_seq:
            raise SuperdeskApiError.badRequestError(
                payload={"sequence_num_settings.min": 1},
                message="Value of Minimum in Sequence Number Settings should be less than the value of Maximum")

        if not min_seq <= start_from <= max_seq:
            raise SuperdeskApiError.badRequestError(
                payload={"sequence_num_settings.start_from": 1},
                message="Value of Start From in Sequence Number Settings should be between Minimum and Maximum")

        output_channel['sequence_num_settings'] = {"min": min_seq, "max": max_seq, "start_from": start_from}

    return True
def create(self, docs, **kwargs):
    service = get_resource_service('archive')
    doc = docs[0]
    formatter_name = doc.get('formatter_name')

    if not formatter_name:
        raise SuperdeskApiError.badRequestError('Formatter name not found')

    formatter = self._get_formatter(formatter_name)

    if not formatter:
        raise SuperdeskApiError.badRequestError('Formatter not found')

    if 'article_id' in doc:
        article_id = doc.get('article_id')
        article = service.find_one(req=None, _id=article_id)

        if not article:
            raise SuperdeskApiError.badRequestError('Article not found!')

        try:
            self._validate(article)
            sequence, formatted_doc = formatter.format(article, {'_id': '0'}, None)[0]
            formatted_doc = formatted_doc.replace('\'\'', '\'')
        except Exception as ex:
            raise SuperdeskApiError.badRequestError('Error in formatting article: {}'.format(str(ex)))

    return [{'formatted_doc': formatted_doc}]
def on_update(self, updates, original):
    """Called on the patch request to mark an activity/notification/comment as read and nothing else.

    :param updates:
    :param original:
    :return:
    """
    user = getattr(g, 'user', None)
    if not user:
        raise SuperdeskApiError.notFoundError('Can not determine user')
    user_id = user.get('_id')

    # make sure that the user marking the notification as read is in the notification list
    if not self.is_recipient(updates, user_id):
        raise SuperdeskApiError.forbiddenError('User is not in the notification list')

    # make sure the transition is from not read to read
    if not self.is_read(updates, user_id) and self.is_read(original, user_id):
        raise SuperdeskApiError.forbiddenError('Can not set notification as read')

    # make sure that no other users are being marked as read
    for recipient in updates.get('recipients', []):
        if recipient['user_id'] != user_id:
            if self.is_read(updates, recipient['user_id']) != self.is_read(original, recipient['user_id']):
                raise SuperdeskApiError.forbiddenError('Can not set other users\' notifications as read')

    # make sure that no other fields are being updated, just read and _updated
    if len(updates) != 2:
        raise SuperdeskApiError.forbiddenError('Can not update')
def move_content(self, id, doc):
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[ITEM_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)
    user = get_user()

    send_to(doc=archived_doc,
            desk_id=doc.get('task', {}).get('desk'),
            stage_id=doc.get('task', {}).get('stage'),
            user_id=user.get(config.ID_FIELD))

    if archived_doc[ITEM_STATE] not in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.SCHEDULED, CONTENT_STATE.KILLED}:
        archived_doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED

    archived_doc[ITEM_OPERATION] = ITEM_MOVE
    set_sign_off(archived_doc, original=original)
    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    del archived_doc[config.ID_FIELD]
    archive_service.update(original[config.ID_FIELD], archived_doc, original)

    insert_into_versions(id_=original[config.ID_FIELD])

    return archived_doc
def get(self, req, lookup):
    """Return a list of items related to the given item.

    The given item id is retrieved from the lookup dictionary as 'item_id'.
    """
    if 'item_id' not in lookup:
        raise SuperdeskApiError.badRequestError('The item identifier is required')

    item = get_resource_service('archive_autosave').find_one(req=None, _id=lookup['item_id'])
    if not item:
        item = get_resource_service('archive').find_one(req=None, _id=lookup['item_id'])
        if not item:
            raise SuperdeskApiError.notFoundError('Invalid item identifier')

    keywords = self.provider.get_keywords(self._transform(item))
    if not keywords:
        return ElasticCursor([])

    query = {
        'query': {
            'filtered': {
                'query': {
                    'query_string': {
                        'query': ' '.join(kwd['text'] for kwd in keywords)
                    }
                }
            }
        }
    }

    req = ParsedRequest()
    req.args = {'source': json.dumps(query), 'repo': 'archive,published,archived'}
    return get_resource_service('search').get(req=req, lookup=None)
def move_content(self, id, doc):
    archive_service = get_resource_service(ARCHIVE)
    archived_doc = archive_service.find_one(req=None, _id=id)

    if not archived_doc:
        raise SuperdeskApiError.notFoundError('Failed to find item with guid: %s' % id)

    current_stage_of_item = archived_doc.get('task', {}).get('stage')
    if current_stage_of_item and str(current_stage_of_item) == str(doc.get('task', {}).get('stage')):
        raise SuperdeskApiError.preconditionFailedError(message='Move is not allowed within the same stage.')

    if not is_workflow_state_transition_valid('submit_to_desk', archived_doc[config.CONTENT_STATE]):
        raise InvalidStateTransitionError()

    original = dict(archived_doc)
    send_to(archived_doc, doc.get('task', {}).get('desk'), doc.get('task', {}).get('stage'))

    if archived_doc[config.CONTENT_STATE] not in ['published', 'scheduled', 'killed']:
        archived_doc[config.CONTENT_STATE] = 'submitted'

    resolve_document_version(archived_doc, ARCHIVE, 'PATCH', original)

    del archived_doc['_id']
    archive_service.update(original['_id'], archived_doc, original)

    insert_into_versions(id_=original['_id'])

    return archived_doc
def create(self, docs, **kwargs):
    target_id = request.view_args['target_id']
    doc = docs[0]
    link_id = doc.get('link_id')
    desk_id = doc.get('desk')
    service = get_resource_service(ARCHIVE)
    target = service.find_one(req=None, _id=target_id)
    self._validate_link(target, target_id)
    link = {}

    if is_genre(target, BROADCAST_GENRE):
        raise SuperdeskApiError.badRequestError("Cannot add new take to the story with genre as broadcast.")

    if desk_id:
        link = {'task': {'desk': desk_id}}

        user = get_user()
        lookup = {'_id': desk_id, 'members.user': user['_id']}
        desk = get_resource_service('desks').find_one(req=None, **lookup)
        if not desk:
            raise SuperdeskApiError.forbiddenError("No privileges to create new take on requested desk.")

        link['task']['stage'] = desk['working_stage']

    if link_id:
        link = service.find_one(req=None, _id=link_id)

    linked_item = self.packageService.link_as_next_take(target, link)
    doc.update(linked_item)
    build_custom_hateoas(CUSTOM_HATEOAS, doc)
    return [linked_item['_id']]
def check_all_groups_have_id_set(self, groups):
    if any(not group.get(GROUP_ID) for group in groups):
        message = 'Group is missing id.'
        logger.error(message)
        raise SuperdeskApiError.forbiddenError(message=message)
def server_error_handler(error):
    """Log server errors."""
    return_error = SuperdeskApiError.internalError(error)
    return client_error_handler(return_error)
def _gen_attachments(report):
    report_service = get_report_service(report.get('type'))
    if report_service is None:
        raise SuperdeskApiError.badRequestError(
            'Unknown report type "{}"'.format(report.get('type')))

    if report.get('mimetype') in [MIME_TYPES.PNG, MIME_TYPES.JPEG, MIME_TYPES.GIF,
                                  MIME_TYPES.PDF, MIME_TYPES.SVG]:
        # This mimetype is handled by highcharts, so generate the highcharts config
        return_type = 'highcharts_config'
    elif report.get('mimetype') == MIME_TYPES.CSV:
        return_type = MIME_TYPES.CSV
    else:
        return_type = 'aggregations'

    generated_report = list(
        report_service.get(req=None,
                           params=report.get('params') or {},
                           translations=report.get('translations') or {},
                           return_type=return_type))[0]

    if return_type == 'highcharts_config':
        options = generated_report.get('highcharts')
    elif return_type == MIME_TYPES.CSV:
        options = [generated_report.get('csv')]
    else:
        options = []

    attachments = []
    i = 1
    for option in options:
        mime_type = report.get('mimetype')
        report_width = report.get('width') or 800

        if isinstance(option, dict) and option.get('type') == 'table':
            mime_type = MIME_TYPES.HTML

        try:
            attachments.append({
                'file': generate_report(option, mimetype=mime_type, base64=True, width=report_width),
                'mimetype': mime_type,
                'filename': 'chart_{}.{}'.format(i, get_mime_type_extension(mime_type)),
                'width': report_width
            })
            i += 1
        except Exception as e:
            logger.error('Failed to generate chart.')
            logger.exception(e)

    return attachments
def _publish_associated_items(self, original, updates=None):
    """If there are any updates to an associated item and the PUBLISH_ASSOCIATED_ITEMS setting
    is true, publish the associated item.
    """
    if updates is None:
        updates = {}

    if not publish_services.get(self.publish_type):
        # publish type not supported
        return

    publish_service = get_resource_service(publish_services.get(self.publish_type))

    if not updates.get(ASSOCIATIONS) and not original.get(ASSOCIATIONS):
        # there's nothing to update
        return

    associations = original.get(ASSOCIATIONS) or {}
    if updates and updates.get(ASSOCIATIONS):
        associations.update(updates[ASSOCIATIONS])

    archive_service = get_resource_service("archive")

    for associations_key, associated_item in associations.items():
        if associated_item is None:
            continue

        if isinstance(associated_item, dict) and associated_item.get(config.ID_FIELD):
            if not config.PUBLISH_ASSOCIATED_ITEMS or not publish_service:
                if original.get(ASSOCIATIONS, {}).get(associations_key):
                    # Not allowed to publish
                    original[ASSOCIATIONS][associations_key]["state"] = self.published_state
                    original[ASSOCIATIONS][associations_key]["operation"] = self.publish_type
                continue

            # if the item is not fetchable, only mark it as published
            if not associated_item.get("_fetchable", True):
                associated_item["state"] = self.published_state
                associated_item["operation"] = self.publish_type
                updates[ASSOCIATIONS] = updates.get(ASSOCIATIONS, {})
                updates[ASSOCIATIONS][associations_key] = associated_item
                continue

            if associated_item.get("state") == CONTENT_STATE.UNPUBLISHED:
                # get the original associated item from archive
                orig_associated_item = archive_service.find_one(req=None, _id=associated_item[config.ID_FIELD])

                orig_associated_item["state"] = updates.get("state", self.published_state)
                orig_associated_item["operation"] = self.publish_type

                # if the main item is scheduled we must also schedule associations
                self._inherit_publish_schedule(original, updates, orig_associated_item)

                get_resource_service("archive_publish").patch(
                    id=orig_associated_item.pop(config.ID_FIELD),
                    updates=orig_associated_item)
                continue

            if associated_item.get("state") not in PUBLISH_STATES:
                # This associated item has not been published before
                remove_unwanted(associated_item)

                # get the original associated item from archive
                orig_associated_item = archive_service.find_one(req=None, _id=associated_item[config.ID_FIELD])

                # check if the original associated item exists in archive
                if not orig_associated_item:
                    raise SuperdeskApiError.badRequestError(
                        _('Associated item "{}" does not exist in the system'.format(associations_key)))

                if orig_associated_item.get("state") in PUBLISH_STATES:
                    # item was published already
                    original[ASSOCIATIONS][associations_key].update({
                        "state": orig_associated_item["state"],
                        "operation": orig_associated_item.get("operation", self.publish_type),
                    })
                    continue

                # if the original associated item stage is present, it should be updated in the association item
                if orig_associated_item.get("task", {}).get("stage") and associated_item.get("task"):
                    associated_item["task"].update({
                        "stage": orig_associated_item.get("task", {}).get("stage")
                    })

                # update _updated, otherwise it's stored as a string. fixes SDESK-5043
                associated_item["_updated"] = utcnow()

                # if the main item is scheduled we must also schedule associations
                self._inherit_publish_schedule(original, updates, associated_item)

                get_resource_service("archive_publish").patch(
                    id=associated_item.pop(config.ID_FIELD),
                    updates=associated_item)

                associated_item["state"] = updates.get("state", self.published_state)
                associated_item["operation"] = self.publish_type
                updates[ASSOCIATIONS] = updates.get(ASSOCIATIONS, {})
                updates[ASSOCIATIONS][associations_key] = associated_item
            elif associated_item.get("state") != self.published_state:
                # Check if there are updates to the associated item
                association_updates = updates.get(ASSOCIATIONS, {}).get(associations_key)

                # if the main item is scheduled we must also schedule associations
                self._inherit_publish_schedule(original, updates, associated_item)

                if not association_updates:
                    # there is no update for this item
                    associated_item.get("task", {}).pop("stage", None)
                    remove_unwanted(associated_item)
                    publish_service.patch(id=associated_item.pop(config.ID_FIELD), updates=associated_item)
                    continue

                if association_updates.get("state") not in PUBLISH_STATES:
                    # There's an update to the published associated item
                    remove_unwanted(association_updates)
                    publish_service.patch(id=association_updates.pop(config.ID_FIELD),
                                          updates=association_updates)

    self._refresh_associated_items(original)
def _publish_package_items(self, package, updates):
    """Publishes all items of a package recursively then publishes the package itself.

    :param package: package to publish
    :param updates: payload
    """
    items = self.package_service.get_residrefs(package)

    if len(items) == 0 and self.publish_type == ITEM_PUBLISH:
        raise SuperdeskApiError.badRequestError(_("Empty package cannot be published!"))

    added_items = []
    removed_items = []
    if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
        removed_items, added_items = self._get_changed_items(items, updates)
        # we raise an error if correction is done on an empty package. Kill is fine.
        if len(removed_items) == len(items) and len(added_items) == 0 and self.publish_type == ITEM_CORRECT:
            raise SuperdeskApiError.badRequestError(_("Corrected package cannot be empty!"))
        items.extend(added_items)

    if not updates.get("groups") and package.get("groups"):  # this saves some typing in tests
        updates["groups"] = package.get("groups")

    if items:
        archive_publish = get_resource_service("archive_publish")
        for guid in items:
            package_item = super().find_one(req=None, _id=guid)

            if not package_item:
                raise SuperdeskApiError.badRequestError(
                    _("Package item with id: {guid} does not exist.").format(guid=guid))

            if package_item[ITEM_STATE] not in PUBLISH_STATES:
                # if the item is not published then publish it
                if package_item[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                    # if the item is a package, recurse to publish it
                    sub_updates = {i: updates[i] for i in ["state", "operation"] if i in updates}
                    sub_updates["groups"] = list(package_item["groups"])
                    self._publish_package_items(package_item, sub_updates)
                    self._update_archive(original=package_item,
                                         updates=sub_updates,
                                         should_insert_into_versions=False)
                else:
                    # publish the item
                    package_item[PUBLISHED_IN_PACKAGE] = package[config.ID_FIELD]
                    archive_publish.patch(id=package_item.pop(config.ID_FIELD), updates=package_item)

                insert_into_versions(id_=guid)
            elif guid in added_items:
                linked_in_packages = package_item.get(LINKED_IN_PACKAGES, [])
                if package[config.ID_FIELD] not in (lp.get(PACKAGE) for lp in linked_in_packages):
                    linked_in_packages.append({PACKAGE: package[config.ID_FIELD]})
                    super().system_update(
                        guid,
                        {LINKED_IN_PACKAGES: linked_in_packages,
                         PUBLISHED_IN_PACKAGE: package[config.ID_FIELD]},
                        package_item,
                    )
            elif guid in removed_items:
                # remove the package information from the package item
                linked_in_packages = [linked for linked in package_item.get(LINKED_IN_PACKAGES, [])
                                      if linked.get(PACKAGE) != package.get(config.ID_FIELD)]
                super().system_update(guid, {LINKED_IN_PACKAGES: linked_in_packages}, package_item)

            package_item = super().find_one(req=None, _id=guid)

            self.package_service.update_field_in_package(
                updates, package_item[config.ID_FIELD], config.VERSION, package_item[config.VERSION])

            if package_item.get(ASSOCIATIONS):
                self.package_service.update_field_in_package(
                    updates, package_item[config.ID_FIELD], ASSOCIATIONS, package_item[ASSOCIATIONS])

    updated = deepcopy(package)
    updated.update(updates)
    self.update_published_collection(published_item_id=package[config.ID_FIELD], updated=updated)
def raise_if_not_marked_for_publication(self, original): if original.get("flags", {}).get("marked_for_not_publication", False): raise SuperdeskApiError.badRequestError( _("Cannot publish an item which is marked as Not for Publication" ))
def _validate(self, original, updates):
    self.raise_if_invalid_state_transition(original)
    self._raise_if_unpublished_related_items(original)

    updated = original.copy()
    updated.update(updates)

    self.raise_if_not_marked_for_publication(updated)

    if self.publish_type == "publish":
        # The publish schedule has not been cleared
        if (updates.get(PUBLISH_SCHEDULE)
                or updated.get(SCHEDULE_SETTINGS, {}).get("utc_{}".format(PUBLISH_SCHEDULE))
                or not original.get(PUBLISH_SCHEDULE)):
            update_schedule_settings(updated, PUBLISH_SCHEDULE, updated.get(PUBLISH_SCHEDULE))
            validate_schedule(updated.get(SCHEDULE_SETTINGS, {}).get("utc_{}".format(PUBLISH_SCHEDULE)))

    if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
        update_schedule_settings(updated, EMBARGO, updated.get(EMBARGO))
        get_resource_service(ARCHIVE).validate_embargo(updated)

    if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
        if updates.get(EMBARGO) and not original.get(EMBARGO):
            raise SuperdeskApiError.badRequestError(_("Embargo can't be set after publishing"))

    if self.publish_type == ITEM_KILL:
        if updates.get("dateline"):
            raise SuperdeskApiError.badRequestError(_("Dateline can't be modified on kill or take down"))

    if self.publish_type == ITEM_PUBLISH and updated.get("rewritten_by"):
        rewritten_by = get_resource_service(ARCHIVE).find_one(req=None, _id=updated.get("rewritten_by"))
        if rewritten_by and rewritten_by.get(ITEM_STATE) in PUBLISH_STATES:
            raise SuperdeskApiError.badRequestError(_("Cannot publish the story after Update is published."))

    if self.publish_type == ITEM_PUBLISH and updated.get("rewrite_of"):
        rewrite_of = get_resource_service(ARCHIVE).find_one(req=None, _id=updated.get("rewrite_of"))
        if rewrite_of and rewrite_of.get(ITEM_STATE) not in PUBLISH_STATES:
            raise SuperdeskApiError.badRequestError(_("Can't publish update until original story is published."))

    publish_type = "auto_publish" if updates.get("auto_publish") else self.publish_type
    validate_item = {"act": publish_type, "type": original["type"], "validate": updated}
    validation_errors = get_resource_service("validate").post([validate_item], fields=True)
    for errors, fields in validation_errors:
        if errors:
            raise SuperdeskValidationError(errors, fields)

    validation_errors = []
    self._validate_associated_items(original, updates, validation_errors)

    if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
        self._validate_package(original, updates, validation_errors)

    if len(validation_errors) > 0:
        raise ValidationError(validation_errors)
def update(self, id, updates, original): """ Handles workflow of each Publish, Corrected, Killed and TakeDown. """ try: user = get_user() auto_publish = updates.get("auto_publish", False) if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE: self._publish_package_items(original, updates) self._update_archive(original, updates, should_insert_into_versions=auto_publish) else: self._publish_associated_items(original, updates) updated = deepcopy(original) updated.update(deepcopy(updates)) if updates.get(ASSOCIATIONS): self._refresh_associated_items( updated, skip_related=True) # updates got lost with update if updated.get(ASSOCIATIONS): self._fix_related_references(updated, updates) signals.item_publish.send(self, item=updated) self._update_archive(original, updates, should_insert_into_versions=auto_publish) self.update_published_collection( published_item_id=original[config.ID_FIELD], updated=updated) from apps.publish.enqueue import enqueue_published enqueue_published.apply_async() push_notification( "item:publish", item=str(id), unique_name=original["unique_name"], desk=str(original.get("task", {}).get("desk", "")), user=str(user.get(config.ID_FIELD, "")), ) if updates.get("previous_marked_user" ) and not updates.get("marked_for_user"): # send notification so that marked for me list can be updated get_resource_service("archive").handle_mark_user_notifications( updates, original, False) except SuperdeskApiError: raise except KeyError as e: logger.exception(e) raise SuperdeskApiError.badRequestError(message=_( "Key is missing on article to be published: {exception}"). format(exception=str(e))) except Exception as e: logger.exception(e) raise SuperdeskApiError.internalError( message=_("Failed to publish the item: {id}").format( id=str(id)), exception=e)
def raise_if_not_marked_for_publication(self, original):
    if original.get('marked_for_not_publication', False):
        raise SuperdeskApiError.badRequestError(
            'Cannot publish an item which is marked as Not for Publication')
def update(self, id, updates):
    if is_sensitive_update(updates) and not current_user_has_privilege('users'):
        raise SuperdeskApiError.forbiddenError()

    return super().update(id, updates)
def update(self, id, updates, original): """ Handles workflow of each Publish, Corrected and Killed. """ try: user = get_user() last_updated = updates.get(config.LAST_UPDATED, utcnow()) if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE: self._publish_package_items(original, updates) queued_digital = False package = None if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE: # if target_for is set the we don't to digital client. if not updates.get('targeted_for', original.get('targeted_for')): # check if item is in a digital package package = self.takes_package_service.get_take_package( original) if package: queued_digital = self._publish_takes_package( package, updates, original, last_updated) else: ''' If type of the item is text or preformatted then item need to be sent to digital subscribers. So, package the item as a take. ''' updated = copy(original) updated.update(updates) if original[ITEM_TYPE] in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED] and \ self.sending_to_digital_subscribers(updated): # create a takes package package_id = self.takes_package_service.package_story_as_a_take( updated, {}, None) updates[LINKED_IN_PACKAGES] = updated[ LINKED_IN_PACKAGES] package = get_resource_service(ARCHIVE).find_one( req=None, _id=package_id) queued_digital = self._publish_takes_package( package, updates, original, last_updated) # queue only text items queued_wire = \ self.publish(doc=original, updates=updates, target_media_type=WIRE if package else None) queued = queued_digital or queued_wire if not queued: logger.exception( 'Nothing is saved to publish queue for story: {} for action: {}' .format(original[config.ID_FIELD], self.publish_type)) self._update_archive(original=original, updates=updates, should_insert_into_versions=False) push_notification('item:publish', item=str(id), unique_name=original['unique_name'], desk=str( original.get('task', {}).get('desk', '')), user=str(user.get(config.ID_FIELD, ''))) except SuperdeskApiError as e: raise e except KeyError as e: raise SuperdeskApiError.badRequestError( message="Key is missing on article to be published: {}".format( str(e))) except Exception as e: logger.exception( "Something bad happened while publishing %s".format(id)) raise SuperdeskApiError.internalError( message="Failed to publish the item: {}".format(str(e)))
def server_error_handler(error): """Log server errors.""" app.sentry.captureException() logger.exception(error) return_error = SuperdeskApiError.internalError() return client_error_handler(return_error)
def on_delete(self, doc):
    self._validate_privileges(doc, action="delete")
    if doc.get("template_type") == TemplateType.KILL.value:
        raise SuperdeskApiError.badRequestError(_("Kill templates can not be deleted."))
def _publish_package_items(self, package, updates): """ Publishes all items of a package recursively then publishes the package itself :param package: package to publish :param updates: payload """ items = self.package_service.get_residrefs(package) if len(items) == 0 and self.publish_type == ITEM_PUBLISH: raise SuperdeskApiError.badRequestError( "Empty package cannot be published!") removed_items = [] if self.publish_type == ITEM_CORRECT: removed_items, added_items = self._get_changed_items( items, updates) if len(removed_items) == len(items) and len(added_items) == 0: raise SuperdeskApiError.badRequestError( "Corrected package cannot be empty!") items.extend(added_items) subscriber_items = {} if items: archive_publish = get_resource_service('archive_publish') for guid in items: package_item = super().find_one(req=None, _id=guid) if not package_item: raise SuperdeskApiError.badRequestError( "Package item with id: {} does not exist.".format( guid)) if package_item[ITEM_STATE] not in PUBLISH_STATES: # if the item is not published then publish it if package_item[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE: # if the item is a package do recursion to publish sub_updates = { i: updates[i] for i in ['state', 'operation'] if i in updates } sub_updates['groups'] = list(package_item['groups']) self._publish_package_items(package_item, sub_updates) self._update_archive(original=package_item, updates=sub_updates, should_insert_into_versions=False) self.update_published_collection( published_item_id=package_item[config.ID_FIELD]) else: # publish the item archive_publish.patch(id=package_item.pop( config.ID_FIELD), updates=package_item) insert_into_versions(id_=guid) package_item = super().find_one(req=None, _id=guid) subscribers = self._get_subscribers_for_package_item( package_item) self.package_service.update_field_in_package( updates, package_item[config.ID_FIELD], config.VERSION, package_item[config.VERSION]) if package_item[config.ID_FIELD] in removed_items: digital_item_id = None else: digital_item_id = self._get_digital_id_for_package_item( package_item) self._extend_subscriber_items(subscriber_items, subscribers, package_item, digital_item_id) self.publish_package(package, updates, target_subscribers=subscriber_items) return subscribers
def update(self, id, updates, original):
    # if the completion is being done by an external application then ensure that it is not locked
    if 'proxy_user' in updates:
        if original.get('lock_user'):
            raise SuperdeskApiError.forbiddenError('Assignment is locked')

        user = updates.pop('proxy_user', None)
        proxy_user = True
    else:
        user = get_user(required=True).get(config.ID_FIELD, '')
        proxy_user = False

    session = get_auth().get(config.ID_FIELD, '')

    original_assigned_to = deepcopy(original).get('assigned_to')
    if not updates.get('assigned_to'):
        updates['assigned_to'] = {}

    original_assigned_to.update(updates['assigned_to'])
    updates['assigned_to'] = original_assigned_to

    assignments_service = get_resource_service('assignments')

    # If we are confirming availability, save the revert state for the revert action
    text_assignment = assignments_service.is_text_assignment(original)
    if not text_assignment:
        updates['assigned_to']['revert_state'] = updates['assigned_to']['state']

    updates['assigned_to']['state'] = get_next_assignment_status(updates, ASSIGNMENT_WORKFLOW_STATE.COMPLETED)

    remove_lock_information(updates)

    item = self.backend.update(self.datasource, id, updates, original)

    # publish the planning item
    assignments_service.publish_planning(original['planning_item'])

    # Save history if the user initiates completion
    if text_assignment:
        get_resource_service('assignments_history').on_item_complete(updates, original)
    else:
        if proxy_user:
            updates['proxy_user'] = user
        get_resource_service('assignments_history').on_item_confirm_availability(updates, original)

    push_notification('assignments:completed',
                      item=str(original[config.ID_FIELD]),
                      planning=original.get('planning_item'),
                      assigned_user=(original.get('assigned_to') or {}).get('user'),
                      assigned_desk=(original.get('assigned_to') or {}).get('desk'),
                      assignment_state=ASSIGNMENT_WORKFLOW_STATE.COMPLETED,
                      user=str(user),
                      session=str(session),
                      coverage=original.get('coverage_item'))

    # Send notification that the work has been completed.
    # Determine the display name of the assignee.
    assigned_to_user = get_resource_service('users').find_one(req=None, _id=user)
    assignee = assigned_to_user.get('display_name') if assigned_to_user else 'Unknown'

    target_user = original.get('assigned_to', {}).get('assignor_user')
    if target_user is None:
        target_user = original.get('assigned_to', {}).get('assignor_desk')

    PlanningNotifications().notify_assignment(
        target_user=target_user,
        message='assignment_fulfilled_msg',
        assignee=assignee,
        coverage_type=get_coverage_type_name(original.get('planning', {}).get('g2_content_type', '')),
        slugline=original.get('planning', {}).get('slugline'),
        omit_user=True,
        assignment_id=original[config.ID_FIELD],
        is_link=True,
        no_email=True)

    return item
def _validate_take(self, original):
    takes_service = TakesPackageService()
    if not takes_service.is_last_takes_package_item(original):
        raise SuperdeskApiError.badRequestError(
            message="Only the last take of the package can be spiked.")
def _validate_poi_properties(self, doc):
    raise SuperdeskApiError.badRequestError(
        message="Request is not valid",
        payload={"cpnat_type": "concept type 'cpnat:poi' is not supported"}
    )
def _validate_rewrite(self, original, update):
    """Validates the article to be rewritten.

    :param original: article to be rewritten
    :param update: article as the rewrite
    :raises: SuperdeskApiError
    """
    if not original:
        raise SuperdeskApiError.notFoundError(message='Cannot find the article')

    if original.get(EMBARGO):
        raise SuperdeskApiError.badRequestError("Rewrite of an item having embargo isn't possible")

    if not original.get('event_id'):
        raise SuperdeskApiError.notFoundError(message='Event id does not exist')

    if original.get('rewritten_by'):
        raise SuperdeskApiError.badRequestError(message='Article has been rewritten before!')

    if not is_workflow_state_transition_valid('rewrite', original[ITEM_STATE]):
        raise InvalidStateTransitionError()

    if original.get('rewrite_of') and not (original.get(ITEM_STATE) in PUBLISH_STATES):
        raise SuperdeskApiError.badRequestError(
            message="Rewrite is not published. Cannot rewrite the story again.")

    if update:
        # in case of associate as update
        if update.get('rewrite_of'):
            raise SuperdeskApiError.badRequestError("Rewrite story has been used as an update before!")

        if update.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED,
                                      CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED,
                                      CONTENT_STATE.SCHEDULED, CONTENT_STATE.SPIKED]:
            raise InvalidStateTransitionError()

        if update.get(ITEM_TYPE) not in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
            raise SuperdeskApiError.badRequestError("Rewrite story can only be text or pre-formatted!")

        if update.get('genre') and \
                any(genre.get('value', '').lower() == BROADCAST_GENRE.lower()
                    for genre in update.get('genre')):
            raise SuperdeskApiError.badRequestError("Broadcast cannot be an update story!")

        if original.get('profile') and original.get('profile') != update.get('profile'):
            raise SuperdeskApiError.badRequestError(
                "Rewrite item content profile does not match the original item.")
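# Illustrative only: the minimal fields _validate_rewrite() checks on the
# original story before allowing a rewrite. Values are hypothetical.
publishable_original = {
    'event_id': 'event-id',          # required
    'state': 'published',            # must allow the 'rewrite' transition
    'type': 'text',
    # no 'embargo' and no 'rewritten_by': either would block the rewrite
}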
def _validate_group_id(self, doc):
    if "group_id" not in doc:
        raise SuperdeskApiError.badRequestError(
            message="Request is not valid",
            payload={"group_id": "This field is required"}
        )
def _validate(self, original, updates):
    self.raise_if_not_marked_for_publication(original)
    self.raise_if_invalid_state_transition(original)

    updated = original.copy()
    updated.update(updates)

    takes_package = self.takes_package_service.get_take_package(original)

    if self.publish_type == 'publish':
        # validate if the take can be published
        if takes_package and not self.takes_package_service.can_publish_take(
                takes_package, updates.get(SEQUENCE, original.get(SEQUENCE, 1))):
            raise PublishQueueError.previous_take_not_published_error(
                Exception("Previous takes are not published."))

        update_schedule_settings(updated, PUBLISH_SCHEDULE, updated.get(PUBLISH_SCHEDULE))
        validate_schedule(updated.get(SCHEDULE_SETTINGS, {}).get('utc_{}'.format(PUBLISH_SCHEDULE)),
                          takes_package.get(SEQUENCE, 1) if takes_package else 1)

    if original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE and updates.get(EMBARGO):
        update_schedule_settings(updated, EMBARGO, updated.get(EMBARGO))
        get_resource_service(ARCHIVE).validate_embargo(updated)

    if self.publish_type in [ITEM_CORRECT, ITEM_KILL]:
        if updates.get(EMBARGO) and not original.get(EMBARGO):
            raise SuperdeskApiError.badRequestError("Embargo can't be set after publishing")

        if updates.get('dateline'):
            raise SuperdeskApiError.badRequestError("Dateline can't be modified after publishing")

    if self.publish_type == ITEM_PUBLISH and updated.get('rewritten_by'):
        # if the update is published then the user cannot publish the takes
        rewritten_by = get_resource_service(ARCHIVE).find_one(req=None, _id=updated.get('rewritten_by'))
        if rewritten_by and rewritten_by.get(ITEM_STATE) in PUBLISH_STATES:
            raise SuperdeskApiError.badRequestError("Cannot publish the story after Update is published.")

    publish_type = 'auto_publish' if updates.get('auto_publish') else self.publish_type
    validate_item = {'act': publish_type, 'type': original['type'], 'validate': updated}
    validation_errors = get_resource_service('validate').post([validate_item])
    if validation_errors[0]:
        raise ValidationError(validation_errors)

    validation_errors = []
    self._validate_associated_items(original, takes_package, validation_errors)

    if original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
        self._validate_package(original, updates, validation_errors)

    if len(validation_errors) > 0:
        raise ValidationError(validation_errors)
def send_to(doc, update=None, desk_id=None, stage_id=None, user_id=None, default_stage='incoming_stage'):
    """Send item to the given desk and stage.

    Applies the outgoing and incoming macros of the current and destination stages.

    :param doc: original document to be sent
    :param update: updates for the document
    :param desk_id: id of the desk where the item should be sent
    :param stage_id: optional stage within the desk
    :param user_id: optional user to assign the task to; defaults to the current task user
    :param default_stage: if no stage_id is passed, determines which stage in the desk
        the doc is assigned to: either the incoming stage or the working stage.
    """
    original_task = doc.setdefault('task', {})
    current_stage = None
    if original_task.get('stage'):
        current_stage = get_resource_service('stages').find_one(req=None, _id=original_task.get('stage'))
    desk = destination_stage = None
    task = {'desk': desk_id,
            'stage': stage_id,
            'user': original_task.get('user') if user_id is None else user_id}

    if current_stage:
        apply_stage_rule(doc, update, current_stage, MACRO_OUTGOING)

    if desk_id:
        desk = superdesk.get_resource_service('desks').find_one(req=None, _id=desk_id)
        if not desk:
            raise SuperdeskApiError.notFoundError('Invalid desk identifier %s' % desk_id)

        task['desk'] = desk_id
        if not stage_id:
            task['stage'] = desk.get(default_stage)
            destination_stage = get_resource_service('stages').find_one(req=None, _id=desk.get(default_stage))

    if stage_id:
        destination_stage = get_resource_service('stages').find_one(req=None, _id=stage_id)
        if not destination_stage:
            raise SuperdeskApiError.notFoundError('Invalid stage identifier %s' % stage_id)

        task['desk'] = destination_stage['desk']
        task['stage'] = stage_id

    if destination_stage:
        apply_stage_rule(doc, update, destination_stage, MACRO_INCOMING, desk=desk)
        if destination_stage.get('task_status'):
            task['status'] = destination_stage['task_status']

    if update:
        update.setdefault('task', {})
        update['task'].update(task)
        update['expiry'] = get_item_expiry(desk=desk, stage=destination_stage)
    else:
        doc['task'].update(task)
        doc['expiry'] = get_item_expiry(desk=desk, stage=destination_stage)
        superdesk.get_resource_service('desks').apply_desk_metadata(doc, doc)
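# A minimal usage sketch for send_to, assuming an app context and existing
# desk/stage/user ids; all ids below are placeholders.
import superdesk

doc = superdesk.get_resource_service('archive').find_one(req=None, _id='item-1')
updates = {}
send_to(doc, update=updates, desk_id='desk-1', stage_id='stage-1', user_id='user-1')
# 'updates' now carries the new task and expiry, ready to be patched onto the item.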
def _validate_abstract_properties(self, doc): if "properties" in doc: raise SuperdeskApiError.badRequestError( message="Request is not valid", payload={"properties": "field is not supported when 'cpnat_type' is 'cpnat:abstract'"}, )
def on_create(self, docs): for doc in docs: if not doc.get("desk") and not app.config[ "WORKFLOW_ALLOW_COPY_TO_PERSONAL"]: raise SuperdeskApiError.forbiddenError( message=_("Duplicate to Personal space is not allowed."))
def link_as_next_take(self, target, link):
    """Links the given item as the next take of the target.

    Checks if the target has an associated takes package; if not, creates one and
    adds the target as a take. If the target is not the last take, resolves the
    last take. Copies metadata from the target, adds the link as the next take
    and returns the updated link item.

    :return: the updated link item
    """
    takes_package_id = self.get_take_package_id(target)
    archive_service = get_resource_service(ARCHIVE)
    takes_package = archive_service.find_one(req=None, _id=takes_package_id) if takes_package_id else {}

    if not takes_package:
        # setting the sequence to 1 for the target
        updates = {SEQUENCE: 1}
        if target[ITEM_STATE] in [CONTENT_STATE.SPIKED, CONTENT_STATE.KILLED,
                                  CONTENT_STATE.SCHEDULED, CONTENT_STATE.INGESTED]:
            raise SuperdeskApiError.forbiddenError("Item isn't in a valid state for creating takes.")
        archive_service.system_update(target.get(config.ID_FIELD), updates, target)

    link_updates = {}

    if not link.get(config.ID_FIELD):
        # a new story to be linked
        self.__copy_metadata__(target, link, takes_package, set_state=True)
        archive_service.post([link])
    else:
        self.__copy_metadata__(target, link_updates, takes_package, set_state=False)

    link.update(link_updates)

    if not takes_package_id:
        takes_package_id = self.package_story_as_a_take(target, takes_package, link)
    else:
        self.__link_items__(takes_package, target, link)
        del takes_package[config.ID_FIELD]
        takes_package.pop('unique_id', None)
        takes_package.pop('unique_name', None)
        takes_package.pop(PUBLISH_SCHEDULE, None)
        takes_package.pop(SCHEDULE_SETTINGS, None)

        resolve_document_version(takes_package, ARCHIVE, 'PATCH', takes_package)
        archive_service.patch(takes_package_id, takes_package)

    get_resource_service('archive_broadcast').on_broadcast_master_updated(
        ITEM_CREATE, target, takes_package_id=takes_package_id)

    if link.get(SEQUENCE):
        link_updates.update({SEQUENCE: link[SEQUENCE]})
        archive_service.system_update(link[config.ID_FIELD], link_updates, link)

    insert_into_versions(id_=takes_package_id)
    return link
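# Hedged sketch: linking a new story as the next take of an existing target.
# 'takes_service' is a stand-in for however the takes package service instance
# is obtained; the item values are placeholders.
target = get_resource_service(ARCHIVE).find_one(req=None, _id='story-1')
link = {'headline': 'story - take 2'}  # no _id, so it is posted as a new item
linked = takes_service.link_as_next_take(target, link)
print(linked.get(SEQUENCE))  # the link's sequence within the takes package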
def embed(blog_id, theme=None, output=None, api_host=None):
    from liveblog.themes import UnknownTheme
    # adding imports here to avoid circular references
    from liveblog.advertisements.utils import get_advertisements_list
    from liveblog.advertisements.amp import AdsSettings, inject_advertisements

    api_host = api_host or request.url_root
    blog = get_resource_service('client_blogs').find_one(req=None, _id=blog_id)
    if not blog:
        return 'blog not found', 404

    # if the `output` is the `_id`, fetch the data
    if output:
        if isinstance(output, str):
            output = get_resource_service('outputs').find_one(req=None, _id=output)
            if not output:
                return 'output not found', 404
        else:
            collection = get_resource_service('collections').find_one(req=None, _id=output.get('collection'))
            output['collection'] = collection

    # retrieve the picture url from the relationship
    if blog.get('picture', None):
        blog['picture'] = get_resource_service('archive').find_one(req=None, _id=blog['picture'])

    # retrieve the wanted theme and add it to blog['theme'] if it is not the registered one
    try:
        theme_name = request.args.get('theme', theme)
    except RuntimeError:
        # this method can also be called outside a request context
        theme_name = theme

    blog_preferences = blog.get('blog_preferences')
    if blog_preferences is None:
        return 'blog preferences are not available', 404

    blog_theme_name = blog_preferences.get('theme')
    if not theme_name:
        # no theme specified, fall back to the theme in blog_preferences
        theme_name = blog_theme_name

    theme_service = get_resource_service('themes')
    theme = theme_service.find_one(req=None, name=theme_name)
    if theme is None:
        raise SuperdeskApiError.badRequestError(
            message='You will be able to access the embed after you register the themes')

    try:
        assets, template_content = collect_theme_assets(theme, parents=[])
    except UnknownTheme as e:
        return str(e), 500

    if not template_content:
        logger.warning('Template file not found for theme "%s". Theme: %s' % (theme_name, theme))
        return 'Template file not found', 500

    # compute the assets root
    if theme.get('public_url', False):
        assets_root = theme.get('public_url')
    else:
        assets_root = theme_service.get_theme_assets_url(theme_name)

    theme_settings = theme_service.get_default_settings(theme)
    i18n = theme.get('i18n', {})

    # blog-level settings override theme-level ones; this lets users enable
    # commenting only for certain blogs, or the other way around
    unset = 'unset'
    blog_users_can_comment = blog.get('users_can_comment', unset)
    if blog_users_can_comment != unset:
        theme_settings['canComment'] = blog_users_can_comment == 'enabled'

    # when the blog has been archived, commenting is disabled as well
    if blog.get('blog_status') == 'closed':
        theme_settings['canComment'] = False

    # Check if theme is SEO and/or AMP compatible.
    is_amp = theme.get('ampTheme', False)
    is_seo = theme.get('seoTheme', False)

    if is_seo:
        # fetch initial blog posts for the SEO theme
        blog_instance = Blog(blog)
        page_limit = theme_settings.get('postsPerPage', 10)
        sticky_limit = theme_settings.get('stickyPostsPerPage', 10)
        ordering = theme_settings.get('postOrder', blog_instance.default_ordering)
        posts = blog_instance.posts(wrap=True, limit=page_limit, ordering=ordering, deleted=is_amp)
        sticky_posts = blog_instance.posts(wrap=True, limit=sticky_limit, sticky=True,
                                           ordering='newest_first', deleted=is_amp)
        api_response = {'posts': posts, 'stickyPosts': sticky_posts}
        embed_env = theme_service.get_theme_template_env(theme, loader=CompiledThemeTemplateLoader)
        embed_template = embed_env.from_string(template_content)
        template_content = embed_template.render(
            blog=blog,
            output=output,
            options=theme,
            json_options=bson_dumps(theme),
            settings=theme_settings,
            api_response=api_response,
            assets_root=assets_root,
            i18n=i18n,
            api_host=api_host)

    async_theme = theme.get('asyncTheme', False)
    api_host = api_host.replace('//', app.config.get('EMBED_PROTOCOL')) if api_host.startswith('//') else api_host
    api_host = api_host.replace('http://', app.config.get('EMBED_PROTOCOL'))
    scope = {
        'blog': blog,
        'settings': theme_settings,
        'assets': assets,
        'api_host': api_host,
        'output': output,
        'template': template_content,
        'debug': app.config.get('LIVEBLOG_DEBUG'),
        'assets_root': assets_root,
        'async': async_theme,
        'i18n': i18n,
        'hook_urls': bool(TRIGGER_HOOK_URLS)
    }

    if is_amp:
        # add AMP compatible css to the template context
        styles = theme.get('files', {}).get('styles', {}).values()
        if styles:
            scope['amp_style'] = next(iter(styles))

    embed_template = 'embed_amp.html' if is_amp else 'embed.html'

    blog_archived = blog['blog_status'] == 'closed'
    solo_subscription = 'solo' in SUBSCRIPTION_LEVEL
    if blog_archived and solo_subscription:
        scope['template'] = render_template('blog-unavailable.html', **scope)
        scope['assets']['scripts'] = []

    response_content = render_template(embed_template, **scope)

    # TODO: move to somewhere else to simplify this method
    if is_amp and output and theme.get('supportAdsInjection', False):
        parsed_content = BeautifulSoup(response_content, 'lxml')
        ads = get_advertisements_list(output)
        frequency = output['settings'].get('frequency', 4)
        order = output['settings'].get('order', 1)
        ad_template = get_theme_template(theme, 'template-ad-entry.html')
        ads_settings = AdsSettings(frequency=frequency, order=order,
                                   template=ad_template, tombstone_class='hide-item')

        # remove hidden elements up front since they carry no content; the guard
        # is needed because `embed` can also be called outside a request context
        if not request or not request.args.get('amp_latest_update_time', False):
            hidden_items = parsed_content.find_all('article', class_=ads_settings.tombstone_class)
            for tag in hidden_items:
                tag.decompose()

        styles_tmpl = get_theme_template(theme, 'template-ad-styles.html')
        amp_style = BeautifulSoup(styles_tmpl.render(frequency=frequency), 'html.parser')
        style_tag = parsed_content.find('style', attrs={'amp-custom': True})
        if style_tag:
            style_tag.append(amp_style.find('style').contents[0])

        inject_advertisements(parsed_content, ads_settings, ads, theme)
        response_content = parsed_content.prettify()

    return response_content
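# Sketch of how embed is typically exposed over HTTP, assuming a Flask
# blueprint named 'blog_blueprint' is available; the route path is illustrative.
from flask import request


@blog_blueprint.route('/embed/<blog_id>')
def embed_view(blog_id):
    return embed(blog_id, theme=request.args.get('theme'))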
def authenticate(self): """Returns 401 response with CORS headers.""" raise SuperdeskApiError.unauthorizedError()
def on_delete(self, doc): if doc.get('is_used'): raise SuperdeskApiError(status_code=202, payload={"is_used": True}) remove_profile_from_templates(doc) remove_profile_from_desks(doc)
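# Note the unusual 202 status here: a profile that is still in use is not
# deleted, and the payload flags why. A hedged sketch of what a caller might
# observe; 'profiles_service' and 'profile_id' are hypothetical names.
try:
    profiles_service.delete_action(lookup={'_id': profile_id})
except SuperdeskApiError as err:
    assert err.status_code == 202
    assert err.payload == {'is_used': True}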
def generate_text_item(items, template_name, resource_type):
    """Generate a text article from planning or event items using an export template."""
    template = get_resource_service('planning_export_templates').get_export_template(template_name, resource_type)
    archive_service = get_resource_service('archive')
    if not template:
        raise SuperdeskApiError.badRequestError('Invalid template selected')

    for item in items:
        # build the assignee list, preferring the coverage provider and
        # falling back to the assigned user
        item['published_archive_items'] = []
        item['assignees'] = []
        item['text_assignees'] = []
        item['contacts'] = []
        text_users = []
        text_desks = []
        users = []
        desks = []

        def enhance_coverage(planning, item, users):
            def _enhance_assigned_provider(coverage, item, assigned_to):
                """Enhances text_assignees with contact details when the coverage
                is assigned to an external provider."""
                if assigned_to.get('contact'):
                    provider_contact = get_resource_service('contacts').find_one(
                        req=None, _id=assigned_to.get('contact'))

                    assignee_str = "{0} - {1} {2} ".format(
                        assigned_to['coverage_provider']['name'],
                        provider_contact.get('first_name', ''),
                        provider_contact.get('last_name', ''))

                    phone_number = [n.get('number') for n in
                                    provider_contact.get('mobile', []) + provider_contact.get('contact_phone', [])]
                    if phone_number:
                        assignee_str += ' ({0})'.format(phone_number[0])

                    # if there is an internal note on the coverage that differs
                    # from the internal note on the planning
                    if (coverage.get('planning', {})).get('internal_note', '') \
                            and item.get('internal_note', '') != \
                            (coverage.get('planning', {})).get('internal_note', ''):
                        assignee_str += ' ({0})'.format((coverage.get('planning', {})).get('internal_note', ''))

                    item['text_assignees'].append(assignee_str)
                else:
                    item['text_assignees'].append(assigned_to['coverage_provider']['name'])

            for c in (planning.get('coverages') or []):
                is_text = c.get('planning', {}).get('g2_content_type', '') == 'text'
                completed = (c.get('assigned_to') or {}).get('state') == ASSIGNMENT_WORKFLOW_STATE.COMPLETED
                assigned_to = c.get('assigned_to') or {}
                user = None
                desk = None
                if assigned_to.get('coverage_provider'):
                    item['assignees'].append(assigned_to['coverage_provider']['name'])
                    if is_text and not completed:
                        _enhance_assigned_provider(c, item, assigned_to)
                elif assigned_to.get('user'):
                    user = assigned_to['user']
                    users.append(user)
                elif assigned_to.get('desk'):
                    desk = assigned_to.get('desk')
                    desks.append(desk)

                # get the abstract from the related text item if the coverage is complete
                if is_text:
                    if completed:
                        results = list(archive_service.get_from_mongo(
                            req=None,
                            lookup={
                                'assignment_id': ObjectId(c['assigned_to']['assignment_id']),
                                'state': {'$in': ['published', 'corrected']},
                                'pubstatus': 'usable',
                                'rewrite_of': None
                            }))
                        if len(results) > 0:
                            item['published_archive_items'].append({
                                'archive_text': get_first_paragraph_text(results[0].get('abstract')) or '',
                                'archive_slugline': results[0].get('slugline') or ''
                            })
                    elif c.get('news_coverage_status', {}).get('qcode') == 'ncostat:int':
                        if user:
                            text_users.append({
                                'user': user,
                                'note': (c.get('planning', {})).get('internal_note', '')
                                if (c.get('planning', {})).get('internal_note', '') != item.get('internal_note')
                                else None
                            })
                        else:
                            text_desks.append(desk)

        item['contacts'] = get_contacts_from_item(item)
        if resource_type == 'planning':
            enhance_coverage(item, item, users)
        else:
            for p in (item.get('plannings') or []):
                enhance_coverage(p, item, users)

        users = get_resource_service('users').find(where={'_id': {'$in': users}})
        desks = get_resource_service('desks').find(where={'_id': {'$in': desks}})

        for u in users:
            name = u.get('display_name', "{0} {1}".format(u.get('first_name'), u.get('last_name')))
            item['assignees'].append(name)
            text_user = next((_i for _i in text_users if _i['user'] == str(u.get('_id'))), None)
            if text_user:
                item['text_assignees'].append(
                    '{0} ({1})'.format(name, text_user.get('note')) if text_user.get('note') else '{0}'.format(name))

        for d in desks:
            item['assignees'].append(d['name'])
            if str(d['_id']) in text_desks:
                item['text_assignees'].append(d['name'])

        set_item_place(item)

        item['description_text'] = item.get('description_text') or (item.get('event') or {}).get('definition_short')
        item['slugline'] = item.get('slugline') or (item.get('event') or {}).get('name')

        # handle dates and remote time-zones
        if item.get('dates') or (item.get('event') or {}).get('dates'):
            dates = item.get('dates') or item.get('event').get('dates')
            item['schedule'] = utc_to_local(config.DEFAULT_TIMEZONE, dates.get('start'))
            if get_timezone_offset(config.DEFAULT_TIMEZONE, utcnow()) != \
                    get_timezone_offset(dates.get('tz'), utcnow()):
                item['schedule'] = "{} ({})".format(item['schedule'].strftime('%H%M'), item['schedule'].tzname())
            else:
                item['schedule'] = item['schedule'].strftime('%H%M')

    agendas = []
    if resource_type == 'planning':
        agendas = group_items_by_agenda(items)
    inject_internal_converages(items)

    labels = {}
    cv = get_resource_service('vocabularies').find_one(req=None, _id='g2_content_type')
    if cv:
        labels = {_type['qcode']: _type['name'] for _type in cv['items']}

    for item in items:
        item['coverages'] = [
            labels.get(coverage.get('planning').get('g2_content_type'),
                       coverage.get('planning').get('g2_content_type')) +
            (' (cancelled)' if coverage.get('workflow_status', '') == 'cancelled' else '')
            for coverage in item.get('coverages', [])
            if (coverage.get('planning') or {}).get('g2_content_type')
        ]

    article = {}
    for key, value in template.items():
        if value.endswith('.html'):
            article[key.replace('_template', '')] = render_template(value, items=items, agendas=agendas)
        else:
            article[key] = render_template_string(value, items=items, agendas=agendas)

    return article
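# Minimal usage sketch for generate_text_item, assuming planning items fetched
# elsewhere and an export template registered under the (illustrative) name used here.
items = list(get_resource_service('planning').get_from_mongo(req=None, lookup={}))
article = generate_text_item(items, 'default_planning_template', 'planning')
print(article.get('body_html', ''))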
def _validate_updates(self, original, updates, user):
    """Validates updates to the article against the conditions below; an exception is raised if any of them fails:

    1. Is the article locked by a user other than the one requesting the update?
    2. Is the article in the killed state?
    3. Is the user trying to add Public Service Announcements to a package?
    4. Is the user authorized to update the unique name of the article?
    5. Is the user trying to update the genre of a broadcast article?
    6. Is the article being scheduled while it is in a package?
    7. Is the article being scheduled with an invalid schedule timestamp?
    8. Does the article have valid crops if it is a picture?
    9. Is the article a valid package if it is a package?
    10. Does the article have a valid embargo?
    11. Are there duplicate anpa_category codes in the article?
    12. Are there duplicate subjects in the update?

    :raises:
        SuperdeskApiError.forbiddenError()
            - if the state of the article is killed, the user is not authorized to update the
              unique name, or the article is locked by another user
        SuperdeskApiError.badRequestError()
            - if Public Service Announcements are being added to a package, the genre of a
              broadcast is being updated, the schedule is invalid, or the updates contain
              duplicate anpa_category or subject codes
    """
    updated = original.copy()
    updated.update(updates)

    lock_user = original.get('lock_user', None)
    force_unlock = updates.get('force_unlock', False)
    str_user_id = str(user.get(config.ID_FIELD)) if user else None

    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')

    if original.get(ITEM_STATE) == CONTENT_STATE.KILLED:
        raise SuperdeskApiError.forbiddenError("Item isn't in a valid state to be updated.")

    if updates.get('body_footer') and is_normal_package(original):
        raise SuperdeskApiError.badRequestError("Package doesn't support Public Service Announcements")

    if 'unique_name' in updates and not is_admin(user) \
            and (user['active_privileges'].get('metadata_uniquename', 0) == 0):
        raise SuperdeskApiError.forbiddenError("Unauthorized to modify Unique Name")

    # if broadcast then an update to the genre is not allowed
    if original.get('broadcast') and updates.get('genre') and \
            any(genre.get('qcode', '').lower() != BROADCAST_GENRE.lower() for genre in updates.get('genre')):
        raise SuperdeskApiError.badRequestError('Cannot change the genre for broadcast content.')

    if PUBLISH_SCHEDULE in updates or "schedule_settings" in updates:
        if is_item_in_package(original):
            raise SuperdeskApiError.badRequestError(
                'This item is in a package and it needs to be removed before the item can be scheduled!'
) package = TakesPackageService().get_take_package(original) or {} if updates.get(PUBLISH_SCHEDULE): validate_schedule(updates[PUBLISH_SCHEDULE], package.get(SEQUENCE, 1)) update_schedule_settings(updated, PUBLISH_SCHEDULE, updated.get(PUBLISH_SCHEDULE)) updates[SCHEDULE_SETTINGS] = updated.get(SCHEDULE_SETTINGS, {}) if original[ITEM_TYPE] == CONTENT_TYPE.PICTURE: CropService().validate_multiple_crops(updates, original) elif original[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE: self.packageService.on_update(updates, original) # Do the validation after Circular Reference check passes in Package Service self.validate_embargo(updated) if EMBARGO in updates or "schedule_settings" in updates: update_schedule_settings(updated, EMBARGO, updated.get(EMBARGO)) updates[SCHEDULE_SETTINGS] = updated.get(SCHEDULE_SETTINGS, {}) # Ensure that there are no duplicate categories in the update category_qcodes = [ q['qcode'] for q in updates.get('anpa_category', []) or [] ] if category_qcodes and len(category_qcodes) != len( set(category_qcodes)): raise SuperdeskApiError.badRequestError( "Duplicate category codes are not allowed") # Ensure that there are no duplicate subjects in the update subject_qcodes = [q['qcode'] for q in updates.get('subject', []) or []] if subject_qcodes and len(subject_qcodes) != len(set(subject_qcodes)): raise SuperdeskApiError.badRequestError( "Duplicate subjects are not allowed")
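# The duplicate checks at the end of _validate_updates reduce to a set-size
# comparison; a tiny self-contained illustration:
categories = [{'qcode': 'a'}, {'qcode': 'b'}, {'qcode': 'a'}]
qcodes = [c['qcode'] for c in categories]
assert len(qcodes) != len(set(qcodes))  # duplicates present -> request is rejected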
def on_delete(self, doc): if self.backend.find_one('ingest_providers', req=None, rule_set=doc['_id']): raise SuperdeskApiError.forbiddenError(_("Cannot delete Rule set as it's associated with channel(s)."))