def on_create(self, docs):
    """Normalize and validate template docs before creation.

    - Lower-cases and trims ``template_name``.
    - Computes ``next_run`` when a schedule is present.
    - Rejects kill templates that carry disallowed fields.
    - Stamps the creating user when one is authenticated.

    :param docs: list of template documents to be created
    :raises SuperdeskApiError.badRequestError: kill template has fields
        listed in KILL_TEMPLATE_NOT_REQUIRED_FIELDS
    """
    for doc in docs:
        doc['template_name'] = doc['template_name'].lower().strip()
        if doc.get('schedule'):
            doc['next_run'] = get_next_run(doc.get('schedule'))
        # ``field in doc`` is clearer than filtering doc.keys(), and does not
        # depend on the truthiness of the key value itself.
        if doc.get('template_type') == TemplateType.KILL.value and \
                any(field in doc for field in KILL_TEMPLATE_NOT_REQUIRED_FIELDS):
            raise SuperdeskApiError.badRequestError(
                message="Invalid kill template. "
                        "{} are not allowed".format(', '.join(KILL_TEMPLATE_NOT_REQUIRED_FIELDS)))
        user = get_user()  # fetch once instead of calling get_user() twice
        if user:
            doc.setdefault('user', user[config.ID_FIELD])
def update(self, id, updates, original):
    """Spike a planning item: flag it spiked, expire it, unlock it and notify.

    :param id: id of the planning item
    :param updates: dict of changes to apply
    :param original: the item as currently stored
    :return: the updated item
    """
    user = get_user(required=True)
    # Remember the prior workflow state so an unspike can restore it.
    updates['revert_state'] = original[ITEM_STATE]
    updates[ITEM_STATE] = WORKFLOW_STATE.SPIKED
    set_item_expiry(updates)
    # Mark item as unlocked directly in order to avoid more queries and notifications
    # coming from lockservice.
    updates.update({
        LOCK_USER: None,
        LOCK_SESSION: None,
        'lock_time': None,
        'lock_action': None
    })
    remove_autosave_on_spike(original)
    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('planning:spiked', item=str(id),
                      user=str(user.get(config.ID_FIELD)),
                      etag=item['_etag'], revert_state=item['revert_state'])
    # Notify every user/desk with a coverage assignment on the spiked item.
    coverages = original.get('coverages') or []
    for coverage in coverages:
        assigned_to = coverage.get('assigned_to')
        if assigned_to:
            user = get_user()  # NOTE(review): re-fetches and shadows the spiking user — confirm intentional
            assignment_service = get_resource_service('assignments')
            assignment = assignment_service.find_one(
                req=None, _id=assigned_to.get('assignment_id'))
            slugline = assignment.get('planning').get('slugline', '')
            coverage_type = assignment.get('planning').get(
                'g2_content_type', '')
            PlanningNotifications().notify_assignment(
                coverage_status=coverage.get('workflow_status'),
                # Message goes to the desk only when no single user is assigned.
                target_user=assignment.get('assigned_to').get('user'),
                target_desk=assignment.get('assigned_to').get('desk')
                if not assignment.get('assigned_to').get('user') else None,
                message='{{actioning_user}} has spiked a {{coverage_type}} '
                        'coverage for \"{{slugline}}\"',
                slugline=slugline,
                coverage_type=get_coverage_type_name(coverage_type),
                actioning_user=user.get('display_name', user.get('username', 'Unknown')),
                omit_user=True)
    return item
def create(self, docs, **kwargs):
    """Copy the archive item identified by the request's ``guid``.

    :param docs: request payload docs (one copy is made per doc)
    :return: list of GUIDs of the copied items
    :raises SuperdeskApiError.notFoundError: item not found in archive
    :raises SuperdeskApiError.preconditionFailedError: item is on a desk
    :raises InvalidStateTransitionError: workflow forbids copying
    """
    guid_of_item_to_be_copied = request.view_args['guid']
    guid_of_copied_items = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % guid_of_item_to_be_copied)
        # Copying is restricted to personal content (items without a desk).
        current_desk_of_item = archived_doc.get('task', {}).get('desk')
        if current_desk_of_item:
            raise SuperdeskApiError.preconditionFailedError(message='Copy is not allowed on items in a desk.')
        if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_copied_items.append(new_guid)
    if kwargs.get('notify', True):
        user = get_user()
        push_notification('item:copy', copied=1, user=str(user.get(config.ID_FIELD, '')))
    return guid_of_copied_items
def set_sign_off(updates, original=None, repo_type=ARCHIVE, user=None):
    """Set sign_off on updates object.

    Rules:
        1. updates['sign_off'] = original['sign_off'] + sign_off of the user performing operation.
        2. If the last modified user and the user performing operation are same then sign_off shouldn't change
        3. If sign_off is received on updates, this value will be preserved
        4. If a users sign_off is already in the list then remove it an append it to the remaining

    :param updates: dict receiving the computed sign_off
    :param original: stored item (may be None for new items)
    :param repo_type: only ARCHIVE items get a sign_off
    :param user: acting user; defaults to the request user
    """
    if repo_type != ARCHIVE:
        return
    user = user if user else get_user()
    if not user:
        return
    if SIGN_OFF in updates:  # rule 3: an explicit sign_off wins
        return
    sign_off = get_sign_off(user)
    # ``or ''`` guards against a stored sign_off of None.
    current_sign_off = '' if original is None else (original.get(SIGN_OFF, '') or '')
    if current_sign_off.endswith(sign_off):  # rule 2: same user signed last
        return
    # remove the sign off from the list if already there
    current_sign_off = current_sign_off.replace(sign_off + '/', '')
    updated_sign_off = '{}/{}'.format(current_sign_off, sign_off)
    updates[SIGN_OFF] = updated_sign_off[1:] if updated_sign_off.startswith('/') else updated_sign_off
def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """Check that an archived item may still be duplicated.

    Rules: the item must exist in the archive, the ``duplicate`` workflow
    transition must be allowed, and the item must not be locked by a
    different user (unless force-unlocked).

    :param doc_in_archive: object representing the doc in archive collection
    :type doc_in_archive: dict
    :param doc: object received as part of request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: if doc_in_archive is None
    :raises InvalidStateTransitionError: if workflow transition is invalid
    :raises SuperdeskApiError.forbiddenError: if item is locked by someone else
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError(
            'Fail to found item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_user = doc_in_archive.get('lock_user', None)
    force_unlock = doc_in_archive.get('force_unlock', False)
    current_user = get_user()
    current_user_id = str(current_user.get(config.ID_FIELD)) if current_user else None
    if lock_user and not force_unlock and str(lock_user) != current_user_id:
        raise SuperdeskApiError.forbiddenError(
            'The item was locked by another user')
def push_content_notification(items, event='content:update'):
    """Emit a single *event* notification covering several items.

    Gathers the distinct item ids, desks and stages found in *items* and
    pushes them in one notification. The updated handler may pass two
    versions of the same item so that both old and new desk/stage are sent.

    :param list items: list of items
    :param event: custom event name
    """
    ids = {}
    desks = {}
    stages = {}
    for item in items:
        ids[str(item.get('_id', ''))] = 1
        task = item.get('task', {})
        desk = task.get('desk')
        if desk:
            desks[str(desk)] = 1
        stage = task.get('stage')
        if stage:
            stages[str(stage)] = 1
    user = get_user()
    push_notification(event, user=str(user.get(config.ID_FIELD, '')),
                      items=ids, desks=desks, stages=stages)
def create(self, docs, **kwargs):
    """Copy the archive item identified by the request's ``guid``.

    Desk items are either rejected or, when WORKFLOW_ALLOW_COPY_TO_PERSONAL
    is enabled, moved to the current user's personal space.

    :param docs: request payload docs (one copy is made per doc)
    :return: list of GUIDs of the copied items
    :raises SuperdeskApiError.notFoundError: item not found in archive
    :raises SuperdeskApiError.preconditionFailedError: item is on a desk and
        copy-to-personal is disabled
    :raises InvalidStateTransitionError: workflow forbids copying
    """
    guid_of_item_to_be_copied = request.view_args["guid"]
    guid_of_copied_items = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(
                _("Fail to found item with guid: {guid}").format(guid=guid_of_item_to_be_copied)
            )
        current_desk_of_item = archived_doc.get("task", {}).get("desk")
        if current_desk_of_item and not app.config["WORKFLOW_ALLOW_COPY_TO_PERSONAL"]:
            raise SuperdeskApiError.preconditionFailedError(message=_("Copy is not allowed on items in a desk."))
        elif current_desk_of_item:
            # Detach the copy from the desk and hand it to the current user.
            archived_doc["task"] = {}
            archived_doc["original_creator"] = get_user_id()
        if not is_workflow_state_transition_valid("copy", archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_copied_items.append(new_guid)
    if kwargs.get("notify", True):
        user = get_user()
        push_notification("item:copy", copied=1, user=str(user.get(config.ID_FIELD, "")))
    return guid_of_copied_items
def push_content_notification(items, event='content:update'):
    """Push one *event* notification describing all given items.

    Accumulates every distinct item id, desk and stage and sends them in a
    single notification; callers may include two versions of one item so
    both the old and the new desk/stage are reported.

    :param list items: list of items
    :param event: custom event name
    """
    ids = {}
    desks = {}
    stages = {}
    for entry in items:
        ids[str(entry.get('_id', ''))] = 1
        task_info = entry.get('task', {})
        if task_info.get('desk'):
            desks[str(task_info.get('desk', ''))] = 1
        if task_info.get('stage'):
            stages[str(task_info.get('stage', ''))] = 1
    current_user = get_user()
    push_notification(
        event,
        user=str(current_user.get(config.ID_FIELD, '')),
        items=ids,
        desks=desks,
        stages=stages,
    )
def _validate(self, doc_in_archive, doc, guid_to_duplicate):
    """Validates if the given archived_doc is still eligible to be duplicated.

    Rules:
        1. Is the item requested found in archive collection?
        2. Is workflow transition valid?
        3. Is item locked by another user?

    :param doc_in_archive: object representing the doc in archive collection
    :type doc_in_archive: dict
    :param doc: object received as part of request
    :type doc: dict
    :param guid_to_duplicate: GUID of the item to duplicate
    :type guid_to_duplicate: str
    :raises SuperdeskApiError.notFoundError: If doc_in_archive is None
    :raises SuperdeskApiError.forbiddenError: if item is locked
    :raises InvalidStateTransitionError: if workflow transition is invalid
    """
    if not doc_in_archive:
        raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' % guid_to_duplicate)

    if not is_workflow_state_transition_valid('duplicate', doc_in_archive[ITEM_STATE]):
        raise InvalidStateTransitionError()

    lock_user = doc_in_archive.get('lock_user', None)
    force_unlock = doc_in_archive.get('force_unlock', False)
    user = get_user()
    str_user_id = str(user.get(config.ID_FIELD)) if user else None
    # A lock held by someone else blocks duplication unless force-unlocked.
    if lock_user and str(lock_user) != str_user_id and not force_unlock:
        raise SuperdeskApiError.forbiddenError('The item was locked by another user')
def create(self, docs, **kwargs):
    """Link a new take to the target story.

    :param docs: request docs; the first doc may carry ``link_id`` (existing
        item to use as the take) and ``desk`` (desk for the new take)
    :return: list with the id of the linked item
    :raises SuperdeskApiError.badRequestError: target is broadcast genre
    :raises SuperdeskApiError.forbiddenError: user not a member of the desk
    """
    target_id = request.view_args['target_id']
    doc = docs[0]
    link_id = doc.get('link_id')
    desk_id = doc.get('desk')
    service = get_resource_service(ARCHIVE)
    target = service.find_one(req=None, _id=target_id)
    self._validate_link(target, target_id)
    link = {}

    if is_genre(target, BROADCAST_GENRE):
        raise SuperdeskApiError.badRequestError(
            "Cannot add new take to the story with genre as broadcast.")

    if desk_id:
        link = {'task': {'desk': desk_id}}
        # The user must be a member of the requested desk.
        user = get_user()
        lookup = {'_id': desk_id, 'members.user': user['_id']}
        desk = get_resource_service('desks').find_one(req=None, **lookup)
        if not desk:
            raise SuperdeskApiError.forbiddenError(
                "No privileges to create new take on requested desk.")
        link['task']['stage'] = desk['working_stage']

    if link_id:
        link = service.find_one(req=None, _id=link_id)

    linked_item = self.packageService.link_as_next_take(target, link)
    doc.update(linked_item)
    build_custom_hateoas(CUSTOM_HATEOAS, doc)
    return [linked_item['_id']]
def set_sign_off(updates, original=None, repo_type=ARCHIVE, user=None):
    """
    Set sign_off on updates object.

    Rules:
        1. updates['sign_off'] = original['sign_off'] + sign_off of the user performing operation.
        2. If the last modified user and the user performing operation are same then sign_off shouldn't change
        3. If sign_off is received on updates, this value will be preserved

    :param updates: dict receiving the computed sign_off
    :param original: stored item (may be None for new items)
    :param repo_type: only ARCHIVE items get a sign_off
    :param user: acting user; defaults to the request user
    """
    if repo_type != ARCHIVE:
        return

    user = user if user else get_user()
    if not user:
        return

    if SIGN_OFF in updates:  # rule 3: an explicit sign_off wins
        return
    sign_off = get_sign_off(user)
    # ``or ''`` guards against a stored sign_off of None, which would
    # otherwise crash the endswith() call below.
    current_sign_off = '' if original is None else (original.get(SIGN_OFF, '') or '')

    if current_sign_off.endswith(sign_off):  # rule 2: same user signed last
        return

    updated_sign_off = '{}/{}'.format(current_sign_off, sign_off)
    updates[SIGN_OFF] = updated_sign_off[1:] if updated_sign_off.startswith('/') else updated_sign_off
def on_deleted(self, docs):
    """Clean up after deleted package docs and notify clients.

    Deletes rendition media files of non-archived docs, recursively deletes
    items referenced by the packages' groups, and pushes an item:deleted
    notification for the first doc.

    :param docs: deleted doc or list of deleted docs
    """
    docs = docs if isinstance(docs, list) else [docs]
    file_ids = [
        rend.get("media")
        for doc in docs
        for rend in doc.get("renditions", {}).values()
        if not doc.get("archived") and rend.get("media")
    ]

    for file_id in file_ids:
        superdesk.app.media.delete(file_id)

    ids = [
        ref.get("residRef")
        for doc in docs
        for group in doc.get("groups", {})
        for ref in group.get("refs", {})
        if ref.get("residRef")
    ]

    if ids:
        self.delete({"_id": {"$in": ids}})

    user = get_user(required=True)
    if docs:
        push_notification("item:deleted", item=str(docs[0].get(config.ID_FIELD)), user=str(user))
def create(self, docs, **kwargs):
    """Copy the archive item identified by the request's ``guid``.

    :param docs: request payload docs (one copy is made per doc)
    :return: list of GUIDs of the copied items
    :raises SuperdeskApiError.notFoundError: item not found in archive
    :raises SuperdeskApiError.preconditionFailedError: item is on a desk
    :raises InvalidStateTransitionError: workflow forbids copying
    """
    guid_of_item_to_be_copied = request.view_args['guid']
    guid_of_copied_items = []
    for doc in docs:
        archive_service = get_resource_service(ARCHIVE)
        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError(_(
                'Fail to found item with guid: {guid}').format(guid=guid_of_item_to_be_copied))
        # Copying is restricted to personal content (items without a desk).
        current_desk_of_item = archived_doc.get('task', {}).get('desk')
        if current_desk_of_item:
            raise SuperdeskApiError.preconditionFailedError(message=_('Copy is not allowed on items in a desk.'))
        if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_copied_items.append(new_guid)
    if kwargs.get('notify', True):
        user = get_user()
        push_notification('item:copy', copied=1, user=str(user.get(config.ID_FIELD, '')))
    return guid_of_copied_items
def set_sign_off(updates, original=None, repo_type=ARCHIVE, user=None):
    """Append the acting user's sign-off to the item's sign-off trail.

    Rules:
        1. updates['sign_off'] = original['sign_off'] + sign-off of the acting user.
        2. No change when the acting user already signed off last.
        3. An explicit sign_off present in *updates* is preserved.
        4. A sign-off already in the trail is moved to the end.

    :param updates: dict receiving the computed sign_off
    :param original: stored item (may be None for new items)
    :param repo_type: only ARCHIVE items get a sign_off
    :param user: acting user; defaults to the request user
    """
    if repo_type != ARCHIVE:
        return
    user = user if user else get_user()
    if not user:
        return
    if SIGN_OFF in updates:
        return

    sign_off = get_sign_off(user)
    if original is None:
        trail = ''
    else:
        trail = original.get(SIGN_OFF, '') or ''
    if trail.endswith(sign_off):
        return

    # Drop any earlier occurrence so this sign-off ends up last (rule 4).
    trail = trail.replace(sign_off + '/', '')
    combined = '{}/{}'.format(trail, sign_off)
    updates[SIGN_OFF] = combined[1:] if combined.startswith('/') else combined
def set_dateline(doc, repo_type):
    """
    If repo_type is ARCHIVE and dateline isn't available then this method sets dateline
    property for the article represented by doc.

    Dateline has 3 parts: Located, Date (Format: Month Day) and Source. Dateline can either be simple: Sydney, July 30
    AAP - or can be complex: Surat,Gujarat,IN, July 30 AAP -. Date in the dateline should be timezone sensitive to the
    Located. Located is set on the article based on user preferences if available. If located is not available in
    user preferences then dateline in full will not be set.

    :param doc: article
    :param repo_type: collection name where the doc will be persisted
    """
    if repo_type == ARCHIVE and 'dateline' not in doc:
        current_date_time = dateline_ts = utcnow()
        doc['dateline'] = {'date': current_date_time, 'source': ORGANIZATION_NAME_ABBREVIATION,
                           'located': None, 'text': None}
        user = get_user()
        # Full dateline text only when the user's preferred location is known.
        if user and user.get('user_preferences', {}).get('dateline:located'):
            located = user.get('user_preferences', {}).get('dateline:located', {}).get('located')
            if located:
                doc['dateline']['located'] = located
                doc['dateline']['text'] = format_dateline_to_locmmmddsrc(located, dateline_ts)
def delete(self, lookup):
    """Unlink an update item from the story it rewrites.

    Clears all rewrite/take-related fields, assigns a fresh event_id and
    notifies clients.

    :param lookup: resource lookup (unused; target comes from the URL)
    :raises SuperdeskApiError.badRequestError: item is not a rewrite
    """
    target_id = request.view_args['target_id']
    archive_service = get_resource_service(ARCHIVE)
    # NOTE(review): assumes the target exists — find_one returning None would
    # raise AttributeError below; confirm upstream guarantees existence.
    target = archive_service.find_one(req=None, _id=target_id)
    updates = {}

    if target.get('rewrite_of'):
        # remove the rewrite info
        ArchiveSpikeService().update_rewrite(target)

    if not target.get('rewrite_of'):
        # there is nothing to do
        raise SuperdeskApiError.badRequestError("Only updates can be unlinked!")

    if target.get('rewrite_of'):
        updates['rewrite_of'] = None

    if target.get('anpa_take_key'):
        updates['anpa_take_key'] = None

    if target.get('rewrite_sequence'):
        updates['rewrite_sequence'] = None

    if target.get('sequence'):
        updates['sequence'] = None

    # Detach from the shared event chain.
    updates['event_id'] = generate_guid(type=GUID_TAG)

    archive_service.system_update(target_id, updates, target)
    user = get_user(required=True)
    push_notification('item:unlink', item=target_id, user=str(user.get(config.ID_FIELD)))
    app.on_archive_item_updated(updates, target, ITEM_UNLINK)
def create(self, docs, **kwargs):
    """Link a new take to the target story and version it.

    :param docs: request docs; the first doc may carry ``link_id`` (existing
        item to use as the take) and ``desk`` (desk for the new take)
    :return: list with the id of the linked item
    :raises SuperdeskApiError.forbiddenError: user not a member of the desk
    """
    target_id = request.view_args['target_id']
    doc = docs[0]
    link_id = doc.get('link_id')
    desk_id = doc.get('desk')
    service = get_resource_service(ARCHIVE)
    target = service.find_one(req=None, _id=target_id)
    self._validate_link(target, target_id)
    link = {}

    if desk_id:
        link = {'task': {'desk': desk_id}}
        # The user must be a member of the requested desk.
        user = get_user()
        lookup = {'_id': desk_id, 'members.user': user['_id']}
        desk = get_resource_service('desks').find_one(req=None, **lookup)
        if not desk:
            raise SuperdeskApiError.forbiddenError("No privileges to create new take on requested desk.")
        link['task']['stage'] = desk['working_stage']

    if link_id:
        link = service.find_one(req=None, _id=link_id)

    linked_item = self.packageService.link_as_next_take(target, link)
    insert_into_versions(id_=linked_item[config.ID_FIELD])
    doc.update(linked_item)
    build_custom_hateoas(CUSTOM_HATEOAS, doc)
    return [linked_item['_id']]
def copy_metadata_from_user_preferences(doc, repo_type=ARCHIVE):
    """
    Copies following properties: byline, signoff, dateline.located,
    place from user preferences to doc if the repo_type is Archive.

    About Dateline: Dateline has 3 parts: Located, Date (Format: Month Day) and Source. Dateline can either be simple:
    Sydney, July 30 AAP - or can be complex: Surat,Gujarat,IN, July 30 AAP -. Date in the dateline is timezone
    sensitive to the Located. Located is set on the article based on user preferences if available. If located is not
    available in user preferences then dateline in full will not be set.

    :param doc: article to receive the metadata
    :param repo_type: only ARCHIVE docs are touched
    """
    if repo_type == ARCHIVE:
        user = get_user()

        if 'dateline' not in doc:
            current_date_time = dateline_ts = utcnow()
            doc['dateline'] = {'date': current_date_time, 'source': ORGANIZATION_NAME_ABBREVIATION,
                               'located': None, 'text': None}
            # Full dateline only when the user has a preferred location.
            if user and user.get('user_preferences', {}).get('dateline:located'):
                located = user.get('user_preferences', {}).get('dateline:located', {}).get('located')
                if located:
                    doc['dateline']['located'] = located
                    doc['dateline']['text'] = format_dateline_to_locmmmddsrc(located, dateline_ts)

        if BYLINE not in doc and user and user.get(BYLINE):
            doc[BYLINE] = user[BYLINE]

        if 'place' not in doc and user:
            place_in_preference = user.get('user_preferences', {}).get('article:default:place')
            if place_in_preference:
                doc['place'] = place_in_preference.get('place')

        set_sign_off(doc, repo_type=repo_type, user=user)
def create(self, docs, **kwargs):
    """Link a new take to the target story.

    :param docs: request docs; the first doc may carry ``link_id`` (existing
        item to use as the take) and ``desk`` (desk for the new take)
    :return: list with the id of the linked item
    :raises SuperdeskApiError.badRequestError: target is broadcast genre
    :raises SuperdeskApiError.forbiddenError: user not a member of the desk
    """
    target_id = request.view_args['target_id']
    doc = docs[0]
    link_id = doc.get('link_id')
    desk_id = doc.get('desk')
    service = get_resource_service(ARCHIVE)
    target = service.find_one(req=None, _id=target_id)
    self._validate_link(target, target_id)
    link = {}

    if is_genre(target, BROADCAST_GENRE):
        raise SuperdeskApiError.badRequestError("Cannot add new take to the story with genre as broadcast.")

    if desk_id:
        link = {'task': {'desk': desk_id}}
        # The user must be a member of the requested desk.
        user = get_user()
        lookup = {'_id': desk_id, 'members.user': user['_id']}
        desk = get_resource_service('desks').find_one(req=None, **lookup)
        if not desk:
            raise SuperdeskApiError.forbiddenError("No privileges to create new take on requested desk.")
        link['task']['stage'] = desk['working_stage']

    if link_id:
        link = service.find_one(req=None, _id=link_id)

    linked_item = self.packageService.link_as_next_take(target, link)
    doc.update(linked_item)
    build_custom_hateoas(CUSTOM_HATEOAS, doc)
    return [linked_item['_id']]
def delete(self, lookup):
    """Unlink an update item from the story it rewrites.

    Clears all rewrite/take-related fields, assigns a fresh event_id and
    notifies clients.

    :param lookup: resource lookup (unused; target comes from the URL)
    :raises SuperdeskApiError.badRequestError: item is not a rewrite
    """
    target_id = request.view_args["target_id"]
    archive_service = get_resource_service(ARCHIVE)
    # NOTE(review): assumes the target exists — find_one returning None would
    # raise AttributeError below; confirm upstream guarantees existence.
    target = archive_service.find_one(req=None, _id=target_id)
    updates = {}

    if target.get("rewrite_of"):
        # remove the rewrite info
        ArchiveSpikeService().update_rewrite(target)

    if not target.get("rewrite_of"):
        # there is nothing to do
        raise SuperdeskApiError.badRequestError(
            _("Only updates can be unlinked!"))

    if target.get("rewrite_of"):
        updates["rewrite_of"] = None

    if target.get("anpa_take_key"):
        updates["anpa_take_key"] = None

    if target.get("rewrite_sequence"):
        updates["rewrite_sequence"] = None

    if target.get("sequence"):
        updates["sequence"] = None

    # Detach from the shared event chain.
    updates["event_id"] = generate_guid(type=GUID_TAG)

    archive_service.system_update(target_id, updates, target)
    user = get_user(required=True)
    push_notification("item:unlink", item=target_id, user=str(user.get(config.ID_FIELD)))
    app.on_archive_item_updated(updates, target, ITEM_UNLINK)
def on_deleted(self, docs):
    """Clean up after deleted package docs and notify clients.

    Deletes rendition media files of non-archived docs, recursively deletes
    items referenced by the packages' groups, and pushes an item:deleted
    notification for the first doc.

    :param docs: deleted doc or list of deleted docs
    """
    docs = docs if isinstance(docs, list) else [docs]
    file_ids = [
        rend.get('media')
        for doc in docs
        for rend in doc.get('renditions', {}).values()
        if not doc.get('archived') and rend.get('media')
    ]

    for file_id in file_ids:
        superdesk.app.media.delete(file_id)

    # NOTE(review): the {} defaults make these loops no-ops when the fields
    # are absent — presumably 'groups' and 'refs' are lists when present.
    ids = [
        ref.get('residRef')
        for doc in docs
        for group in doc.get('groups', {})
        for ref in group.get('refs', {})
        if ref.get('residRef')
    ]

    if ids:
        self.delete({'_id': {'$in': ids}})

    user = get_user(required=True)
    if docs:
        push_notification('item:deleted', item=str(docs[0].get(config.ID_FIELD)), user=str(user))
def create(self, docs, **kwargs):
    """Link a new take to the target story and version it.

    :param docs: request docs; the first doc may carry ``link_id`` (existing
        item to use as the take) and ``desk`` (desk for the new take)
    :return: list with the id of the linked item
    :raises SuperdeskApiError.forbiddenError: user not a member of the desk
    """
    target_id = request.view_args['target_id']
    doc = docs[0]
    link_id = doc.get('link_id')
    desk_id = doc.get('desk')
    service = get_resource_service(ARCHIVE)
    target = service.find_one(req=None, _id=target_id)
    self._validate_link(target, target_id)
    link = {}

    if desk_id:
        link = {'task': {'desk': desk_id}}
        # The user must be a member of the requested desk.
        user = get_user()
        lookup = {'_id': desk_id, 'members.user': user['_id']}
        desk = get_resource_service('desks').find_one(req=None, **lookup)
        if not desk:
            raise SuperdeskApiError.forbiddenError(
                "No privileges to create new take on requested desk.")
        link['task']['stage'] = desk['working_stage']

    if link_id:
        link = service.find_one(req=None, _id=link_id)

    linked_item = self.packageService.link_as_next_take(target, link)
    insert_into_versions(id_=linked_item[config.ID_FIELD])
    doc.update(linked_item)
    build_custom_hateoas(CUSTOM_HATEOAS, doc)
    return [linked_item['_id']]
def on_create(self, docs):
    """Import user profiles from an external directory for the logged-in user.

    Each doc must carry the logged-in user's own credentials; the
    authenticated external profile replaces the doc before creation.

    :param docs: list of credential docs to import
    :raises SuperdeskApiError.forbiddenError: credentials invalid or not the
        logged-in user's own
    :raises SuperdeskApiError.badRequestError: profile already exists
    """
    # NOTE(review): assumes get_user() returns a dict here — confirm an
    # unauthenticated request cannot reach this hook.
    logged_in_user = get_user().get('username')
    for index, doc in enumerate(docs):
        # ensuring the that logged in user is importing the profile.
        if logged_in_user != doc.get('username'):
            raise SuperdeskApiError.forbiddenError(message="Invalid Credentials.", payload={'credentials': 1})

        try:
            # authenticate on error sends 401 and the client is redirected to login.
            # but in case import user profile from Active Directory 403 should be fine.
            user = get_resource_service('auth').authenticate(doc)
        except CredentialsAuthError:
            raise SuperdeskApiError.forbiddenError(message="Invalid Credentials.", payload={'credentials': 1})

        if user.get('_id'):
            raise SuperdeskApiError.badRequestError(message="User already exists in the system.",
                                                    payload={'profile_to_import': 1})

        docs[index] = user

    super().on_create(docs)
def get_user_from_request(required=False):
    """
    Get user authenticated for current request.

    Thin wrapper delegating to ``auth.get_user``.

    :param boolean required: if True and there is no user it will raise an error
    :return: whatever ``auth.get_user`` returns for the current request
    """
    return auth.get_user(required)
def on_create(self, docs):
    """Normalize and validate template docs before creation.

    Checks privileges, normalizes the name, computes the next scheduled run,
    validates kill templates and desk assignments, and stamps the creating
    user.

    :param docs: list of template documents to be created
    :raises SuperdeskApiError.badRequestError: kill template carries
        disallowed fields
    """
    for doc in docs:
        self._validate_privileges(doc, action="create")
        doc["template_name"] = doc["template_name"].lower().strip()
        if doc.get("schedule"):
            doc["next_run"] = get_next_run(doc.get("schedule"))

        if doc.get("template_type") == TemplateType.KILL.value and any(
                key for key in doc.keys() if key in KILL_TEMPLATE_NOT_REQUIRED_FIELDS):
            raise SuperdeskApiError.badRequestError(message=_(
                "Invalid kill template. {fields} are not allowed").format(
                fields=", ".join(KILL_TEMPLATE_NOT_REQUIRED_FIELDS)))
        if doc.get("template_type") == TemplateType.KILL.value:
            self._validate_kill_template(doc)
        if get_user():
            doc.setdefault("user", get_user()[config.ID_FIELD])
        self._validate_template_desks(doc)
def update(self, id, updates, original):
    """Unspike an agenda item: make it active again, drop its expiry, notify.

    :param id: id of the agenda item
    :param updates: dict of changes to apply
    :param original: the item as currently stored
    :return: the updated item
    """
    acting_user = get_user(required=True)

    updates[ITEM_STATE] = ITEM_ACTIVE
    updates[ITEM_EXPIRY] = None

    restored = self.backend.update(self.datasource, id, updates, original)
    push_notification(
        'agenda:unspiked',
        item=str(id),
        user=str(acting_user.get(config.ID_FIELD)),
    )
    return restored
def update(self, id, updates, original):
    """Spike an agenda item: mark it spiked, set its expiry, notify clients.

    :param id: id of the agenda item
    :param updates: dict of changes to apply
    :param original: the item as currently stored
    :return: the updated item
    """
    acting_user = get_user(required=True)

    updates[ITEM_STATE] = ITEM_SPIKED
    set_item_expiry(updates)

    spiked = self.backend.update(self.datasource, id, updates, original)
    push_notification(
        'agenda:spiked',
        item=str(id),
        user=str(acting_user.get(config.ID_FIELD)),
    )
    return spiked
def push_template_notification(docs, event="template:update"):
    """Notify clients of a template change, listing every affected desk.

    :param docs: template docs that changed
    :param event: notification event name
    """
    user = get_user()
    template_desks = set()
    for doc in docs:
        desks = doc.get("template_desks")
        if desks:
            for desk in desks:
                template_desks.add(str(desk))

    push_notification(event, user=str(user.get(config.ID_FIELD, "")), desks=list(template_desks))
def update(self, id, updates, original):
    """Spike an event: mark it spiked, cancel publication, expire it, notify.

    :param id: id of the event
    :param updates: dict of changes to apply
    :param original: the event as currently stored
    :return: the updated event
    """
    spiking_user = get_user(required=True)

    updates[ITEM_STATE] = ITEM_SPIKED
    set_item_expiry(updates)
    updates['pubstatus'] = PUB_STATUS_CANCELED

    spiked = self.backend.update(self.datasource, id, updates, original)
    push_notification(
        'events:spiked',
        item=str(id),
        user=str(spiking_user.get(config.ID_FIELD)),
    )
    return spiked
def set_byline(doc, repo_type=ARCHIVE):
    """
    Copy the author's profile byline onto an archive doc.

    The byline is set only for ARCHIVE docs and only when the current user's
    profile has a byline; otherwise the doc is left untouched.

    :param doc: article being created
    :param repo_type: collection the doc will be persisted to
    """
    if repo_type != ARCHIVE:
        return
    user = get_user()
    if user and user.get(BYLINE):
        doc[BYLINE] = user[BYLINE]
def get_timezones():
    """List common timezones localized to the current user's language.

    Falls back to the app's DEFAULT_LANGUAGE ('en') when the user has no
    language set.

    :return: list of dicts with ``id``, ``name`` and ``location`` keys
    """
    user = get_user()
    # ``or`` fallback also covers a language stored as None or '', which
    # dict.get's default would not (the default only applies to a missing
    # key) and which would crash the .replace() call.
    lang = (user.get('language') or flask.current_app.config.get('DEFAULT_LANGUAGE', 'en')).replace('-', '_')
    return [{
        'id': tz,
        'name': dates.get_timezone_name(tz, locale=lang),
        'location': dates.get_timezone_location(tz, locale=lang),
    } for tz in pytz.common_timezones]
def get_timezones():
    """List common timezones localized to the current user's language.

    Falls back to the app's DEFAULT_LANGUAGE ('en') when the user has no
    language set.

    :return: list of dicts with ``id``, ``name`` and ``location`` keys
    """
    user = get_user()
    # ``or`` fallback also covers a language stored as None or '', which
    # dict.get's default would not (the default only applies to a missing
    # key) and which would crash the .replace() call.
    lang = (user.get("language") or flask.current_app.config.get("DEFAULT_LANGUAGE", "en")).replace("-", "_")
    return [{
        "id": tz,
        "name": dates.get_timezone_name(tz, locale=lang),
        "location": dates.get_timezone_location(tz, locale=lang),
    } for tz in pytz.common_timezones]
def render_content_template(item, template, update=False):
    """Render the template.

    :param dict item: item on which template is applied
    :param dict template: template
    :param bool update: when True, merge rendered values into *item* in place
    :return dict: updates to the item
    """
    new_template_data_ignore_fields = TEMPLATE_DATA_IGNORE_FIELDS.copy()
    kwargs = dict(item=item, user=get_user(), now=utcnow())
    # NOTE(review): assumes get_user() returns a dict here — confirm it
    # cannot be None on this code path.
    dateline_present_in_user_preferences = (kwargs["user"].get(
        "user_preferences", {}).get("dateline:located", {}).get("located"))
    if dateline_present_in_user_preferences:
        # User's preferred dateline wins over the template's.
        new_template_data_ignore_fields.add("dateline")
    template_data = template.get("data", {}) if template else {}

    def render_content_template_fields(data, dest=None, top=True):
        # ``dest`` is currently unused; merging happens through ``item``.
        updates = {}
        for key, value in data.items():
            if (top and key in new_template_data_ignore_fields) or not value:
                continue
            if top and key == "extra":
                # Custom ("extra") fields are rendered recursively.
                updates[key] = render_content_template_fields(value, top=False)
                if update:
                    item.setdefault(key, {}).update(updates[key])
            elif isinstance(value, str):
                # String values are Jinja templates; log and skip bad ones.
                try:
                    updates[key] = render_template_string(value, **kwargs)
                except jinja2.exceptions.UndefinedError as err:
                    logger.error(err, extra=dict(field=key, template=value))
                except jinja2.exceptions.TemplateSyntaxError as err:
                    logger.error(err, extra=dict(field=key, template=value))
            elif isinstance(value, (dict, list)):
                updates[key] = value
            elif not isinstance(value, (dict, list)):
                updates[key] = value
        if top:
            update_dateline(updates)
            filter_plaintext_fields(updates)
            if update:
                # Merge dict values, overwrite everything else.
                for key, value in updates.items():
                    if item.get(key) and isinstance(item[key], dict):
                        item[key].update(value)
                    else:
                        item[key] = value
        return updates

    return render_content_template_fields(template_data, dest=item)
def update(self, id, updates, original):
    """Unspike a planning item, restoring its pre-spike workflow state.

    :param id: id of the planning item
    :param updates: dict of changes to apply
    :param original: the item as currently stored
    :return: the updated item
    """
    acting_user = get_user(required=True)

    updates[ITEM_STATE] = original.get('revert_state', WORKFLOW_STATE.IN_PROGRESS)
    updates['revert_state'] = None
    updates[ITEM_EXPIRY] = None

    restored = self.backend.update(self.datasource, id, updates, original)
    push_notification(
        'planning:unspiked',
        item=str(id),
        user=str(acting_user.get(config.ID_FIELD)),
    )
    return restored
def copy_metadata_from_user_preferences(doc, repo_type=ARCHIVE):
    """Copies following properties if the repo_type is Archive:

    byline, dateline.located, place
    from user preferences to doc if the story is not fetched.
    signoff is copied for fetched and created stories

    About Dateline: Dateline has 3 parts: Located, Date (Format: Month Day)
    and Source. Dateline can either be simple: Sydney, July 30 AAP - or can
    be complex: Surat,Gujarat,IN, July 30 AAP -. Date in the dateline is
    timezone sensitive to the Located. Located is set on the article based
    on user preferences if available. If located is not available in user
    preferences then dateline in full will not be set.

    :param doc: article to receive the metadata
    :param repo_type: only ARCHIVE docs are touched
    """
    if repo_type == ARCHIVE:
        user = get_user()
        source = doc.get("source") or get_default_source()

        if doc.get("operation", "") != "fetch":
            located = user.get("user_preferences", {}).get("dateline:located", {}).get("located")
            # Tolerate a missing or malformed dateline field on the doc.
            try:
                dateline = doc["dateline"]["located"]
            except (KeyError, TypeError):
                dateline = None

            if not dateline and user and located:
                current_date_time = dateline_ts = utcnow()
                doc["dateline"] = {
                    "date": current_date_time,
                    "source": source,
                    "located": located,
                    "text": format_dateline_to_locmmmddsrc(located, dateline_ts, source),
                }

            if BYLINE not in doc and user and user.get(BYLINE):
                doc[BYLINE] = user[BYLINE]

            if "place" not in doc and user:
                place_in_preference = user.get("user_preferences", {}).get("article:default:place")
                if place_in_preference:
                    doc["place"] = place_in_preference.get("place")

        set_sign_off(doc, repo_type=repo_type, user=user)
def copy_metadata_from_user_preferences(doc, repo_type=ARCHIVE):
    """Copies following properties if the repo_type is Archive:

    byline, dateline.located, place
    from user preferences to doc if the story is not fetched.
    signoff is copied for fetched and created stories

    About Dateline: Dateline has 3 parts: Located, Date (Format: Month Day)
    and Source. Dateline can either be simple: Sydney, July 30 AAP - or can
    be complex: Surat,Gujarat,IN, July 30 AAP -. Date in the dateline is
    timezone sensitive to the Located. Located is set on the article based
    on user preferences if available. If located is not available in user
    preferences then dateline in full will not be set.

    :param doc: article to receive the metadata
    :param repo_type: only ARCHIVE docs are touched
    """
    if repo_type == ARCHIVE:
        user = get_user()
        source = doc.get('source') or get_default_source()

        if doc.get('operation', '') != 'fetch':
            located = user.get('user_preferences', {}).get('dateline:located', {}).get('located')
            if 'dateline' not in doc and user and located:
                current_date_time = dateline_ts = utcnow()
                doc['dateline'] = {
                    'date': current_date_time,
                    'source': source,
                    'located': located,
                    'text': format_dateline_to_locmmmddsrc(located, dateline_ts, source)
                }

            # Takes packages keep their own byline.
            if doc.get(
                    PACKAGE_TYPE
            ) != TAKES_PACKAGE and BYLINE not in doc and user and user.get(
                    BYLINE):
                doc[BYLINE] = user[BYLINE]

            if 'place' not in doc and user:
                place_in_preference = user.get('user_preferences', {}).get('article:default:place')
                if place_in_preference:
                    doc['place'] = place_in_preference.get('place')

        set_sign_off(doc, repo_type=repo_type, user=user)
def push_item_move_notification(original, doc, event='item:move'):
    """Broadcast that an item moved between desks/stages.

    :param original: original doc
    :param doc: doc after updates
    :param event: event name
    """
    source_task = original.get('task', {})
    destination_task = doc.get('task', {})
    user = get_user()
    push_notification(
        event,
        user=str(user.get(config.ID_FIELD, '')),
        item=str(original.get(config.ID_FIELD)),
        item_version=str(original.get(config.VERSION)),
        from_desk=str(source_task.get('desk')),
        from_stage=str(source_task.get('stage')),
        to_desk=str(destination_task.get('desk')),
        to_stage=str(destination_task.get('stage')),
    )
def update(self, id, updates, original):
    """Spike a planning item, resetting its active coverages to draft.

    :param id: id of the planning item
    :param updates: dict of changes to apply
    :param original: the item as currently stored
    :return: the updated item
    :raises SuperdeskApiError.badRequestError: item is published or in a
        state that cannot be spiked
    """
    # Only unpublished draft/postponed/cancelled items may be spiked.
    if original.get('pubstatus') or original.get('state') not in\
            [WORKFLOW_STATE.DRAFT, WORKFLOW_STATE.POSTPONED, WORKFLOW_STATE.CANCELLED]:
        raise SuperdeskApiError.badRequestError(
            message="Spike failed. Planning item in invalid state for spiking.")

    user = get_user(required=True)
    # Remember the prior workflow state so an unspike can restore it.
    updates['revert_state'] = original[ITEM_STATE]
    updates[ITEM_STATE] = WORKFLOW_STATE.SPIKED
    set_item_expiry(updates)

    # Active coverages drop back to draft and lose their assignment.
    coverages = deepcopy(original.get('coverages') or [])
    for coverage in coverages:
        if coverage.get('workflow_status') == WORKFLOW_STATE.ACTIVE:
            coverage['workflow_status'] = WORKFLOW_STATE.DRAFT
            coverage['assigned_to'] = {}
    updates['coverages'] = coverages

    # Mark item as unlocked directly in order to avoid more queries and notifications
    # coming from lockservice.
    updates.update({
        LOCK_USER: None,
        LOCK_SESSION: None,
        'lock_time': None,
        'lock_action': None
    })
    remove_autosave_on_spike(original)
    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('planning:spiked', item=str(id),
                      user=str(user.get(config.ID_FIELD)),
                      etag=item['_etag'], revert_state=item['revert_state'])

    for coverage in coverages:
        workflow_status = coverage.get('workflow_status')
        if workflow_status == WORKFLOW_STATE.DRAFT:
            self.notify_draft_coverage_on_spike(coverage)

    return item
def set_dateline(doc, repo_type):
    """Set the dateline property on an archive article that lacks one.

    Dateline has 3 parts: Located, Date (Format: Month Day) and Source.
    Dateline can either be simple: Sydney, July 30 AAP - or can be complex:
    Surat,Gujarat,IN, July 30 AAP -. The date is timezone sensitive to the
    Located. Located comes from user preferences if available; without it
    the dateline is left partially empty.

    :param doc: article
    :param repo_type: collection name where the doc will be persisted
    """
    if repo_type != ARCHIVE or 'dateline' in doc:
        return

    current_date_time = dateline_ts = utcnow()
    doc['dateline'] = {
        'date': current_date_time,
        'source': ORGANIZATION_NAME_ABBREVIATION,
        'located': None,
        'text': None,
    }

    user = get_user()
    # Keep the truthiness check on 'dateline:located' itself: the key may
    # exist with a None value, which must be treated as "not set".
    if not (user and user.get('user_preferences', {}).get('dateline:located')):
        return
    located = user.get('user_preferences', {}).get('dateline:located', {}).get('located')
    if not located:
        return

    if located['tz'] != 'UTC':
        dateline_ts = datetime.fromtimestamp(dateline_ts.timestamp(), tz=timezone(located['tz']))

    month = dateline_ts.month
    if month == 9:
        # September is abbreviated as 'Sept', which strftime cannot produce.
        formatted_date = 'Sept {}'.format(dateline_ts.strftime('%d'))
    elif 3 <= month <= 7:
        formatted_date = dateline_ts.strftime('%B %d')
    else:
        formatted_date = dateline_ts.strftime('%b %d')

    doc['dateline']['located'] = located
    doc['dateline']['text'] = '{}, {} {} -'.format(
        located['city'], formatted_date, ORGANIZATION_NAME_ABBREVIATION)
def push_item_move_notification(original, doc, event='item:move'):
    """Notify clients that an item moved between desks/stages.

    :param original: doc before the move
    :param doc: doc after the move
    :param event: notification event name
    """
    user = get_user()
    payload = {
        'user': str(user.get(config.ID_FIELD, '')),
        'item': str(original.get(config.ID_FIELD)),
        'item_version': str(original.get(config.VERSION)),
    }
    # Source desk/stage come from the original task, destination from the
    # updated one.
    for prefix, task in (('from', original.get('task', {})), ('to', doc.get('task', {}))):
        payload['{}_desk'.format(prefix)] = str(task.get('desk'))
        payload['{}_stage'.format(prefix)] = str(task.get('stage'))
    push_notification(event, **payload)
def set_dateline(doc, repo_type):
    """Populate the dateline of a new archive article.

    Dateline has 3 parts: Located, Date (Format: Month Day) and Source, e.g.
    "Sydney, July 30 AAP -" or "Surat,Gujarat,IN, July 30 AAP -". The date
    is rendered in the Located timezone. Located is taken from user
    preferences when available; otherwise only date/source are filled in.

    :param doc: article
    :param repo_type: collection name where the doc will be persisted
    """
    if repo_type != ARCHIVE or "dateline" in doc:
        return

    now = dateline_ts = utcnow()
    doc["dateline"] = {
        "date": now,
        "source": ORGANIZATION_NAME_ABBREVIATION,
        "located": None,
        "text": None,
    }

    user = get_user()
    if user and user.get("user_preferences", {}).get("dateline:located"):
        located = user.get("user_preferences", {}).get("dateline:located", {}).get("located")
        if located:
            if located["tz"] != "UTC":
                # Convert to the located timezone so the printed date is local.
                dateline_ts = datetime.fromtimestamp(dateline_ts.timestamp(), tz=timezone(located["tz"]))

            if dateline_ts.month == 9:
                # 'Sept' is not a strftime abbreviation; format it manually.
                formatted_date = "Sept {}".format(dateline_ts.strftime("%d"))
            elif 3 <= dateline_ts.month <= 7:
                formatted_date = dateline_ts.strftime("%B %d")
            else:
                formatted_date = dateline_ts.strftime("%b %d")

            doc["dateline"]["located"] = located
            doc["dateline"]["text"] = "{}, {} {} -".format(
                located["city"], formatted_date, ORGANIZATION_NAME_ABBREVIATION
            )
def on_create(self, docs):
    """Import user profiles (e.g. from Active Directory) after verifying
    that the logged-in user is importing their own profile and that the
    profile does not already exist.

    :param docs: profile docs to import; each is replaced in place by the
        authenticated user document
    :raises SuperdeskApiError.forbiddenError: on a username mismatch or
        failed authentication
    :raises SuperdeskApiError.badRequestError: when the user already exists
    """
    username = get_user().get('username')
    for i, profile in enumerate(docs):
        # Only the logged-in user may import their own profile.
        if username != profile.get('username'):
            raise SuperdeskApiError.forbiddenError(
                message="Invalid Credentials.", payload={'credentials': 1})
        try:
            # authenticate() failures normally yield 401 (client redirects to
            # login); for an AD profile import a 403 is the right response.
            user = get_resource_service('auth').authenticate(profile)
        except CredentialsAuthError:
            raise SuperdeskApiError.forbiddenError(
                message="Invalid Credentials.", payload={'credentials': 1})
        if user.get('_id'):
            raise SuperdeskApiError.badRequestError(
                message="User already exists in the system.",
                payload={'profile_to_import': 1})
        docs[i] = user
    super().on_create(docs)
def set_sign_off(updates, original=None, repo_type=ARCHIVE):
    """Set sign_off on the updates object.

    Rules:
    1. updates['sign_off'] = original['sign_off'] + sign_off of the user
       performing the operation.
    2. If the last modified user and the user performing the operation are
       the same then sign_off shouldn't change.

    :param updates: dict receiving the new SIGN_OFF value
    :param original: stored doc, or None for a new item
    :param repo_type: only ARCHIVE docs get a sign-off
    """
    if repo_type != ARCHIVE:
        return

    user = get_user()
    if not user:
        return

    sign_off = get_sign_off(user)
    if not sign_off:
        # Nothing to append. This also guards against a None sign-off,
        # which would crash the endswith() check below; an empty string
        # previously returned early anyway (endswith("") is always True).
        return

    # `or ""` guards against SIGN_OFF existing with a None value, which
    # .get(SIGN_OFF, "") would pass through and crash endswith().
    current_sign_off = "" if original is None else (original.get(SIGN_OFF) or "")

    if current_sign_off.endswith(sign_off):
        return

    updated_sign_off = "{}/{}".format(current_sign_off, sign_off)
    updates[SIGN_OFF] = updated_sign_off[1:] if updated_sign_off.startswith("/") else updated_sign_off
def on_deleted(self, docs):
    """Clean up after deletion: remove rendition media files, cascade-delete
    referenced items and push an item:deleted notification.

    :param docs: a deleted doc or a list of deleted docs
    """
    docs = docs if isinstance(docs, list) else [docs]

    # Delete rendition binaries unless the doc is archived.
    file_ids = [rend.get('media')
                for doc in docs
                for rend in doc.get('renditions', {}).values()
                if not doc.get('archived') and rend.get('media')]
    for file_id in file_ids:
        superdesk.app.media.delete(file_id)

    # Cascade-delete items referenced by the package groups.
    ids = [ref.get('residRef')
           for doc in docs
           for group in doc.get('groups', [])
           for ref in group.get('refs', [])
           if ref.get('residRef')]
    if ids:
        self.delete({'_id': {'$in': ids}})

    user = get_user(required=True)
    if docs:
        # Send the user id (not the stringified user dict) so the payload
        # matches every other item notification in this module.
        push_notification('item:deleted',
                          item=str(docs[0].get(config.ID_FIELD)),
                          user=str(user.get(config.ID_FIELD, '')))
def copy_metadata_from_user_preferences(doc, repo_type=ARCHIVE):
    """Copies following properties: byline, dateline.located,
    place from user preferences to doc if the repo_type is Archive
    and if the story is not fetched.

    signoff is copied for fetched and created stories.

    About Dateline: Dateline has 3 parts: Located, Date (Format: Month Day)
    and Source. Dateline can either be simple: Sydney, July 30 AAP - or can
    be complex: Surat,Gujarat,IN, July 30 AAP -. Date in the dateline is
    timezone sensitive to the Located. Located is set on the article based
    on user preferences if available. If located is not available in user
    preferences then dateline in full will not be set.
    """
    if repo_type == ARCHIVE:
        user = get_user()
        source = doc.get('source') or get_default_source()

        if doc.get('operation', '') != 'fetch':
            # Guard with (user or {}): the later checks use `and user`,
            # implying get_user() can return a falsy value — reading
            # preferences off None would raise AttributeError.
            located = (user or {}).get('user_preferences', {}).get('dateline:located', {}).get('located')
            if 'dateline' not in doc and user and located:
                current_date_time = dateline_ts = utcnow()
                doc['dateline'] = {
                    'date': current_date_time,
                    'source': source,
                    'located': located,
                    'text': format_dateline_to_locmmmddsrc(located, dateline_ts, source)
                }

            if BYLINE not in doc and user and user.get(BYLINE):
                doc[BYLINE] = user[BYLINE]

            if 'place' not in doc and user:
                place_in_preference = user.get('user_preferences', {}).get('article:default:place')
                if place_in_preference:
                    doc['place'] = place_in_preference.get('place')

        set_sign_off(doc, repo_type=repo_type, user=user)
def set_original_creator(doc):
    """Stamp ``doc['original_creator']`` with the current user's id.

    Falls back to the doc's existing ``original_creator`` (or "") when the
    user record carries no ``_id``.
    """
    current_user = get_user()
    fallback = doc.get("original_creator", "")
    doc["original_creator"] = str(current_user.get("_id", fallback))
def set_original_creator(doc):
    """Record the id of the current user as the doc's original creator,
    keeping any existing ``original_creator`` (or "") when the user record
    has no ``_id``.
    """
    doc['original_creator'] = str(get_user().get('_id', doc.get('original_creator', '')))