def push_content_notification(items, event='content:update'):
    """Push a single notification covering multiple content items.

    Updated handlers may pass two versions of the same item so that the
    event carries both the old and the new desk/stage.

    :param list items: list of items
    :param event: custom event name
    """
    item_ids = {}
    desk_ids = {}
    stage_ids = {}
    for entry in items:
        item_ids[str(entry.get('_id', ''))] = 1
        task_info = entry.get('task', {})
        if task_info.get('desk'):
            desk_ids[str(task_info.get('desk', ''))] = 1
        if task_info.get('stage'):
            stage_ids[str(task_info.get('stage', ''))] = 1
    current_user = get_user()
    push_notification(
        event,
        user=str(current_user.get(config.ID_FIELD, '')),
        items=item_ids,
        desks=desk_ids,
        stages=stage_ids,
    )
def _update(self, provider):
    """Scan the provider's configured directory and yield parsed items.

    Generator: yields a one-item list for every file whose mtime is newer
    than the provider's ``last_updated``. Each processed file is moved to
    a success/failure location, and an ``ingest:update`` notification is
    pushed once the scan completes.
    """
    self.provider = provider
    self.path = provider.get('config', {}).get('path', None)
    if not self.path:
        # provider has no directory configured -- nothing to ingest
        return
    for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
        try:
            if os.path.isfile(os.path.join(self.path, filename)):
                filepath = os.path.join(self.path, filename)
                stat = os.lstat(filepath)
                last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                if self.is_latest_content(last_updated, provider.get('last_updated')):
                    with open(os.path.join(self.path, filename), 'r') as f:
                        item = self.parser.parse_message(etree.fromstring(f.read()), provider)
                    self.add_timestamps(item)
                    self.move_file(self.path, filename, provider=provider, success=True)
                    yield [item]
                else:
                    # older than last ingest run -- archive without re-parsing
                    self.move_file(self.path, filename, provider=provider, success=True)
        except etreeParserError as ex:
            # malformed XML aborts the whole run as a parser error
            logger.exception("Ingest Type: AFP - File: {0} could not be processed".format(filename), ex)
            self.move_file(self.path, filename, provider=provider, success=False)
            raise ParserError.newsmlOneParserError(ex, provider)
        except ParserError as ex:
            # parser errors are tolerated: quarantine the file and keep scanning
            self.move_file(self.path, filename, provider=provider, success=False)
        except Exception as ex:
            # anything else is fatal for this provider run
            self.move_file(self.path, filename, provider=provider, success=False)
            raise ProviderError.ingestError(ex, provider)
    push_notification('ingest:update')
def update(self, id, updates, original):
    """Spike an archive item.

    Validates the workflow transition, computes expiry from the desk's
    ``spike_expiry`` (when assigned) or the global SPIKE_EXPIRY_MINUTES
    setting, records the state to revert to on unspike, clears any
    rewrite link, persists the updates and notifies clients.
    """
    original_state = original[config.CONTENT_STATE]
    if not is_workflow_state_transition_valid("spike", original_state):
        raise InvalidStateTransitionError()
    package_service = PackageService()
    user = get_user(required=True)
    item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    expiry_minutes = app.settings["SPIKE_EXPIRY_MINUTES"]
    # check if item is in a desk. If it's then use the desks spike_expiry
    if is_assigned_to_a_desk(item):
        desk = get_resource_service("desks").find_one(_id=item["task"]["desk"], req=None)
        expiry_minutes = desk.get("spike_expiry", expiry_minutes)
    updates[EXPIRY] = get_expiry_date(expiry_minutes)
    # NOTE(review): app.config["CONTENT_STATE"] is used as the *key* to read
    # the item's state -- a sibling implementation uses the ITEM_STATE
    # constant directly; confirm this indirection is intended.
    updates[REVERT_STATE] = item.get(app.config["CONTENT_STATE"], None)
    if original.get("rewrite_of"):
        updates["rewrite_of"] = None
    item = self.backend.update(self.datasource, id, updates, original)
    push_notification("item:spike", item=str(item.get("_id")), user=str(user))
    package_service.remove_spiked_refs_from_package(id)
    return item
def unlock(self, item_filter, user_id, session_id, etag):
    """Unlock the item matching *item_filter* on behalf of *user_id*.

    :raises SuperdeskApiError: notFoundError when no item matches,
        badRequestError when the item is not locked, forbiddenError when
        *user_id* is not allowed to unlock it.
    :return: the item re-read after the operation
    """
    item_model = get_model(ItemModel)
    item = item_model.find_one(item_filter)
    if not item:
        raise SuperdeskApiError.notFoundError()
    if not item.get(LOCK_USER):
        raise SuperdeskApiError.badRequestError(message="Item is not locked.")
    can_user_unlock, error_message = self.can_unlock(item, user_id)
    if can_user_unlock:
        self.app.on_item_unlock(item, user_id)
        # delete the item if nothing is saved so far
        # version 0 created on lock item
        if item.get(config.VERSION, 0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
            superdesk.get_resource_service('archive').delete_action(lookup={'_id': item['_id']})
            push_content_notification([item])
        else:
            updates = {LOCK_USER: None, LOCK_SESSION: None, 'lock_time': None, 'force_unlock': True}
            item_model.update(item_filter, updates)
            self.app.on_item_unlocked(item, user_id)
        # notify clients of the unlock (sent for both the delete and update paths)
        push_notification('item:unlock',
                          item=str(item_filter.get(config.ID_FIELD)),
                          item_version=str(item.get(config.VERSION)),
                          state=item.get(ITEM_STATE),
                          user=str(user_id),
                          lock_session=str(session_id))
    else:
        raise SuperdeskApiError.forbiddenError(message=error_message)
    # re-read so the caller sees the post-unlock state (None if deleted)
    item = item_model.find_one(item_filter)
    return item
def publish(self, doc, updates, target_output_channels=None):
    """Queue *doc* for transmission to its output channels.

    When *updates* is given and the article was created manually, fills in
    ``updates['source']`` from the desk (or a default). Pushes a
    notification when any channel rejected the item's format.

    :return: tuple ``(any_channel_closed, queued)``
    :raises PublishQueueError: when no explicit target channels were given
        and nothing could be queued
    """
    any_channel_closed, wrong_formatted_channels, queued = \
        self.queue_transmission(doc=doc, target_output_channels=target_output_channels)
    if updates:
        desk = None
        if doc.get('task', {}).get('desk'):
            desk = get_resource_service('desks').find_one(req=None, _id=doc['task']['desk'])
        if not doc.get('ingest_provider'):
            # manual articles inherit the desk source, else the default source
            updates['source'] = desk['source'] if desk and desk.get('source', '') \
                else DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES
    user = get_user()
    # idiom fix: a non-empty list is truthy -- the extra len() > 0 was redundant
    if wrong_formatted_channels:
        push_notification('item:publish:wrong:format',
                          item=str(doc['_id']),
                          unique_name=doc['unique_name'],
                          desk=str(doc['task']['desk']),
                          user=str(user.get('_id', '')),
                          output_channels=[c['name'] for c in wrong_formatted_channels])
    if not target_output_channels and not queued:
        raise PublishQueueError.item_not_queued_error(Exception('Nothing is saved to publish queue'), None)
    return any_channel_closed, queued
def on_replaced(self, document, original):
    """Clear the autosave, log activity and notify clients after a replace."""
    get_component(ItemAutosave).clear(original['_id'])
    add_activity(ACTIVITY_UPDATE,
                 'replaced item {{ type }} about {{ subject }}',
                 self.datasource,
                 item=original,
                 type=original['type'],
                 subject=get_subject(original))
    current_user = get_user()
    push_notification('item:replaced',
                      item=str(original['_id']),
                      user=str(current_user.get('_id')))
def lock(self, item_filter, user_id, session_id, etag):
    """Lock the item matching *item_filter* for *user_id*/*session_id*.

    Also assigns the user on the item's task and notifies clients.

    :raises SuperdeskApiError: notFoundError when no item matches,
        forbiddenError when the user may not take the lock.
    :return: the item re-read after the operation
    """
    item_model = get_model(ItemModel)
    item = item_model.find_one(item_filter)
    if not item:
        raise SuperdeskApiError.notFoundError()
    can_user_lock, error_message = self.can_lock(item, user_id, session_id)
    if can_user_lock:
        self.app.on_item_lock(item, user_id)
        updates = {LOCK_USER: user_id, LOCK_SESSION: session_id, 'lock_time': utcnow()}
        item_model.update(item_filter, updates)
        # the lock owner becomes the task's assigned user
        if item.get(TASK):
            item[TASK]['user'] = user_id
        else:
            item[TASK] = {'user': user_id}
        superdesk.get_resource_service('tasks').assign_user(item[config.ID_FIELD], item[TASK])
        self.app.on_item_locked(item, user_id)
        push_notification('item:lock',
                          item=str(item.get(config.ID_FIELD)),
                          item_version=str(item.get(config.VERSION)),
                          user=str(user_id),
                          lock_time=updates['lock_time'],
                          lock_session=str(session_id))
    else:
        raise SuperdeskApiError.forbiddenError(message=error_message)
    # re-read so the caller sees the lock fields just written
    item = item_model.find_one(item_filter)
    return item
def restore(self, filter, user):
    """Restore (unspike) the item matching *filter* and notify clients.

    No-op when no item matches.
    """
    model = get_model(ItemModel)
    found = model.find_one(filter)
    if not found:
        return
    model.update(filter, get_restore_updates(found))
    push_notification('item:restore', item=str(filter.get('_id')), user=str(user))
def on_updated(self, updates, original):
    """Notify administrators and clients about ingest channel changes.

    Sends an 'updated' activity (honouring the channel's on_update
    notification preference), and an additional 'opened'/'closed' activity
    when the is_closed flag flipped, then pushes a websocket notification.
    """
    # preference from updates wins, falling back to the stored value, default True
    do_notification = updates.get('notifications', {})\
        .get('on_update', original.get('notifications', {}).get('on_update', True))
    notify_and_add_activity(ACTIVITY_UPDATE, 'updated Ingest Channel {{name}}',
                            self.datasource, item=None,
                            user_list=self.user_service.get_users_by_user_type('administrator')
                            if do_notification else None,
                            name=updates.get('name', original.get('name')),
                            provider_id=original.get('_id'))
    if updates.get('is_closed', False) != original.get('is_closed', False):
        status = ''
        do_notification = False
        if updates.get('is_closed'):
            status = 'closed'
            do_notification = updates.get('notifications', {}). \
                get('on_close', original.get('notifications', {}).get('on_close', True))
        elif not updates.get('is_closed'):
            status = 'opened'
            do_notification = updates.get('notifications', {}). \
                get('on_open', original.get('notifications', {}).get('on_open', True))
        notify_and_add_activity(ACTIVITY_EVENT, '{{status}} Ingest Channel {{name}}',
                                self.datasource, item=None,
                                user_list=self.user_service.get_users_by_user_type('administrator')
                                if do_notification else None,
                                name=updates.get('name', original.get('name')),
                                status=status,
                                provider_id=original.get('_id'))
    push_notification('ingest_provider:update', provider_id=str(original.get('_id')))
    logger.info("Updated Ingest Channel. Data: {}".format(updates))
def _update(self, provider):
    """Scan the provider directory and yield one parsed item per new file.

    Generator variant without specific parser-error handling: any failure
    is logged, the file is quarantined, and the scan continues. Pushes an
    ``ingest:update`` notification when the scan completes.
    """
    self.provider = provider
    self.path = provider.get('config', {}).get('path', None)
    if not self.path:
        # provider has no directory configured -- nothing to ingest
        return
    for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
        try:
            if os.path.isfile(os.path.join(self.path, filename)):
                filepath = os.path.join(self.path, filename)
                stat = os.lstat(filepath)
                last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                if self.is_latest_content(last_updated, provider.get('last_updated')):
                    with open(os.path.join(self.path, filename), 'r') as f:
                        item = self.parser.parse_message(etree.fromstring(f.read()))
                    self.add_timestamps(item)
                    self.move_file(self.path, filename, success=True)
                    yield [item]
                else:
                    # already seen in an earlier run -- archive without re-parsing
                    self.move_file(self.path, filename, success=True)
        except Exception as err:
            # best-effort: log, quarantine the file and continue with the next one
            logger.exception(err)
            self.move_file(self.path, filename, success=False)
    push_notification('ingest:update')
def create(self, docs, **kwargs):
    """Duplicate an archived item onto the same desk, once per doc.

    :param docs: each doc carries the target ``desk``
    :return: list of GUIDs of the duplicated items
    :raises SuperdeskApiError: notFoundError when the source item is
        missing, preconditionFailedError when the target desk differs
        from the item's current desk
    """
    guid_of_item_to_be_duplicated = request.view_args['guid']
    guid_of_duplicated_items = []
    # the service lookup is loop-invariant: resolve it once (was inside the loop)
    archive_service = get_resource_service(ARCHIVE)
    for doc in docs:
        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_duplicated)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' %
                                                  guid_of_item_to_be_duplicated)
        current_desk_of_item = archived_doc.get('task', {}).get('desk')
        if current_desk_of_item is None or str(current_desk_of_item) != str(doc.get('desk')):
            raise SuperdeskApiError.preconditionFailedError(message='Duplicate is allowed within the same desk.')
        send_to(doc=archived_doc, desk_id=doc.get('desk'))
        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_duplicated_items.append(new_guid)
        if kwargs.get('notify', True):
            task = archived_doc.get('task', {})
            push_notification(
                'content:update',
                duplicated=1,
                item=str(new_guid),
                desk=str(task.get('desk', '')),
                stage=str(task.get('stage', ''))
            )
    return guid_of_duplicated_items
def update_provider(provider, rule_set=None, routing_scheme=None):
    """
    Fetches items from ingest provider as per the configuration, ingests them into Superdesk and updates the provider.

    Skips silently when another ingest run for this provider is in flight.
    Always clears the running flag, even on failure.
    """
    if ingest_for_provider_is_already_running(provider):
        return
    try:
        update = {
            LAST_UPDATED: utcnow()
        }
        for items in providers[provider.get('type')].update(provider):
            ingest_items(items, provider, rule_set, routing_scheme)
            stats.incr('ingest.ingested_items', len(items))
            if items:
                # record that at least one batch produced items
                update[LAST_ITEM_UPDATE] = utcnow()
        ingest_service = superdesk.get_resource_service('ingest_providers')
        ingest_service.system_update(provider[superdesk.config.ID_FIELD], update, provider)
        # warn administrators when a provider that should be active produced nothing
        if LAST_ITEM_UPDATE not in update and get_is_idle(provider):
            notify_and_add_activity(
                ACTIVITY_EVENT,
                'Provider {{name}} has gone strangely quiet. Last activity was on {{last}}',
                resource='ingest_providers',
                user_list=ingest_service._get_administrators(),
                name=provider.get('name'),
                last=provider[LAST_ITEM_UPDATE].replace(tzinfo=timezone.utc).astimezone(tz=None).strftime("%c"))
        logger.info('Provider {0} updated'.format(provider[superdesk.config.ID_FIELD]))
        push_notification('ingest:update', provider_id=str(provider[superdesk.config.ID_FIELD]))
    finally:
        mark_provider_as_not_running(provider)
def on_created(self, docs):
    """Notify clients and record an activity entry for each new comment."""
    for comment in docs:
        item_id = str(comment.get('item'))
        push_notification('item:comment', item=item_id)
        mentioned = comment.get('mentioned_users', {}).values()
        add_activity('',
                     type='comment',
                     item=item_id,
                     comment=comment.get('text'),
                     comment_id=str(comment.get('_id')),
                     notify=mentioned)
def unlock(self, item_filter, user_id, session_id, etag):
    """Unlock the item matching *item_filter* on behalf of *user_id*.

    A draft that was never saved (version 1) is deleted instead of
    unlocked; in that case the function returns early.
    NOTE(review): the early return yields None rather than an item --
    confirm callers handle that.

    :raises SuperdeskApiError: notFoundError / badRequestError / forbiddenError
    :return: the item re-read after the operation (None on the delete path)
    """
    item_model = get_model(ItemModel)
    item = item_model.find_one(item_filter)
    if not item:
        raise SuperdeskApiError.notFoundError()
    if not item.get(LOCK_USER):
        raise SuperdeskApiError.badRequestError(message="Item is not locked.")
    can_user_unlock, error_message = self.can_unlock(item, user_id)
    if can_user_unlock:
        self.app.on_item_unlock(item, user_id)
        # delete the item if nothing is saved so far
        if item['_version'] == 1 and item['state'] == 'draft':
            superdesk.get_resource_service('archive').delete(lookup={'_id': item['_id']})
            return
        updates = {LOCK_USER: None, LOCK_SESSION: None, 'lock_time': None, 'force_unlock': True}
        item_model.update(item_filter, updates)
        self.app.on_item_unlocked(item, user_id)
        push_notification('item:unlock',
                          item=str(item_filter.get(config.ID_FIELD)),
                          user=str(user_id),
                          lock_session=str(session_id))
    else:
        raise SuperdeskApiError.forbiddenError(message=error_message)
    item = item_model.find_one(item_filter)
    return item
def create(self, docs, **kwargs):
    """Toggle highlight status for given highlight and item.

    Returns a list parallel to *docs*: the archive item's _id, or None
    when the marked_item guid does not resolve.
    """
    service = get_resource_service('archive')
    ids = []
    for doc in docs:
        item = service.find_one(req=None, guid=doc['marked_item'])
        if not item:
            # keep positional parity with docs
            ids.append(None)
            continue
        ids.append(item['_id'])
        highlights = item.get('highlights', [])
        if not highlights:
            # guard against a stored falsy value (e.g. None)
            highlights = []
        # NOTE: append mutates the item's stored list in place; the same
        # object ends up in both `updates` and `item` passed to update().
        if doc['highlights'] not in highlights:
            highlights.append(doc['highlights'])
            highlight_on = True  # highlight toggled on
        else:
            highlights = [h for h in highlights if h != doc['highlights']]
            highlight_on = False  # highlight toggled off
        updates = {
            'highlights': highlights,
            '_updated': item['_updated'],
            '_etag': item['_etag']
        }
        service.update(item['_id'], updates, item)
        push_notification(
            'item:highlight',
            marked=int(highlight_on),
            item_id=item['_id'],
            highlight_id=str(doc['highlights']))
    return ids
def on_updated(self, updates, original):
    """Push an update notification and log activity for desk-assigned tasks."""
    push_notification(self.datasource, updated=1)
    merged = copy(original)
    merged.update(updates)
    task = merged.get('task')
    if task and task.get('desk'):
        add_activity(ACTIVITY_UPDATE,
                     'updated task {{ subject }} for item {{ type }}',
                     item=merged,
                     subject=get_subject(merged))
def delete(self, lookup):
    """Unlink an update (rewrite) from its original story.

    Clears the rewrite-related fields, assigns a fresh event id,
    persists the change and notifies clients.
    """
    target_id = request.view_args['target_id']
    archive_service = get_resource_service(ARCHIVE)
    target = archive_service.find_one(req=None, _id=target_id)
    updates = {}
    if target.get('rewrite_of'):
        # remove the rewrite info
        ArchiveSpikeService().update_rewrite(target)
    if not target.get('rewrite_of'):
        # there is nothing to do
        raise SuperdeskApiError.badRequestError("Only updates can be unlinked!")
    # clear every rewrite-related field that is set
    for field in ('rewrite_of', 'anpa_take_key', 'rewrite_sequence', 'sequence'):
        if target.get(field):
            updates[field] = None
    updates['event_id'] = generate_guid(type=GUID_TAG)
    archive_service.system_update(target_id, updates, target)
    current_user = get_user(required=True)
    push_notification('item:unlink', item=target_id, user=str(current_user.get(config.ID_FIELD)))
    app.on_archive_item_updated(updates, target, ITEM_UNLINK)
def add_activity(msg, item=None, notify=None, **data):
    """Add an activity into activity log.

    This will became part of current user activity log.

    If there is someone set to be notified it will make it into his
    notifications box.
    """
    activity = {'message': msg, 'data': data}
    current_user = getattr(flask.g, 'user', None)
    if current_user:
        activity['user'] = current_user.get('_id')
    # unread flags per recipient (empty when nobody is notified)
    activity['read'] = {str(uid): 0 for uid in notify} if notify else {}
    if item:
        activity['item'] = str(item)
    post_internal(ActivityResource.endpoint_name, activity)
    push_notification(ActivityResource.endpoint_name, _dest=activity['read'])
def on_created(self, docs):
    """Notify clients, version the new tasks and log desk-assigned ones."""
    push_notification(self.datasource, created=1)
    for doc in docs:
        insert_into_versions(doc['_id'])
        if not self.__is_assigned_to_a_desk(doc):
            continue
        add_activity(ACTIVITY_CREATE,
                     'added new task {{ subject }} of type {{ type }}',
                     item=doc,
                     subject=get_subject(doc),
                     type=doc['type'])
def remove_expired(self, provider):
    """Remove expired ingest data for *provider* and notify clients.

    :raises ProviderError: expiredContentError wrapping any failure,
        which is logged with its traceback first.
    """
    try:
        remove_expired_data(provider)
        push_notification('ingest:cleaned')
    except Exception as err:  # idiom fix: no parentheses needed around a single class
        logger.exception(err)
        raise ProviderError.expiredContentError(err, provider)
def on_created(self, docs):
    """Invalidate blog caches for new posts and notify clients."""
    super().on_created(docs)
    # invalidate cache for updated blog
    for post in docs:
        app.blog_cache.invalidate(post.get('blog'))
    # send notifications
    push_notification('posts', created=True)
def add_activity(activity_name, msg, resource=None, item=None, notify=None, notify_desks=None,
                 can_push_notification=True, **data):
    """
    Adds an activity into activity log.

    This will became part of current user activity log.

    If there is someone set to be notified it will make it into his notifications box.

    :param activity_name: Name of the activity
    :type activity_name: str
    :param msg: Message to be recorded in the activity log
    :type msg: str
    :param resource: resource name generating this activity
    :type resource: str
    :param item: article instance, if the activity is being recorded against an article, default None
    :type item: dict
    :param notify: user identifiers against whom the activity should be recorded, default None
    :type notify: list
    :param notify_desks: desk identifiers if someone mentions Desk Name in comments widget, default None
    :type notify_desks: list
    :param can_push_notification: flag indicating if a notification should be pushed via WebSocket, default True
    :type can_push_notification: bool
    :param data: kwargs
    :type data: dict
    :return: activity object
    :rtype: dict
    """
    activity = {
        'name': activity_name,
        'message': msg,
        'data': data,
        'resource': resource
    }
    user = getattr(g, 'user', None)
    if user:
        activity['user'] = user.get('_id')
        activity['user_name'] = user.get('display_name', user.get('username'))
    # recipients carry per-user / per-desk read flags
    activity['recipients'] = []
    if notify:
        activity['recipients'] = [{'user_id': ObjectId(_id), 'read': False} for _id in notify]
    if notify_desks:
        activity['recipients'].extend([{'desk_id': ObjectId(_id), 'read': False} for _id in notify_desks])
    if item:
        activity['item'] = str(item.get('guid', item.get('_id')))
        activity['item_slugline'] = item.get('slugline', item.get('headline')) or item.get('unique_name')
        if item.get('task') and item['task'].get('desk'):
            activity['desk'] = ObjectId(item['task']['desk'])
    get_resource_service(ActivityResource.endpoint_name).post([activity])
    if can_push_notification:
        push_notification(ActivityResource.endpoint_name,
                          _dest=activity['recipients'], activity=activity)
    return activity
def on_created(self, docs):
    """
    Send notification to clients that new contact(s) have been created
    :param docs:
    :return:
    """
    created_ids = [contact.get(config.ID_FIELD) for contact in docs]
    push_notification('contacts:create', _id=created_ids)
def on_deleted(self, doc):
    """
    Send a notification to clients that a contact has been deleted
    :param doc:
    :return:
    """
    deleted_ids = [doc.get(config.ID_FIELD)]
    push_notification('contacts:deleted', _id=deleted_ids)
def add_activity(activity_name, msg, resource=None, item=None, notify=None, **data):
    """Add an activity into activity log.

    This will became part of current user activity log.

    If there is someone set to be notified it will make it into his
    notifications box.
    """
    activity = {"name": activity_name, "message": msg, "data": data, "resource": resource}
    current_user = getattr(g, "user", None)
    if current_user:
        activity["user"] = current_user.get("_id")
    # unread flags per recipient (empty when nobody is notified)
    activity["read"] = {str(uid): 0 for uid in notify} if notify else {}
    if item:
        activity["item"] = str(item.get("guid", item.get("_id")))
        task = item.get("task")
        if task and task.get("desk"):
            activity["desk"] = ObjectId(task["desk"])
    superdesk.get_resource_service(ActivityResource.endpoint_name).post([activity])
    push_notification(ActivityResource.endpoint_name, _dest=activity["read"])
def on_updated(self, updates, original):
    """Version, notify and log after a task update.

    Pushes a 'task:stage' event when the stage changed (otherwise a plain
    update counter), re-versions on stage or desk changes, and records an
    activity entry for desk-assigned items.
    """
    updated = copy(original)
    updated.update(updates)
    if self._stage_changed(updates, original):
        insert_into_versions(doc=updated)
    new_task = updates.get("task", {})
    old_task = original.get("task", {})
    if new_task.get("stage") != old_task.get("stage"):
        push_notification(
            "task:stage",
            new_stage=str(new_task.get("stage", "")),
            old_stage=str(old_task.get("stage", "")),
            new_desk=str(new_task.get("desk", "")),
            old_desk=str(old_task.get("desk", "")),
        )
    else:
        push_notification(self.datasource, updated=1)
    if is_assigned_to_a_desk(updated):
        # a desk move without a stage change still needs a new version
        if self.__is_content_assigned_to_new_desk(original, updates) and not self._stage_changed(updates, original):
            insert_into_versions(doc=updated)
        add_activity(
            ACTIVITY_UPDATE,
            "updated task {{ subject }} for item {{ type }}",
            self.datasource,
            item=updated,
            subject=get_subject(updated),
            type=updated["type"],
        )
def create(self, docs, **kwargs):
    """Copy an archived, desk-less item once per doc.

    :return: list of GUIDs of the copied items
    :raises SuperdeskApiError: notFoundError when the source item is
        missing, preconditionFailedError when the item sits on a desk
    :raises InvalidStateTransitionError: when 'copy' is not a valid
        transition from the item's current state
    """
    guid_of_item_to_be_copied = request.view_args['guid']
    guid_of_copied_items = []
    # the service lookup is loop-invariant: resolve it once (was inside the loop)
    archive_service = get_resource_service(ARCHIVE)
    for doc in docs:
        archived_doc = archive_service.find_one(req=None, _id=guid_of_item_to_be_copied)
        if not archived_doc:
            raise SuperdeskApiError.notFoundError('Fail to found item with guid: %s' %
                                                  guid_of_item_to_be_copied)
        current_desk_of_item = archived_doc.get('task', {}).get('desk')
        if current_desk_of_item:
            raise SuperdeskApiError.preconditionFailedError(message='Copy is not allowed on items in a desk.')
        if not is_workflow_state_transition_valid('copy', archived_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        new_guid = archive_service.duplicate_content(archived_doc)
        guid_of_copied_items.append(new_guid)
        if kwargs.get('notify', True):
            push_notification('item:copy', copied=1)
    return guid_of_copied_items
def update(self, id, updates, original):
    """Spike an archive item.

    Validates the transition, sets expiry (desk override or global
    setting), records the revert state, clears rewrite links, persists
    and notifies clients.
    """
    if not is_workflow_state_transition_valid('spike', original[ITEM_STATE]):
        raise InvalidStateTransitionError()
    package_service = PackageService()
    current_user = get_user(required=True)
    archived = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
    expiry_minutes = app.settings['SPIKE_EXPIRY_MINUTES']
    # a desk may override the global spike expiry
    if is_assigned_to_a_desk(archived):
        desk = get_resource_service('desks').find_one(_id=archived['task']['desk'], req=None)
        expiry_minutes = desk.get('spike_expiry', expiry_minutes)
    updates[EXPIRY] = get_expiry_date(expiry_minutes)
    updates[REVERT_STATE] = archived.get(ITEM_STATE, None)
    if original.get('rewrite_of'):
        updates['rewrite_of'] = None
    spiked = self.backend.update(self.datasource, id, updates, original)
    push_notification('item:spike', item=str(spiked.get('_id')), user=str(current_user))
    package_service.remove_spiked_refs_from_package(id)
    return spiked
def update(self, id, updates, original):
    """Publish an archived item (and its package members for composites).

    Saves initial changes, queues transmission for non-composite items,
    moves the item to the publish stage, marks it 'published' and
    notifies clients.

    :raises SuperdeskApiError: badRequestError on a missing key,
        internalError on any other failure
    """
    archived_item = super().find_one(req=None, _id=id)
    try:
        if archived_item['type'] == 'composite':
            self.__publish_package_items(archived_item, updates[config.LAST_UPDATED])
        # document is saved to keep the initial changes
        self.backend.update(self.datasource, id, updates, original)
        original.update(updates)
        if archived_item['type'] != 'composite':
            # queue only text items
            self.queue_transmission(original)
            task = self.__send_to_publish_stage(original)
            if task:
                updates['task'] = task
        # document is saved to change the status
        updates[config.CONTENT_STATE] = 'published'
        item = self.backend.update(self.datasource, id, updates, original)
        original.update(updates)
        user = get_user()
        push_notification('item:publish', item=str(item.get('_id')), user=str(user))
        original.update(super().find_one(req=None, _id=id))
    except KeyError as e:
        raise SuperdeskApiError.badRequestError(
            message="Key is missing on article to be published: {}".format(str(e)))
    except Exception as e:
        # BUG FIX: the original called "...%s".format(id), which never
        # interpolates, and passed `e` as a spurious lazy-format argument.
        # logger.exception logs the message with the traceback instead.
        logger.exception("Something bad happened while publishing %s", id)
        raise SuperdeskApiError.internalError(message="Failed to publish the item: {}".format(str(e)))
def update(self, provider):
    """Scan the provider's path and yield one parsed item per ready file.

    Generator: failures are logged and the offending file (if any) is
    moved away; an ``ingest:update`` notification is always pushed when
    the scan finishes.
    """
    self.provider = provider
    self.path = provider.get('config', {}).get('path', None)
    if not self.path:
        return
    filename = None  # track the file being processed for error handling
    try:
        for filename in os.listdir(self.path):
            if os.path.isfile(os.path.join(self.path, filename)):
                filepath = os.path.join(self.path, filename)
                stat = os.lstat(filepath)
                last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                if is_ready(last_updated, provider.get('updated')):
                    with open(os.path.join(self.path, filename), 'r') as f:
                        item = Parser().parse_message(etree.fromstring(f.read()))
                    item['_created'] = item['firstcreated'] = utc.localize(item['firstcreated'])
                    item['_updated'] = item['versioncreated'] = utc.localize(item['versioncreated'])
                    item.setdefault('provider', provider.get('name', provider['type']))
                    self.move_the_current_file(filename, success=True)
                    yield [item]
    except Exception as err:
        logger.exception(err)
        # BUG FIX: only move a file if the loop actually bound one;
        # previously a failure in os.listdir() raised NameError here.
        if filename:
            self.move_the_current_file(filename, success=False)
    finally:
        push_notification('ingest:update')
def update(self, id, updates, original):
    """Cancel a planning item, or only all of its coverages.

    Cancels every coverage that is not already cancelled/completed, then
    either stops (cancel_all_coverage) or cancels the whole plan. Pushes
    'coverage:cancelled' or 'planning:cancelled' accordingly.

    :return: the updated planning item
    """
    user = get_user(required=True).get(config.ID_FIELD, '')
    session = get_auth().get(config.ID_FIELD, '')
    coverage_states = get_resource_service('vocabularies').find_one(
        req=None, _id='newscoveragestatus')
    event_cancellation = request.view_args.get('event_cancellation')
    cancel_all_coverage = updates.pop('cancel_all_coverage', False)
    event_reschedule = updates.pop('event_reschedule', False)
    coverage_cancel_state = None
    if coverage_states:
        coverage_cancel_state = next(
            (x for x in coverage_states.get('items', []) if x['qcode'] == 'ncostat:notint'), None)
        # BUG FIX: next() returns None when the qcode is missing from the
        # vocabulary; guard before mutating to avoid AttributeError.
        if coverage_cancel_state:
            coverage_cancel_state.pop('is_active', None)
    ids = []
    # work on a copy so the stored coverages are not mutated prematurely
    updates['coverages'] = deepcopy(original.get('coverages'))
    coverages = updates.get('coverages') or []
    reason = updates.pop('reason', None)
    planning_service = get_resource_service('planning')
    for coverage in coverages:
        if coverage['workflow_status'] not in [WORKFLOW_STATE.CANCELLED,
                                               ASSIGNMENT_WORKFLOW_STATE.COMPLETED]:
            ids.append(coverage.get('coverage_id'))
            planning_service.cancel_coverage(coverage, coverage_cancel_state,
                                             coverage.get('workflow_status'), None, reason,
                                             event_cancellation, event_reschedule)
    if cancel_all_coverage:
        item = None
        if len(ids) > 0:
            item = self.backend.update(self.datasource, id, updates, original)
            push_notification('coverage:cancelled',
                              planning_item=str(original[config.ID_FIELD]),
                              user=str(user),
                              session=str(session),
                              reason=reason,
                              coverage_state=coverage_cancel_state,
                              etag=item.get('_etag'),
                              ids=ids)
        return item if item else self.find_one(req=None, _id=id)
    self._cancel_plan(updates, reason)
    item = self.backend.update(self.datasource, id, updates, original)
    push_notification('planning:cancelled',
                      item=str(original[config.ID_FIELD]),
                      user=str(user),
                      session=str(session),
                      reason=reason,
                      coverage_state=coverage_cancel_state,
                      event_cancellation=event_cancellation)
    return item
def on_create(self, docs):
    """Default the owning user from the URL and process each saved search."""
    for search_doc in docs:
        if 'user' not in search_doc and request:
            search_doc['user'] = request.view_args.get('user')
        self.process(search_doc)
    push_notification('savedsearch:update')
def on_deleted(self, doc):
    # broadcast a generic change notification; the payload carries no detail
    push_notification(UPDATE_NOTIFICATION)
def update_provider(provider):
    """Update given provider."""
    provider_type = provider.get('type')
    if provider_type in providers:
        for items in providers[provider_type].update(provider):
            ingest_items(provider, items)
    # notify clients regardless of whether the provider type was known
    push_notification('ingest:update')
def on_deleted(self, doc):
    """Run base deletion hooks, then notify clients one item was deleted."""
    super().on_deleted(doc)
    push_notification('items', deleted=1)
def on_updated(self, updates, original):
    """Run base update hooks, then notify clients one item was updated."""
    super().on_updated(updates, original)
    push_notification('items', updated=1)
def on_created(self, docs):
    """Run base creation hooks, then notify clients one item was created."""
    super().on_created(docs)
    push_notification('items', created=1)
def on_create(self, docs):
    """Default the owning user from the URL and process each document."""
    for new_doc in docs:
        if 'user' not in new_doc and request:
            new_doc['user'] = request.view_args.get('user')
        self.process(new_doc)
    push_notification(UPDATE_NOTIFICATION)
def on_deleted(self, doc):
    # notify clients that one document of this datasource was deleted
    push_notification(self.datasource, deleted=1)
def add_activity(activity_name, msg, resource=None, item=None, notify=None, notify_desks=None,
                 can_push_notification=True, **data):
    """
    Adds an activity into activity log.

    This will became part of current user activity log.

    If there is someone set to be notified it will make it into his notifications box.

    :param activity_name: Name of the activity
    :type activity_name: str
    :param msg: Message to be recorded in the activity log
    :type msg: str
    :param resource: resource name generating this activity
    :type resource: str
    :param item: article instance, if the activity is being recorded against an article, default None
    :type item: dict
    :param notify: user identifiers against whom the activity should be recorded, default None
    :type notify: list
    :param notify_desks: desk identifiers if someone mentions Desk Name in comments widget, default None
    :type notify_desks: list
    :param can_push_notification: flag indicating if a notification should be pushed via WebSocket, default True
    :type can_push_notification: bool
    :param data: kwargs
    :type data: dict
    :return: activity object
    :rtype: dict
    """
    activity = {
        'name': activity_name,
        'message': msg,
        'data': data,
        'resource': resource
    }
    # the websocket event name defaults to the activity endpoint; it is
    # switched to the activity name when there are explicit recipients
    name = ActivityResource.endpoint_name
    user = getattr(g, 'user', None)
    if user:
        activity['user'] = user.get('_id')
    activity['recipients'] = []
    if notify:
        activity['recipients'] = [{'user_id': ObjectId(_id), 'read': False} for _id in notify]
        name = activity_name
    if notify_desks:
        activity['recipients'].extend([{'desk_id': ObjectId(_id), 'read': False} for _id in notify_desks])
        name = activity_name
    if item:
        activity['item'] = str(item.get('guid', item.get('_id')))
        if item.get('task') and item['task'].get('desk'):
            activity['desk'] = ObjectId(item['task']['desk'])
    get_resource_service(ActivityResource.endpoint_name).post([activity])
    if can_push_notification:
        push_notification(name, _dest=activity['recipients'])
    return activity
def on_updated(self, updates, original):
    """Notify clients that an archived item has been deleted."""
    current_user = get_user()
    push_notification(
        "item:deleted:archived",
        item=str(original[config.ID_FIELD]),
        user=str(current_user.get(config.ID_FIELD)))
def on_created(self, docs):
    """Announce each new stage and refresh users' stage visibility."""
    for new_doc in docs:
        push_notification(self.notification_key, created=1,
                          desk_id=str(new_doc.get(config.ID_FIELD)))
        # NOTE(review): visibility refresh done per created doc -- confirm it
        # is not meant to run once after the loop
        get_resource_service("users").update_stage_visibility_for_users()
def _update(self, provider, update):
    """Ingest event files (XML, ICS or other formats) from the provider's path.

    Generator: yields lists of parsed items for each file newer than the
    provider's ``last_updated``; processed files are moved away and an
    ``ingest:update`` notification is pushed once the scan completes.
    """
    self.provider = provider
    self.path = provider.get('config', {}).get('path', None)
    if not self.path:
        logger.warn(
            'File Feeding Service {} is configured without path. Please check the configuration'
            .format(provider['name']))
        return []
    registered_parser = self.get_feed_parser(provider)
    for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
        try:
            last_updated = None
            file_path = os.path.join(self.path, filename)
            if os.path.isfile(file_path):
                stat = os.lstat(file_path)
                last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                if self.is_latest_content(last_updated, provider.get('last_updated')):
                    # pick the parse strategy based on the registered parser type
                    if isinstance(registered_parser, NTBEventXMLFeedParser):
                        logger.info('Ingesting xml events')
                        with open(file_path, 'rb') as f:
                            xml = ElementTree.parse(f)
                        parser = self.get_feed_parser(provider, xml.getroot())
                        item = parser.parse(xml.getroot(), provider)
                    elif isinstance(registered_parser, IcsTwoFeedParser):
                        logger.info('Ingesting ics events')
                        with open(file_path, 'rb') as f:
                            cal = Calendar.from_ical(f.read())
                        parser = self.get_feed_parser(provider, cal)
                        item = parser.parse(cal, provider)
                    else:
                        logger.info('Ingesting events with unknown parser')
                        parser = self.get_feed_parser(provider, file_path)
                        item = parser.parse(file_path, provider)
                    self.after_extracting(item, provider)
                    self.move_file(self.path, filename, provider=provider, success=True)
                    # parsers may return a single item or a list of items
                    if isinstance(item, list):
                        yield item
                    else:
                        yield [item]
                else:
                    # older than last ingest run -- archive without re-parsing
                    self.move_file(self.path, filename, provider=provider, success=True)
        except Exception as ex:
            # only quarantine files whose content is confirmed old
            if last_updated and self.is_old_content(last_updated):
                self.move_file(self.path, filename, provider=provider, success=False)
            raise ParserError.parseFileError(
                '{}-{}'.format(provider['name'], self.NAME), filename, ex, provider)
    push_notification('ingest:update')
def on_created(self, docs):
    """Push a 'created' notification for every new document."""
    for created_doc in docs:
        push_notification(self.notification_key, created=1,
                          desk_id=str(created_doc.get(config.ID_FIELD)))
def on_replaced(self, document, original):
    """Notify clients that an event template was replaced."""
    current_user = get_user()
    push_notification('events-template:replaced',
                      item=str(original[config.ID_FIELD]),
                      user=str(current_user.get(config.ID_FIELD)))
def _push_notification(self, _id, event_name):
    """Push socket notification"""
    # emit *event_name* for the given item id on behalf of the current user
    push_notification(event_name, item=str(_id), user=str(get_user_id()))
def on_created(self, docs):
    """Notify clients about every newly created event template."""
    current_user = get_user()
    user_id = str(current_user.get(config.ID_FIELD))
    for template in docs:
        push_notification('events-template:created',
                          item=str(template.get(config.ID_FIELD)),
                          user=user_id)
def on_deleted(self, doc):
    """Notify clients that an event template was deleted."""
    current_user = get_user()
    push_notification('events-template:deleted',
                      item=str(doc[config.ID_FIELD]),
                      user=str(current_user.get(config.ID_FIELD)))
def on_updated(self, updates, original):
    """Broadcast that a document of this resource was updated."""
    push_notification(self.notification_key, updated=1)
def on_updated(self, updates, original):
    """Notify clients that an events template has been updated."""
    editor = get_user()
    push_notification(
        'events-template:updated',
        item=str(original[config.ID_FIELD]),
        user=str(editor.get(config.ID_FIELD)))
def update_provider(provider, rule_set=None, routing_scheme=None):
    """Fetch items from ingest provider, ingest them into Superdesk and update the provider.

    :param provider: Ingest Provider data
    :param rule_set: Translation Rule Set if one is associated with Ingest Provider.
    :param routing_scheme: Routing Scheme if one is associated with Ingest Provider.
    :raises IngestFileError: wraps any failure during the update run
    """
    # Serialize runs per provider: bail out if another worker holds the lock.
    lock_name = get_lock_id('ingest', provider['name'], provider[superdesk.config.ID_FIELD])
    if not lock(lock_name, expire=1810):
        return
    try:
        feeding_service = registered_feeding_services[provider['feeding_service']]
        feeding_service = feeding_service.__class__()
        update = {LAST_UPDATED: utcnow()}

        for items in feeding_service.update(provider, update):
            ingest_items(items, provider, feeding_service, rule_set, routing_scheme)
            if items:
                # Track the newest item timestamp so idle detection below works.
                last_item_update = max(
                    [item['versioncreated'] for item in items if item.get('versioncreated')],
                    default=utcnow())
                if not update.get(LAST_ITEM_UPDATE) or update[LAST_ITEM_UPDATE] < last_item_update:
                    update[LAST_ITEM_UPDATE] = last_item_update

        # Some Feeding Services update the collection and by this time the _etag might have been changed.
        # So it's necessary to fetch it once again. Otherwise, OriginalChangedError is raised.
        ingest_provider_service = superdesk.get_resource_service('ingest_providers')
        provider = ingest_provider_service.find_one(req=None, _id=provider[superdesk.config.ID_FIELD])
        ingest_provider_service.system_update(provider[superdesk.config.ID_FIELD], update, provider)

        if LAST_ITEM_UPDATE not in update and get_is_idle(provider):
            # No new content and the provider has been idle: alert administrators.
            admins = superdesk.get_resource_service('users').get_users_by_user_type('administrator')
            notify_and_add_activity(
                ACTIVITY_EVENT,
                'Provider {{name}} has gone strangely quiet. Last activity was on {{last}}',
                resource='ingest_providers', user_list=admins, name=provider.get('name'),
                last=provider[LAST_ITEM_UPDATE].replace(
                    tzinfo=timezone.utc).astimezone(tz=None).strftime("%c"))

        logger.info('Provider {0} updated'.format(provider[superdesk.config.ID_FIELD]))

        if LAST_ITEM_UPDATE in update:  # Only push a notification if there has been an update
            push_notification('ingest:update', provider_id=str(provider[superdesk.config.ID_FIELD]))
    except Exception as e:
        logger.error("Failed to ingest file: {error}".format(error=e))
        raise IngestFileError(3000, e, provider)
    finally:
        # Always release the per-provider lock, even on failure.
        unlock(lock_name)
def on_deleted(self, doc):
    """Broadcast that a document of this resource was deleted."""
    push_notification(self.notification_key, deleted=1)
def update(self, id, updates, original):
    """Mark an assignment completed and notify the interested parties.

    :param id: assignment id being updated
    :param dict updates: fields being changed (mutated in place here)
    :param dict original: assignment document before the update
    :return: the updated assignment document
    """
    user = get_user(required=True).get(config.ID_FIELD, '')
    session = get_auth().get(config.ID_FIELD, '')

    updates['assigned_to'] = deepcopy(original).get('assigned_to')

    # If we are confirming availability, save the revert state for revert action
    coverage_type = original.get('planning', {}).get('g2_content_type')
    if coverage_type != 'text':
        updates['assigned_to']['revert_state'] = updates['assigned_to']['state']

    updates['assigned_to']['state'] = ASSIGNMENT_WORKFLOW_STATE.COMPLETED
    remove_lock_information(updates)

    item = self.backend.update(self.datasource, id, updates, original)

    # publish the planning item
    get_resource_service('assignments').publish_planning(original['planning_item'])

    # Save history if user initiates complete
    if coverage_type == 'text':
        get_resource_service('assignments_history').on_item_complete(updates, original)
    else:
        get_resource_service('assignments_history').on_item_confirm_availability(updates, original)

    push_notification('assignments:completed',
                      item=str(original[config.ID_FIELD]),
                      planning=original.get('planning_item'),
                      assigned_user=(original.get('assigned_to') or {}).get('user'),
                      assigned_desk=(original.get('assigned_to') or {}).get('desk'),
                      assignment_state=ASSIGNMENT_WORKFLOW_STATE.COMPLETED,
                      user=str(user),
                      session=str(session),
                      coverage=original.get('coverage_item'))

    # Send notification that the work has been completed
    # Determine the display name of the assignee
    assigned_to_user = get_resource_service('users').find_one(req=None, _id=user)
    assignee = assigned_to_user.get('display_name') if assigned_to_user else 'Unknown'

    # Prefer the assigning user as target; fall back to the assigning desk.
    target_user = original.get('assigned_to', {}).get('assignor_user')
    if target_user is None:
        target_user = original.get('assigned_to', {}).get('assignor_desk')

    PlanningNotifications().notify_assignment(
        target_user=target_user,
        message='{{coverage_type}} coverage \"{{slugline}}\" has been '
                'completed by {{assignee}}',
        assignee=assignee,
        coverage_type=get_coverage_type_name(
            original.get('planning', {}).get('g2_content_type', '')),
        slugline=original.get('planning', {}).get('slugline'),
        omit_user=True)
    return item
def on_deleted(self, doc):
    """Notify clients that a stage was deleted, including its parent desk."""
    stage_id = str(doc.get(config.ID_FIELD))
    desk_id = str(doc.get("desk"))
    push_notification(self.notification_key, deleted=1, stage_id=stage_id, desk_id=desk_id)
def on_activity_updated(updates, original):
    """Push a ``desk:mention`` notification when the activity belongs to a desk.

    Uses a truthiness check: a missing, empty or ``None`` desk value sends no
    notification. (The previous ``original.get('desk', '') != ''`` comparison
    wrongly fired when ``desk`` was explicitly ``None``.)

    :param dict updates: activity fields being changed (unused)
    :param dict original: activity document before the update
    """
    if original.get('desk'):
        push_notification('desk:mention')
def create(self, docs, **kwargs):
    """Toggle marked desk status for given desk and item.

    :param list docs: dicts with ``marked_item`` (guid) and ``marked_desk``
        (desk id) keys
    :return: list of affected item ids (``None`` for items not found)
    """
    service = get_resource_service('archive')
    published_service = get_resource_service('published')
    ids = []
    for doc in docs:
        item = service.find_one(req=None, guid=doc['marked_item'])
        if not item:
            # Keep positional correspondence with docs even when lookup fails.
            ids.append(None)
            continue

        ids.append(item['_id'])
        marked_desks = item.get('marked_desks', [])
        if not marked_desks:
            marked_desks = []

        # An existing mark for this desk means the request is a toggle-off.
        existing_mark = next(
            (m for m in marked_desks if m['desk_id'] == doc['marked_desk']), None)

        if existing_mark:
            # there is an existing mark so this is un-mark action
            marked_desks = [
                m for m in marked_desks if m['desk_id'] != doc['marked_desk']
            ]
            marked_desks_on = False  # highlight toggled off
        else:
            # there is no existing mark so this is mark action
            user = get_user() or {}
            new_mark = {}
            new_mark['desk_id'] = doc['marked_desk']
            new_mark['user_marked'] = str(user.get(config.ID_FIELD, ''))
            new_mark['date_marked'] = utcnow()
            marked_desks.append(new_mark)
            marked_desks_on = True

        updates = {'marked_desks': marked_desks}
        service.system_update(item['_id'], updates, item)

        # Sync published copies: only the matching version on mark,
        # every published version on un-mark.
        publishedItems = published_service.find({'item_id': item['_id']})
        for publishedItem in publishedItems:
            if publishedItem['_current_version'] == item[
                    '_current_version'] or not marked_desks_on:
                updates = {'marked_desks': marked_desks}
                published_service.system_update(publishedItem['_id'],
                                                updates, publishedItem)

        push_notification('item:marked_desks',
                          marked=int(marked_desks_on),
                          item_id=item['_id'],
                          mark_id=str(doc['marked_desk']))

        # Record the mark/unmark in the archive item history.
        if marked_desks_on:
            app.on_archive_item_updated({'desk_id': doc['marked_desk']}, item,
                                        ITEM_MARK)
        else:
            app.on_archive_item_updated({'desk_id': doc['marked_desk']}, item,
                                        ITEM_UNMARK)

    return ids
def __send_notification(self, updates, user):
    """Push the socket notification that matches the kind of user update.

    The elif chain gives priority: disabled > inactivated > role change >
    privileges > user type > generic update; at most one branch runs.

    :param dict updates: fields being changed on the user
    :param dict user: user document before the update
    """
    user_id = user['_id']

    if 'is_enabled' in updates and not updates['is_enabled']:
        push_notification('user_disabled', updated=1, user_id=str(user_id))
    elif 'is_active' in updates and not updates['is_active']:
        push_notification('user_inactivated', updated=1, user_id=str(user_id))
    elif 'role' in updates:
        push_notification('user_role_changed', updated=1, user_id=str(user_id))
    elif 'privileges' in updates:
        added, removed, modified = compare_preferences(
            user.get('privileges', {}), updates['privileges'])
        # A privilege was removed or flipped from granted (1) to revoked (0).
        if len(removed) > 0 or (1, 0) in modified.values():
            push_notification('user_privileges_revoked', updated=1, user_id=str(user_id))
        if len(added) > 0:
            # New privileges take effect after re-login; record an activity instead.
            add_activity(
                ACTIVITY_UPDATE,
                'user {{user}} has been granted new privileges: Please re-login.',
                self.datasource,
                notify=[user_id],
                user=user.get('display_name', user.get('username')))
    elif 'user_type' in updates:
        if not is_admin(updates):
            push_notification('user_type_changed', updated=1, user_id=str(user_id))
        else:
            add_activity(
                ACTIVITY_UPDATE,
                'user {{user}} is updated to administrator: Please re-login.',
                self.datasource,
                notify=[user_id],
                user=user.get('display_name', user.get('username')))
    else:
        # Fallback: generic "user updated" notification.
        push_notification('user', updated=1, user_id=str(user_id))
def _publish_blog_embed_on_s3(blog_or_id, theme=None, output=None, safe=True, save=True):
    """Publish a blog embed and store the resulting public URL on the blog.

    :param blog_or_id: blog document, or a blog id as str/ObjectId
    :param theme: theme name to publish with (blog preference used otherwise)
    :param output: optional output channel document; its theme takes precedence
    :param safe: when True, unsupported-storage errors fall back to a
        constructed URL instead of raising
    :param save: when True, persist the computed URL(s) on the blog document
    :return: tuple ``(public_url, public_urls)``; implicitly ``None`` when
        the blog id cannot be resolved
    """
    blogs = get_resource_service('client_blogs')

    # Accept either a blog document or a blog id.
    if isinstance(blog_or_id, (str, ObjectId)):
        blog_id = blog_or_id
        blog = blogs.find_one(req=None, _id=blog_or_id)
        if not blog:
            return
    else:
        blog = blog_or_id
        blog_id = blog['_id']

    blog_preferences = blog.get('blog_preferences', {})
    blog_theme = blog_preferences.get('theme')

    # get the `output` data if the `output_id` is set.
    # if output and isinstance(output, str):
    #     output = get_resource_service('outputs').find_one(req=None, _id=output)

    output_id = None
    if output:
        # get the output `_id`
        output_id = str(output.get('_id'))
        # compile a theme if there is an `output`.
        if output.get('theme'):
            theme = output.get('theme', blog_theme)

    # NOTE(review): `public_url` is only assigned inside this branch; if the
    # blog has no theme preference the final return would raise NameError —
    # confirm callers always provide a blog with a theme set.
    if blog_theme:
        try:
            public_url = publish_embed(blog_id,
                                       theme,
                                       output,
                                       api_host='//{}/'.format(app.config['SERVER_NAME']))
        except MediaStorageUnsupportedForBlogPublishing as e:
            if not safe:
                raise e
            logger.warning('Media storage not supported for blog "{}"'.format(blog_id))
            # TODO: Add reverse url function.
            public_url = '{}://{}/embed/{}/{}{}'.format(app.config['URL_PROTOCOL'],
                                                        app.config['SERVER_NAME'],
                                                        blog_id,
                                                        '{}'.format(output_id) if output_id else '',
                                                        '/theme/{}'.format(theme) if theme else '')

    # Store the URL under the right bucket: per-output, per-theme, or default.
    public_urls = blog.get('public_urls', {'output': {}, 'theme': {}})
    updates = {'public_urls': public_urls}
    if (output_id and theme) or output_id:
        public_urls['output'][output_id] = public_url
    elif theme:
        public_urls['theme'][theme] = public_url
    else:
        updates['public_url'] = public_url

    if save:
        try:
            try:
                blogs.system_update(blog_id, updates, blog)
            except DataLayer.OriginalChangedError:
                # Blog changed concurrently: re-fetch and retry once.
                blog = blogs.find_one(req=None, _id=blog_id)
                blogs.system_update(blog_id, updates, blog)
        except SuperdeskApiError:
            logger.warning('api error: unable to update blog "{}"'.format(blog_id))

    push_notification('blog', published=1, blog_id=blog_id, **updates)
    return public_url, public_urls
def on_created(self, docs):
    """Notify clients about new requests and email the owner of each one."""
    for request_doc in docs:
        push_notification(
            self.notification_key,
            created=1,
            request_id=str(request_doc.get('_id')))
        # and members with emails
        notify_the_owner(request_doc, app.config['CLIENT_URL'])
def create(self, docs):
    """Link production content items to assignments.

    For each doc the referenced assignment is fulfilled with the referenced
    archive item (optionally reassigning it to the current user) and the
    assignment id is wired onto the item and any published copies.

    :param list docs: dicts with ``assignment_id``, ``item_id`` and
        ``reassign`` keys (popped from each doc)
    :return: list of linked item ids
    """
    ids = []
    production = get_resource_service('archive')
    assignments_service = get_resource_service('assignments')
    assignments_complete = get_resource_service('assignments_complete')
    items = []

    for doc in docs:
        assignment = assignments_service.find_one(
            req=None, _id=doc.pop('assignment_id'))
        item = production.find_one(req=None, _id=doc.pop('item_id'))
        reassign = doc.pop('reassign')

        # set the state to in progress if item in published state
        updates = {'assigned_to': deepcopy(assignment.get('assigned_to'))}
        updates['assigned_to']['state'] = ASSIGNMENT_WORKFLOW_STATE.COMPLETED if \
            item.get(ITEM_STATE) in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED] else \
            ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS

        # on fulfiling the assignment the user is assigned the assignment, for add to planning it is not
        if reassign:
            user = get_user()
            if user and str(user.get(config.ID_FIELD)) != (
                    assignment.get('assigned_to') or {}).get('user'):
                updates['assigned_to']['user'] = str(
                    user.get(config.ID_FIELD))

        # Published/corrected items complete the assignment outright;
        # anything else just patches it (state set above).
        if item.get(ITEM_STATE) in [
                CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED
        ]:
            assignments_complete.update(assignment[config.ID_FIELD], updates,
                                        assignment)
        else:
            assignments_service.patch(assignment[config.ID_FIELD], updates)

        # reference the item to the assignment
        production.system_update(
            item[config.ID_FIELD],
            {'assignment_id': assignment[config.ID_FIELD]}, item)

        # if the item is publish then update those items as well
        if item.get(ITEM_STATE) in PUBLISH_STATES:
            get_resource_service('published').update_published_items(
                item[config.ID_FIELD], 'assignment_id',
                assignment[config.ID_FIELD])
            get_resource_service('delivery').post([{
                'item_id': item[config.ID_FIELD],
                'assignment_id': assignment[config.ID_FIELD],
                'planning_id': assignment['planning_item'],
                'coverage_id': assignment['coverage_item']
            }])

        item['assignment_id'] = assignment[config.ID_FIELD]

        # Save assignment history
        assignment_history_service = get_resource_service(
            'assignments_history')
        assignment_history_service.on_item_content_link(updates, assignment)

        doc.update(item)
        ids.append(doc[config.ID_FIELD])
        items.append(item)

    # Single aggregate notification for all linked items; the content:link
    # event carries the last-processed item/assignment pair.
    push_content_notification(items)
    push_notification('content:link',
                      item=str(item[config.ID_FIELD]),
                      assignment=str(assignment[config.ID_FIELD]))
    return ids