def init_app(app):
    if not app.config['LEGAL_ARCHIVE']:
        return

    endpoint_name = LEGAL_ARCHIVE_NAME
    service = LegalArchiveService(endpoint_name, backend=get_backend())
    LegalArchiveResource(endpoint_name, app=app, service=service)

    endpoint_name = LEGAL_ARCHIVE_VERSIONS_NAME
    service = LegalArchiveVersionsService(endpoint_name, backend=get_backend())
    LegalArchiveVersionsResource(endpoint_name, app=app, service=service)

    endpoint_name = LEGAL_ARCHIVE_HISTORY_NAME
    service = LegalArchiveHistoryService(endpoint_name, backend=get_backend())
    LegalArchiveHistoryResource(endpoint_name, app=app, service=service)

    endpoint_name = LEGAL_PUBLISH_QUEUE_NAME
    service = LegalPublishQueueService(endpoint_name, backend=get_backend())
    LegalPublishQueueResource(endpoint_name, app=app, service=service)

    privilege(name=LEGAL_ARCHIVE_NAME, label='Legal Archive',
              description='Read from legal archive')


superdesk.command('legal_publish_queue:import', ImportLegalPublishQueueCommand())
superdesk.command('legal_archive:import', ImportLegalArchiveCommand())
    return updates


class UpdateVocabulariesInItemsCommand(superdesk.Command):
    """
    Update documents in `archive` and `published` collections which contain
    CV related fields: `subject`, `genre`, `place`, `anpa_category`
    with corresponding data from vocabularies.

    Example:
    ::

        $ python manage.py vocabularies:update_archive

    """

    option_list = ()

    def run(self):
        fields = ["subject", "genre", "place", "anpa_category"]
        lookup = {"type": "manageable", "service": {"$exists": True}}
        vocabularies_list = get_resource_service("vocabularies").get(req=None, lookup=lookup)
        vocabularies = get_vocabularies(vocabularies_list)
        update_items(vocabularies, fields, get_resource_service("archive"))
        update_items(vocabularies, fields, get_resource_service("published"))


superdesk.command("vocabularies:update_archive", UpdateVocabulariesInItemsCommand())
        # If there is an existing set of renditions we keep those
        if old_item:
            media = old_item.get('renditions', {}).get('original', {}).get('media', {})
            if media:
                item['renditions'] = old_item['renditions']
                item['mimetype'] = old_item.get('mimetype')
                item['filemeta'] = old_item.get('filemeta')
                logger.info("Reuters image not updated for GUID:{}".format(item[GUID_FIELD]))
                return

        content, filename, content_type = download_file_from_url(href)
        file_type, ext = content_type.split('/')
        metadata = process_file(content, file_type)
        file_guid = app.media.put(content, filename, content_type, metadata)
        inserted.append(file_guid)
        rendition_spec = app.config.get('RENDITIONS', {}).get('picture', {})
        renditions = generate_renditions(content, file_guid, inserted, file_type,
                                         content_type, rendition_spec, url_for_media)
        item['renditions'] = renditions
        item['mimetype'] = content_type
        item['filemeta'] = metadata
    except Exception:
        for file_id in inserted:
            app.media.delete(file_id)
        raise


superdesk.command('ingest:update', UpdateIngest())
""" Fetches the expired articles from published collection. Expiry Conditions: 1. can_be_removed flag is True 2. Item Expiry is less than or equal to expired_date_time, State of the Item is not SCHEDULED and allow_post_publish_actions flag is True :param expired_date_time: :param limit: :return: expired articles from published collection """ logger.info('Get expired content from published') query = { '$or': [ {'can_be_removed': True}, {'$and': [ {'expiry': {'$lte': expired_date_time}}, {ITEM_STATE: {'$ne': CONTENT_STATE.SCHEDULED}}, {'allow_post_publish_actions': True} ]} ] } req = ParsedRequest() req.sort = '_created' req.max_results = limit return superdesk.get_resource_service('published').get_from_mongo(req=req, lookup=query) superdesk.command('publish:remove_expired', RemoveExpiredPublishContent())
        logger.info('Inserting {} items'.format(len(items)))
        archive_items = []

        for item in items:
            dest_doc = dict(item)
            new_id = generate_guid(type=GUID_TAG)
            dest_doc[app.config['ID_FIELD']] = new_id
            dest_doc['guid'] = new_id
            generate_unique_id_and_name(dest_doc)

            dest_doc[app.config['VERSION']] = 1
            dest_doc[ITEM_STATE] = CONTENT_STATE.FETCHED
            user_id = desk.get('members', [{'user': None}])[0].get('user')
            dest_doc['original_creator'] = user_id
            dest_doc['version_creator'] = user_id

            from apps.tasks import send_to
            send_to(dest_doc, desk_id=desk_id, stage_id=stage_id, user_id=user_id)
            dest_doc[app.config['VERSION']] = 1  # Above step increments the version and needs to reset
            dest_doc[FAMILY_ID] = item['_id']

            remove_unwanted(dest_doc)
            archive_items.append(dest_doc)

        get_resource_service(ARCHIVE).post(archive_items)

        for item in archive_items:
            insert_into_versions(id_=item[app.config['ID_FIELD']])


superdesk.command('app:scaffold_data', AppScaffoldDataCommand())
        user = superdesk.get_resource_service('users').find_one(username=userdata.get('username'), req=None)

        if user:
            logger.info('updating user %s' % (userdata))
            superdesk.get_resource_service('users').patch(user.get('_id'), userdata)
            return userdata
        else:
            logger.info('creating user %s' % (userdata))
            userdata[app.config['DATE_CREATED']] = userdata[app.config['LAST_UPDATED']]
            superdesk.get_resource_service('users').post([userdata])

        logger.info('user saved %s' % (userdata))
        return userdata


class HashUserPasswordsCommand(superdesk.Command):
    def run(self):
        users = superdesk.get_resource_service('auth_users').get(req=None, lookup={})
        for user in users:
            pwd = user.get('password')
            if not is_hashed(pwd):
                updates = {}
                hashed = get_hash(user['password'], app.config.get('BCRYPT_GENSALT_WORK_FACTOR', 12))
                user_id = user.get('_id')
                updates['password'] = hashed
                superdesk.get_resource_service('users').patch(user_id, updates=updates)


superdesk.command('users:create', CreateUserCommand())
superdesk.command('users:hash_passwords', HashUserPasswordsCommand())
class GetAuthTokenCommand(superdesk.Command):
    """
    Generate an authorization token to be able to authenticate against the REST API
    without starting the client to copy the authorization header.
    """

    option_list = (
        superdesk.Option('--username', '-u', dest='username', required=True),
        superdesk.Option('--password', '-p', dest='password', required=True)
    )

    def run(self, username, password):
        credentials = {
            'username': username,
            'password': password
        }
        service = superdesk.get_resource_service('auth')
        id = str(service.post([credentials])[0])
        print('Session ID:', id)
        creds = service.find_one(req=None, _id=id)
        token = creds.get('token').encode('ascii')
        encoded_token = b'basic ' + b64encode(token + b':')
        print('Generated token: ', encoded_token)
        return encoded_token


superdesk.command('users:create', CreateUserCommand())
superdesk.command('users:hash_passwords', HashUserPasswordsCommand())
superdesk.command('users:get_auth_token', GetAuthTokenCommand())
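# A minimal, self-contained sketch of how the value returned above maps onto
# an HTTP Authorization header; the token value is hypothetical.
from base64 import b64encode

token = 'abc123'.encode('ascii')
header_value = b'basic ' + b64encode(token + b':')
print(header_value)  # b'basic YWJjMTIzOg=='
# Sent as: Authorization: basic YWJjMTIzOg==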
        items = self._get_children(service, parent)
        items.append(parent)

        for item in items:
            if not self._has_expired(item, expiry_datetime):
                return []

        return items

    def _has_expired(self, item, expiry_datetime):
        """Checks if the item has expired

        :param dict item: The item to check
        :param datetime expiry_datetime: The date and time items should be expired
        :return bool: True if the item has expired, otherwise False
        """
        item.setdefault('expiry', item['_updated'] + timedelta(days=self.expiry_days))
        return item.get('expiry') <= expiry_datetime

    def _get_children(self, service, item):
        """Get the list of children to the root item using the ancestors dictionary key

        :param service: The content_api items service
        :param dict item: The root item to get the children of
        :return list: The list of children for this root item
        """
        return list(service.find({'ancestors': item['_id']}))


superdesk.command('content_api:remove_expired', RemoveExpiredItems())
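# A minimal sketch of the default-expiry rule in _has_expired above: items
# without an explicit 'expiry' expire expiry_days after '_updated'. The
# sample item and the 30-day window are assumptions for illustration.
from datetime import datetime, timedelta

expiry_days = 30
item = {'_updated': datetime(2018, 1, 1)}
item.setdefault('expiry', item['_updated'] + timedelta(days=expiry_days))
print(item['expiry'] <= datetime(2018, 2, 15))  # True: expired on 2018-01-31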
                                                             lookup=query))

    def enqueue_items(self, published_items):
        """Creates the corresponding entries in the publish queue for each item

        :param list published_items: the list of items marked for publishing
        """
        failed_items = {}

        for queue_item in published_items:
            try:
                self.enqueue_item(queue_item)
            except Exception:
                logger.exception('Failed to queue item {}'.format(queue_item.get('_id')))
                failed_items[str(queue_item.get('_id'))] = queue_item

        if len(failed_items) > 0:
            logger.error('Failed to publish the following items: {}'.format(failed_items.keys()))


superdesk.command('publish:enqueue', EnqueueContent())


@celery.task(soft_time_limit=300)
def enqueue_published():
    """Pick new items from ``published`` collection and enqueue it."""
    with ProfileManager('publish:enqueue'):
        EnqueueContent().run()
"display_name": name, "type": "manageable", "unique_field": "qcode", "service": { "all": 1 }, "items": [], } if base_extra: current_voc.update(base_extra) if extra: current_voc.update(extra) voc.append(current_voc) elif not skip: if current_voc is None: raise SystemExit( "Invalid source file! Your file must start with a vocabularies label (it must end with a " "colon)") item = { "name": line, "qcode": line, "is_active": True, } current_voc["items"].append(item) json.dump(voc, open("vocabularies.json", "w"), indent=4) print('Data generated in "vocabularies.json"') superdesk.command("vocabularies:generate", GenerateVocabularies())
            'item_version': item.get(config.VERSION),
            'subscriber_id': self.subscriber.get('_id'),
            'destination': destination,
            'formatted_item': json.dumps(format_callback(item), default=json_serialize_datetime_objectId),
            'content_type': item.get('type')
        }

    def _get_destination(self, destination_format):
        """Get the destination

        :param str destination_format: destination format as `json_event` or `json_planning`
        """
        return {
            'delivery_type': 'http_push',
            'format': destination_format,
            'config': {
                'resource_url': self.resource_url,
                'assets_url': self.assets_url
            },
            'name': destination_format
        }


command('planning:export_to_newsroom', ExportToNewsroom())
            plans.update({item[config.ID_FIELD]: item for item in items})

        plans_deleted = set()
        assignments_deleted = set()
        assignments_to_delete = []

        for plan_id, plan in plans.items():
            for coverage in plan.get('coverages') or []:
                assignment_id = (coverage.get('assigned_to') or {}).get('assignment_id')
                if assignment_id:
                    assignments_to_delete.append(assignment_id)

            # Now, delete the planning item
            planning_service.delete_action(lookup={'_id': plan_id})
            plans_deleted.add(plan_id)

        # Delete assignments
        assignment_service = get_resource_service('assignments')
        for assign_id in assignments_to_delete:
            assignment_service.delete(lookup={'_id': assign_id})
            assignments_deleted.add(assign_id)

        logger.info('{} {} Assignments deleted: {}'.format(
            self.log_msg, len(assignments_deleted), list(assignments_deleted)))
        logger.info('{} {} Planning items deleted: {}'.format(
            self.log_msg, len(plans_deleted), list(plans_deleted)))


command('planning:delete_spiked', DeleteSpikedItems())
def get_app(config=None):
    """App factory.

    :param config: configuration that can override config from `settings.py`
    :return: a new SuperdeskEve app instance
    """
    if config is None:
        config = {}

    config['APP_ABSPATH'] = os.path.abspath(os.path.dirname(__file__))

    for key in dir(settings):
        if key.isupper():
            config.setdefault(key, getattr(settings, key))

    media_storage = SuperdeskGridFSMediaStorage
    if config['AMAZON_CONTAINER_NAME']:
        from superdesk.storage.amazon.amazon_media_storage import AmazonMediaStorage
        from superdesk.storage.amazon.import_from_amazon import ImportFromAmazonCommand

        media_storage = AmazonMediaStorage
        superdesk.command('import:amazon', ImportFromAmazonCommand())

    config['DOMAIN'] = {}

    app = eve.Eve(
        data=superdesk.SuperdeskDataLayer,
        auth=TokenAuth,
        media=media_storage,
        settings=config,
        json_encoder=MongoJSONEncoder,
        validator=SuperdeskValidator)

    superdesk.app = app

    custom_loader = jinja2.ChoiceLoader(
        [app.jinja_loader, jinja2.FileSystemLoader(['superdesk/templates'])])
    app.jinja_loader = custom_loader

    app.mail = Mail(app)

    @app.errorhandler(superdesk.SuperdeskError)
    def client_error_handler(error):
        """Return json error response.

        :param error: an instance of :attr:`superdesk.SuperdeskError` class
        """
        return send_response(None, (error.to_dict(), None, None, error.status_code))

    @app.errorhandler(500)
    def server_error_handler(error):
        """Log server errors."""
        app.sentry.captureException()
        logger.exception(error)
        return_error = superdesk.SuperdeskError(status_code=500)
        return client_error_handler(return_error)

    init_celery(app)

    for module_name in app.config['INSTALLED_APPS']:
        app_module = importlib.import_module(module_name)
        try:
            app_module.init_app(app)
        except AttributeError:
            pass

    for resource in superdesk.DOMAIN:
        app.register_resource(resource, superdesk.DOMAIN[resource])

    for blueprint in superdesk.BLUEPRINTS:
        prefix = app.api_prefix or None
        app.register_blueprint(blueprint, url_prefix=prefix)

    # we can only put mapping when all resources are registered
    app.data.elastic.put_mapping(app)

    app.sentry = sentry
    sentry.init_app(app)
    return app
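# A minimal usage sketch for the factory above (an assumption, not part of
# the original module): build the app with a config override and serve it
# locally; the DEBUG flag, host and port are arbitrary example values.
if __name__ == '__main__':
    app = get_app({'DEBUG': True})
    app.run(host='0.0.0.0', port=5000)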
    if config.ID_FIELD not in user and required:
        raise SuperdeskApiError.notFoundError(_("Invalid user."))
    return user


def get_user_id(required=False):
    """Get authenticated user id.

    :param boolean required: if True and there is no user it will raise an error
    """
    user = get_user(required)
    return user.get(config.ID_FIELD)


def get_auth():
    """Get authenticated session data."""
    auth = flask.g.get("auth", {})
    return auth


def is_current_user_admin(required=False):
    """Test if current user is administrator.

    :param required: raise an error if required and there is no user context
    """
    user = get_user(required) or {}
    return user.get("user_type", "") == "administrator"


superdesk.command("session:gc", RemoveExpiredSessions())
            extracted_files.append(local_filepath)

    # Save or update the theme in the database
    result = themes_service.save_or_update_theme(description_file, extracted_files, force_update=True)
    return json.dumps(dict(_status='OK', _action=result.get('status'), theme=description_file),
                      cls=MongoJSONEncoder)


class ThemesCommand(superdesk.Command):
    def run(self):
        theme_service = get_resource_service('themes')
        created, updated = theme_service.update_registered_theme_with_local_files(force=True)
        print('%d themes registered' % (len(created) + len(updated)))
        if created:
            print('added:')
            for theme in created:
                print('\t+ %s %s (%s)' % (theme.get('label', theme['name']), theme['version'], theme['name']))
        if updated:
            print('updated:')
            for theme in updated:
                print('\t* %s %s (%s)' % (theme.get('label', theme['name']), theme['version'], theme['name']))


superdesk.command('register_local_themes', ThemesCommand())
    option_list = {
        superdesk.Option('--provider', '-p', dest='provider'),
    }

    def run(self, provider=None):
        if provider:
            data = superdesk.json.loads(provider)
            data.setdefault('_created', utcnow())
            data.setdefault('_updated', utcnow())
            data.setdefault('name', data['type'])
            db = superdesk.get_db()
            db['ingest_providers'].save(data)
            return data


superdesk.command('ingest:update', UpdateIngest())
superdesk.command('ingest:provider', AddProvider())

# load providers now to have available types for the schema
import superdesk.io.reuters
import superdesk.io.aap


def init_app(app):
    IngestProviderModel(app=app)


class IngestProviderModel(BaseModel):
    schema = {
        'name': {
            'type': 'string',
    schema = blogs_schema
    datasource = {"source": "blogs", "default_sort": [("title", 1)]}
    resource_methods = ["GET"]


class UserBlogsService(BaseService):
    def get(self, req, lookup):
        if lookup.get("user_id"):
            lookup["members.user"] = ObjectId(lookup["user_id"])
            del lookup["user_id"]
        return super().get(req, lookup)


class PublishBlogsCommand(superdesk.Command):
    """
    Republish blogs on s3 with the right theme
    """

    def run(self):
        # retrieves all opened blogs
        blogs_service = get_resource_service("blogs")
        blogs = blogs_service.get(req=None, lookup=dict(blog_status="open"))
        # republish on s3
        print("\n* Republishing blogs:\n")
        for blog in blogs:
            url = publish_blog_embed_on_s3(blog_id=str(blog["_id"]), safe=False)
            print(' - Blog "%s" republished: %s' % (blog["title"], url))


superdesk.command("publish_blogs", PublishBlogsCommand())
        try:
            print('Starting index rebuilding for index: {}'.format(index_name))
            es = get_es(superdesk.app.config['ELASTICSEARCH_URL'])
            clone_name = index_name + '-' + get_random_string()
            print('Creating index: ', clone_name)
            superdesk.app.data.elastic.create_index(clone_name, superdesk.app.config['ELASTICSEARCH_SETTINGS'])
            real_name = superdesk.app.data.elastic.get_index_by_alias(clone_name)
            print('Putting mapping for index: ', clone_name)
            superdesk.app.data.elastic.put_mapping(superdesk.app, clone_name)
            print('Starting index rebuilding.')
            reindex(es, index_name, clone_name)
            print('Finished index rebuilding.')
            print('Deleting index: ', index_name)
            get_indices(es).delete(index_name)
            print('Creating alias: ', index_name)
            get_indices(es).put_alias(index=real_name, name=index_name)
            print('Alias created.')
            print('Deleting clone name alias')
            get_indices(es).delete_alias(name=clone_name, index=real_name)
            print('Deleted clone name alias')
        except elasticsearch.exceptions.NotFoundError as nfe:
            print(nfe)
        print('Index {0} rebuilt successfully.'.format(index_name))


superdesk.command('app:rebuild_elastic_index', RebuildElasticIndex())
                updates = {config.LAST_UPDATED: utcnow()}

                if orig_item.get("retry_attempt", 0) < max_retry_attempt:
                    updates["retry_attempt"] = orig_item.get("retry_attempt", 0) + 1
                    updates["state"] = QueueState.RETRYING.value
                    updates["next_retry_attempt_at"] = utcnow() + timedelta(minutes=retry_attempt_delay)
                else:
                    # all retry attempts exhausted, marking the item as failed.
                    updates["state"] = QueueState.FAILED.value

                publish_queue_service.system_update(orig_item.get(config.ID_FIELD), updates, orig_item)
            except Exception:
                logger.error("Failed to set the state for failed publish queue item {}.".format(item_id))

    logger.error("Failed to publish the following items: {}".format(failed_items.keys()))


def can_transmit_queue_item(queue_item):
    """
    Check if the queue item can be transmitted or not

    :param dict queue_item: queue item
    :return boolean: True or False
    """
    if queue_item.get("state") == QueueState.RETRYING:
        if not queue_item.get("next_retry_attempt_at") <= utcnow():
            return False

    return True


superdesk.command("publish:transmit", PublishContent())
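# A minimal sketch of the retry bookkeeping above: each failure bumps
# retry_attempt and schedules the next try retry_attempt_delay minutes out.
# The sample values and the plain 'retrying'/'failed' strings (standing in
# for QueueState values) are assumptions for illustration.
from datetime import datetime, timedelta

max_retry_attempt = 10
retry_attempt_delay = 10  # minutes
orig_item = {"retry_attempt": 3}

if orig_item.get("retry_attempt", 0) < max_retry_attempt:
    updates = {
        "retry_attempt": orig_item["retry_attempt"] + 1,
        "state": "retrying",
        "next_retry_attempt_at": datetime.utcnow() + timedelta(minutes=retry_attempt_delay),
    }
else:
    updates = {"state": "failed"}
print(updates["retry_attempt"])  # 4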
    def update_overdue_scheduled(self):
        """
        Updates the overdue scheduled content on published collection.
        """
        logger.info('Updating overdue scheduled content')

        if is_task_running("publish", "update_overdue_scheduled", UPDATE_OVERDUE_SCHEDULED_DEFAULT):
            return

        try:
            now = date_to_str(utcnow())
            items = get_overdue_scheduled_items(now, 'published')

            for item in items:
                logger.info('updating overdue scheduled article with id {} and headline {} -- expired on: {} now: {}'
                            .format(item[config.ID_FIELD], item['headline'], item['publish_schedule'], now))

                superdesk.get_resource_service('published').\
                    update_published_items(item['item_id'], ITEM_STATE, CONTENT_STATE.PUBLISHED)
        finally:
            mark_task_as_not_running("publish", "update_overdue_scheduled")


superdesk.command('publish:remove_overdue_scheduled', UpdateOverdueScheduledPublishedContent())
        archived_service = superdesk.get_resource_service('archived')

        for item in items:
            articles_to_kill = archived_service.find_articles_to_kill({'_id': item[config.ID_FIELD]}, False)
            if not articles_to_kill:
                continue

            for article in articles_to_kill:
                archived_service.command_delete({'_id': article[config.ID_FIELD]})
                print("Deleted item {} ".format(article[config.ID_FIELD]))

    def run(self, ids):
        if ids and len(ids) > 0:
            items = self.get_archived_items(ids)
            if not items:
                print("No archived story found with given id(s)!")
                return

            if self.can_delete_items(items):
                self.delete(items)
                print("Delete has been completed")
        else:
            print("Please provide at least one id!")
            return


superdesk.command('app:deleteArchivedDocument', DeleteArchivedDocumentCommand())
        # Authenticate and fetch profile from AD
        settings = app.settings
        ad_auth = ADAuth(settings['LDAP_SERVER'], settings['LDAP_SERVER_PORT'],
                         settings['LDAP_BASE_FILTER'], settings['LDAP_USER_FILTER'],
                         settings['LDAP_USER_ATTRIBUTES'], settings['LDAP_FQDN'])

        user_data = ad_auth.authenticate_and_fetch_profile(ad_username, ad_password, username)

        if len(user_data) == 0:
            raise SuperdeskApiError.notFoundError('Username not found')

        # Check if User Profile already exists in Mongo
        user = superdesk.get_resource_service('users').find_one(req=None, **get_user_query(username))

        if user:
            superdesk.get_resource_service('users').patch(user.get('_id'), user_data)
        else:
            add_default_values(user_data, username, user_type=user_type)
            superdesk.get_resource_service('users').post([user_data])

        return user_data


superdesk.command('users:copyfromad', ImportUserProfileFromADCommand())
"""Upsert the vocabularies into the vocabularies collections. The format of the file used is JSON. :param filepath: absolute filepath :return: nothing """ if not os.path.exists(filepath): raise FileNotFoundError [table_name, ext] = os.path.basename(filepath).split('.') with open(filepath, 'rt') as vocabularies: json_data = json.loads(vocabularies.read()) populate_table_json(table_name, json_data) class VocabulariesPopulateCommand(superdesk.Command): """ Class defining the populate vocabularies command. """ option_list = ( superdesk.Option('--filepath', '-f', dest='filepath', required=True), ) def run(self, filepath): process_vocabularies(filepath) superdesk.command('vocabularies:populate', VocabulariesPopulateCommand())
"the item {item_id} reached compliance end-of-life, but it cannot " "be corrected: {e}".format(item_id=item_id, e=e)) nb_failed += 1 continue else: try: insert_into_versions(item_id) except Exception as e: logger.error( "the item {item_id} could not be inserted into versions: {e}" .format(item_id=item_id, e=e)) # archive_service.update(item_id, updates, item) logger.info("item {item_id} reached compliance end-of-life, it has been corrected".format(item_id=item_id)) nb_corrected += 1 logger.info("{nb_corrected} article(s) have been corrected due to compliance eol reached".format( nb_corrected=nb_corrected)) if nb_failed > 0: logger.warning( "{nb_failed} article(s) could *NOT* be corrected despite reaching " "compliance end of life".format(nb_failed=nb_failed)) @celery.task(soft_time_limit=300) def eol_check(): ComplianceEOLCheck().run() superdesk.command('compliance:eol_check', ComplianceEOLCheck())
            with open(local_filepath, 'wb') as file_in_local_storage:
                file_in_local_storage.write(zip_file.read(name))
            extracted_files.append(local_filepath)

    # Save or update the theme in the database
    result = themes_service.save_or_update_theme(description_file, extracted_files, force_update=True)
    return json.dumps(
        dict(
            _status='OK',
            _action=result.get('status'),
            theme=description_file
        ), cls=MongoJSONEncoder)


class ThemesCommand(superdesk.Command):
    def run(self):
        theme_service = get_resource_service('themes')
        created, updated = theme_service.update_registered_theme_with_local_files(force=True)
        print('%d themes registered' % (len(created) + len(updated)))
        if created:
            print('added:')
            for theme in created:
                print('\t+ %s %s (%s)' % (theme.get('label', theme['name']), theme['version'], theme['name']))
        if updated:
            print('updated:')
            for theme in updated:
                print('\t* %s %s (%s)' % (theme.get('label', theme['name']), theme['version'], theme['name']))


superdesk.command('register_local_themes', ThemesCommand())
                    }
                }
            }
        }

        req = ParsedRequest()
        repos = 'archived'
        req.args = {'source': json.dumps(query), 'repo': repos}
        search_res = superdesk.get_resource_service('search')
        existing = search_res.get(req=req, lookup=None)
        existing_guids = [e['guid'] for e in existing]
        not_existing = [g for g in guids if g not in existing_guids]

        for missing_guid in not_existing:
            i = [m for m in items if m['guid'] == missing_guid]
            original = res.find_one(req=None, guid=i[0]['guid'])
            if not original:
                try:
                    s = time.time()
                    res.post(i)
                    print('Post single item to Superdesk in {:.2f} seconds'.format(time.time() - s))
                except Exception as ex:
                    print('Exception posting single item')
            else:
                print('Exception posting batch')


superdesk.command('app:import_text_archive', AppImportTextArchiveCommand())
        if queue_item.get('publish_schedule'):
            publish_schedule = queue_item['publish_schedule']
            if type(publish_schedule) is not datetime:
                raise PublishQueueError.bad_schedule_error(Exception("Schedule is not datetime"), destination)
            return utcnow() >= publish_schedule
        return True
    except PublishQueueError:
        raise
    except Exception as ex:
        raise PublishQueueError.bad_schedule_error(ex, destination)


def update_content_state(queue_item):
    """
    Updates the state of the content item to published in archive and published repos

    :param queue_item:
    :return:
    """
    if queue_item.get('publish_schedule'):
        try:
            item_update = {'state': 'published'}
            superdesk.get_resource_service('archive').patch(queue_item['item_id'], item_update)
            superdesk.get_resource_service('published').\
                update_published_items(queue_item['item_id'], 'state', 'published')
        except Exception as ex:
            raise PublishQueueError.content_update_error(ex)


superdesk.command('publish:transmit', PublishContent())
        req = ParsedRequest()
        req.sort = '[("%s", 1)]' % config.ID_FIELD
        cursor = service.get_from_mongo(req, {})
        count = cursor.count()
        no_of_buckets = len(range(0, count, bucket_size))
        water_mark = cursor[0][config.ID_FIELD]
        print('Number of items to index: {}, pages={}'.format(count, no_of_buckets))

        for x in range(0, no_of_buckets):
            print('{} Page : {}'.format(time.strftime('%X %x %Z'), x + 1))
            s = time.time()
            req = ParsedRequest()
            req.sort = '[("%s", 1)]' % config.ID_FIELD
            req.max_results = bucket_size

            if x == 0:
                lookup = {config.ID_FIELD: {'$gte': water_mark}}
            else:
                lookup = {config.ID_FIELD: {'$gt': water_mark}}

            cursor = service.get_from_mongo(req, lookup)
            items = list(cursor)
            water_mark = items[len(items) - 1][config.ID_FIELD]
            print('{} Retrieved from Mongo in {:.3f} seconds to {}'.format(
                time.strftime('%X %x %Z'), time.time() - s, water_mark))

            yield items


superdesk.command('app:index_from_mongo', IndexFromMongo())
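# A minimal, self-contained sketch of the watermark pagination used above:
# the first page uses >= the first id, later pages use > the last id seen,
# so no document is skipped or fetched twice. A plain sorted list stands in
# for the Mongo cursor.
ids = [1, 2, 3, 4, 5, 6, 7]
bucket_size = 3
water_mark = ids[0]
first = True
while True:
    page = [i for i in ids if (i >= water_mark if first else i > water_mark)][:bucket_size]
    if not page:
        break
    first = False
    water_mark = page[-1]
    print(page)  # [1, 2, 3] then [4, 5, 6] then [7]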
    public_methods = ['POST']


class PrepopulateService(BaseService):
    def create(self, docs, **kwargs):
        for doc in docs:
            if doc.get('remove_first'):
                drop_elastic(superdesk.app)
                drop_mongo(superdesk.app)
            user = get_resource_service('users').find_one(username=get_default_user()['username'], req=None)
            if not user:
                get_resource_service('users').post([get_default_user()])
            prepopulate_data(doc.get('profile') + '.json', get_default_user())
        return ['OK']


class AppPrepopulateCommand(superdesk.Command):
    option_list = [
        superdesk.Option('--file', '-f', dest='prepopulate_file', default='app_prepopulate_data.json')
    ]

    def run(self, prepopulate_file):
        user = get_resource_service('users').find_one(username=get_default_user()['username'], req=None)
        if not user:
            get_resource_service('users').post([get_default_user()])
        prepopulate_data(prepopulate_file, get_default_user())


superdesk.command('app:prepopulate', AppPrepopulateCommand())
                updates = {config.LAST_UPDATED: utcnow()}

                if orig_item.get('retry_attempt', 0) < max_retry_attempt and \
                        not isinstance(e, PublishHTTPPushClientError):
                    updates['retry_attempt'] = orig_item.get('retry_attempt', 0) + 1
                    updates['state'] = QueueState.RETRYING.value
                    updates['next_retry_attempt_at'] = utcnow() + timedelta(minutes=retry_attempt_delay)
                else:
                    # all retry attempts exhausted, marking the item as failed.
                    updates['state'] = QueueState.FAILED.value

                publish_queue_service.system_update(orig_item.get(config.ID_FIELD), updates, orig_item)
            except Exception:
                logger.error('Failed to set the state for failed publish queue item {}.'.format(queue_item['_id']))

    # Release the lock for the subscriber
    unlock(lock_name)


def transmit_items(queue_items):
    # get a distinct list of the subscribers that have queued items
    subscribers = list(set([q['subscriber_id'] for q in queue_items]))

    # extract the queued items for each subscriber and transmit them
    for subscriber in subscribers:
        sub_queue_items = [item for item in queue_items if item['subscriber_id'] == subscriber]
        transmit_subscriber_items.apply_async(kwargs={'queue_items': sub_queue_items,
                                                      'subscriber': str(subscriber)})


superdesk.command('publish:transmit', PublishContent())
def process_validators(filepath):
    """
    This function upserts the validators into the validators collections.
    The format of the file used is JSON.

    :param filepath: absolute filepath
    :return: nothing
    """
    if not os.path.exists(filepath):
        raise FileNotFoundError

    with open(filepath, 'rt') as validators:
        json_data = json.loads(validators.read())
        populate_validators(json_data)


class ValidatorsPopulateCommand(superdesk.Command):
    """
    Class defining the populate validators command.
    """

    option_list = (
        superdesk.Option('--filepath', '-f', dest='filepath', required=True),
    )

    def run(self, filepath):
        process_validators(filepath)


superdesk.command('validators:populate', ValidatorsPopulateCommand())
            articles_to_kill = archived_service.find_articles_to_kill(
                {'_id': item[config.ID_FIELD]}, False)
            if not articles_to_kill:
                continue

            for article in articles_to_kill:
                archived_service.command_delete(
                    {'_id': article[config.ID_FIELD]})
                print("Deleted item {} ".format(article[config.ID_FIELD]))

    def run(self, ids):
        if ids and len(ids) > 0:
            items = self.get_archived_items(ids)
            if not items:
                print("No archived story found with given id(s)!")
                return

            if self.can_delete_items(items):
                self.delete(items)
                print("Delete has been completed")
        else:
            print("Please provide at least one id!")
            return


superdesk.command('app:deleteArchivedDocument', DeleteArchivedDocumentCommand())
    def get_expired_items(self, now):
        query_filter = self.get_query_for_expired_items(now)
        req = ParsedRequest()
        req.max_results = 100
        req.args = {'filter': query_filter}
        return superdesk.get_resource_service('archive').get(req, None)

    def get_query_for_expired_items(self, now):
        query = {'and': [
            {'range': {'expiry': {'lte': now}}}
        ]}
        return superdesk.json.dumps(query)


superdesk.command('archive:remove_expired', ArchiveRemoveExpiredContent())

superdesk.workflow_state('in_progress')
superdesk.workflow_action(
    name='save',
    include_states=['draft', 'fetched', 'routed', 'submitted'],
    privileges=['archive']
)

superdesk.workflow_state('submitted')
superdesk.workflow_action(
    name='move',
    exclude_states=['ingested', 'spiked', 'on-hold', 'published', 'killed'],
    privileges=['archive']
)
def enqueue_items(published_items):
    """
    Creates the corresponding entries in the publish queue for each item

    :param list published_items: the list of items marked for publishing
    """
    failed_items = {}
    current_utc = utcnow()

    for queue_item in published_items:
        try:
            schedule_utc_datetime = get_utc_schedule(queue_item, PUBLISH_SCHEDULE)
            if not schedule_utc_datetime or schedule_utc_datetime < current_utc:
                enqueue_item(queue_item)
        except Exception:
            logger.exception('Failed to queue item {}'.format(queue_item.get('_id')))
            failed_items[str(queue_item.get('_id'))] = queue_item

    # mark failed items as pending so that Celery tasks will try again
    if len(failed_items) > 0:
        logger.error('Failed to publish the following items: {}'.format(failed_items.keys()))


superdesk.command('publish:enqueue', EnqueueContent())


@celery.task
def enqueue_published():
    EnqueueContent().run()
    if profile is not None:
        content_types_service = get_resource_service("content_types")
        try:
            content_profile = content_types_service.find({'label': profile}).next()
        except StopIteration:
            print("Can't find content profile with this label")
            sys.exit(1)
        else:
            profile_id = content_profile['_id']

    with open(path, 'rb') as f:
        buf = f.read()
        buf = buf.replace(b'\r', b' ')
        xml_parser = etree.XMLParser(recover=True)
        parsed = etree.fromstring(buf, xml_parser)
        articles = feed_parser.parse(parsed)

    updates = {ITEM_STATE: 'published'}
    if profile is not None:
        updates['profile'] = profile_id

    for article in articles:
        article.update(updates)
        article.setdefault('source', parser)

    archived_service = get_resource_service("archived")
    archived_service.post(articles)


superdesk.command('xml:import', ImportCommand())
        service = superdesk.get_resource_service(mongo_collection_name)
        req = ParsedRequest()
        req.sort = '[("%s", 1)]' % config.ID_FIELD
        cursor = service.get_from_mongo(req, {})
        count = cursor.count()
        no_of_buckets = len(range(0, count, bucket_size))
        water_mark = cursor[0][config.ID_FIELD]
        print('Number of items to index: {}, pages={}'.format(count, no_of_buckets))

        for x in range(0, no_of_buckets):
            print('{} Page : {}'.format(time.strftime('%X %x %Z'), x + 1))
            s = time.time()
            req = ParsedRequest()
            req.sort = '[("%s", 1)]' % config.ID_FIELD
            req.max_results = bucket_size

            if x == 0:
                lookup = {config.ID_FIELD: {'$gte': water_mark}}
            else:
                lookup = {config.ID_FIELD: {'$gt': water_mark}}

            cursor = service.get_from_mongo(req, lookup)
            items = list(cursor)
            water_mark = items[len(items) - 1][config.ID_FIELD]
            print('{} Retrieved from Mongo in {:.3f} seconds to {}'.format(
                time.strftime('%X %x %Z'), time.time() - s, water_mark))

            yield items


superdesk.command('app:index_from_mongo', IndexFromMongo())
def init_app(app):
    endpoint_name = 'freetypes'
    service = FreetypesService(endpoint_name, backend=superdesk.get_backend())
    FreetypesResource(endpoint_name, app=app, service=service)


superdesk.command('register_freetype', RegisterFreetypeCommand())
    finally:
        mark_task_as_not_running("archive", "update_overdue_scheduled")


def get_overdue_scheduled_items(expired_date_time, resource, limit=100):
    """
    Fetches the overdue scheduled articles from given collection.

    Overdue Conditions:
        1. it should be in 'scheduled' state
        2. publish_schedule is less than or equal to expired_date_time

    :param expired_date_time: DateTime that scheduled state will be checked against
    :param resource: Name of the resource to check the data from
    :param limit: Number of return items
    :return: overdue scheduled articles from published collection
    """
    logger.info('Get overdue scheduled content from {}'.format(resource))
    query = {'$and': [
        {'publish_schedule': {'$lte': expired_date_time}},
        {ITEM_STATE: CONTENT_STATE.SCHEDULED}
    ]}

    req = ParsedRequest()
    req.sort = '_modified'
    req.max_results = limit
    return superdesk.get_resource_service(resource).get_from_mongo(req=req, lookup=query)


superdesk.command('archive:remove_spiked_if_expired', RemoveExpiredSpikeContent())
superdesk.command('archive:remove_overdue_scheduled', UpdateOverdueScheduledContent())
        uri=uri)
    return item


NEW_VERSION_IGNORE_FIELDS = ("expiry",)


def is_new_version(item, old_item):
    # explicit version info
    for field in ("version", "versioncreated"):
        if item.get(field) and old_item.get(field):
            try:
                return int(item[field], 10) > int(old_item[field], 10)
            except (ValueError, TypeError):
                try:
                    return item[field] > old_item[field]
                except TypeError as ex:
                    logger.exception(ex)
                    # can't compare the values, assuming these are different
                    return True

    # no version info, check content
    for field in item:
        if field in NEW_VERSION_IGNORE_FIELDS or item[field] is None:
            continue
        if not old_item.get(field) or item[field] != old_item[field]:
            return True

    return False


superdesk.command("ingest:update", UpdateIngest())
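# A minimal usage sketch for is_new_version above, with hypothetical items:
# numeric version strings compare as integers, non-numeric values fall back
# to a direct comparison.
print(is_new_version({"version": "10"}, {"version": "9"}))    # True ("10" > "9" as ints)
print(is_new_version({"versioncreated": "2020-02-01"},
                     {"versioncreated": "2020-01-01"}))       # True (raw string compare)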
@celery.task
def session_purge():
    try:
        RemoveExpiredSessions().run()
    except Exception as ex:
        logger.error(ex)


def get_user(required=False):
    """Get user authenticated for current request.

    :param boolean required: if True and there is no user it will raise an error
    """
    user = flask.g.get('user', {})
    if config.ID_FIELD not in user and required:
        raise SuperdeskApiError.notFoundError('Invalid user.')
    return user


def get_user_id(required=False):
    """Get authenticated user id.

    :param boolean required: if True and there is no user it will raise an error
    """
    user = get_user(required)
    return user.get(config.ID_FIELD)


superdesk.command('session:gc', RemoveExpiredSessions())
superdesk.Option("--value", "-v", dest="value", required=True), superdesk.Option("--parseNeeded", "-p", dest="parseNeeded", default=False), ] def run(self, ids, field, value, parseNeeded=False): ids = ast.literal_eval(ids) if parseNeeded: try: value = json.loads(value) except Exception as e: print("Error in parsing the value: {}".format(value)) print(e) return if ids and len(ids) > 0: items = DeleteArchivedDocumentCommand().get_archived_items(ids) for item in items: superdesk.get_resource_service("archived").system_update( bson.ObjectId(item["_id"]), {field: value}, item) print("Archived item {} has been updated.".format(item["_id"])) print("-" * 45) superdesk.command("app:updateArchivedDocument", UpdateArchivedDocumentCommand())
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license

import superdesk
from superdesk.io.ingest_provider_model import INGEST_EXPIRY_MINUTES
from superdesk.errors import ProviderError


class AddProvider(superdesk.Command):
    """Add ingest provider."""

    option_list = {
        superdesk.Option('--provider', '-p', dest='provider'),
    }

    def run(self, provider=None):
        if provider:
            try:
                data = {}
                data = superdesk.json.loads(provider)
                data.setdefault('name', data['type'])
                data.setdefault('source', data['type'])
                data.setdefault('content_expiry', INGEST_EXPIRY_MINUTES)
                db = superdesk.get_db()
                db['ingest_providers'].save(data)
                return data
            except Exception as ex:
                raise ProviderError.providerAddError(ex, data)


superdesk.command('ingest:provider', AddProvider())
            crt_index = list(index) if isinstance(index, list) else index
            options = crt_index.pop() if isinstance(crt_index[-1], dict) and isinstance(index, list) else {}
            collection = app.data.mongo.pymongo(resource=entity_name).db[entity_name]
            options['background'] = True
            index_name = collection.create_index(crt_index, **options)
            logger.info(' - index: %s for collection %s created successfully.', index_name, entity_name)


def fillEnvironmentVariables(item):
    variables = {}
    text = json.dumps(item)

    for variable in re.findall('#ENV_([^#"]+)#', text):
        value = os.environ.get(variable, None)
        if not value:
            return None
        else:
            variables[variable] = value

    for name in variables:
        text = text.replace('#ENV_%s#' % name, variables[name])

    return json.loads(text)


superdesk.command('app:initialize_data', AppInitializeWithDataCommand())
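# A minimal usage sketch for fillEnvironmentVariables above; the variable
# name and document are hypothetical. Placeholders of the form #ENV_NAME#
# are replaced with os.environ['NAME'], and the whole document is dropped
# (None is returned) when any referenced variable is unset.
import os

os.environ['MEDIA_URL'] = 'https://example.com/media'
print(fillEnvironmentVariables({'href': '#ENV_MEDIA_URL#/pic.jpg'}))
# {'href': 'https://example.com/media/pic.jpg'}
print(fillEnvironmentVariables({'href': '#ENV_NOT_SET#/pic.jpg'}))
# None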
class RebuildElasticIndex(superdesk.Command):
    """
    Rebuild the elastic index from existing data by creating a new index with the
    same alias as the configured index, putting the new mapping and deleting the old index.
    """

    def run(self):
        index_name = superdesk.app.config['ELASTICSEARCH_INDEX']
        print('Starting index rebuilding for index: ', index_name)
        try:
            es = get_es(superdesk.app.config['ELASTICSEARCH_URL'])
            clone_name = index_name + '-' + get_random_string()
            print('Creating index: ', clone_name)
            superdesk.app.data.elastic.create_index(clone_name, superdesk.app.config['ELASTICSEARCH_SETTINGS'])
            print('Putting mapping for index: ', clone_name)
            superdesk.app.data.elastic.put_mapping(superdesk.app, clone_name)
            print('Starting index rebuilding.')
            reindex(es, index_name, clone_name)
            print('Finished index rebuilding.')
            print('Deleting index: ', index_name)
            get_indices(es).delete(index_name)
            print('Creating alias: ', index_name)
            get_indices(es).put_alias(index=clone_name, name=index_name)
            print('Alias created.')
        except elasticsearch.exceptions.NotFoundError as nfe:
            print(nfe)
        print('Index {0} rebuilt successfully.'.format(index_name))


superdesk.command('app:rebuild_elastic_index', RebuildElasticIndex())
            # if the feed is versioned and this is not a new version
            if 'version' in item and 'version' in old_item and item.get('version') == old_item.get('version'):
                new_version = False
        else:
            if item.get('ingest_provider_sequence') is None:
                ingest_service.set_ingest_provider_sequence(item, provider)

        try:
            items_ids.extend(ingest_service.post_in_mongo([item]))
        except HTTPException as e:
            logger.error('Exception while persisting item in %s collection: %s', ingest_collection, e)
            raise e

        if routing_scheme and new_version:
            routed = ingest_service.find_one(_id=item[superdesk.config.ID_FIELD], req=None)
            superdesk.get_resource_service('routing_schemes').apply_routing_scheme(routed, provider,
                                                                                   routing_scheme)
    except Exception as ex:
        logger.exception(ex)
        ProviderError.ingestItemError(ex, provider, item=item)
        return False, []

    return True, items_ids


superdesk.command('ingest:update', UpdateIngest())
    :param ad_password: Password of Active Directory Username
    :param username: Username as in Active Directory whose profile needs to be imported to Superdesk.
    :return: User Profile.
    """

    # force type conversion to boolean
    user_type = 'administrator' if admin is not None and admin.lower() == 'true' else 'user'

    # Authenticate and fetch profile from AD
    settings = app.settings
    ad_auth = ADAuth(settings['LDAP_SERVER'], settings['LDAP_SERVER_PORT'],
                     settings['LDAP_BASE_FILTER'], settings['LDAP_USER_FILTER'],
                     settings['LDAP_USER_ATTRIBUTES'], settings['LDAP_FQDN'])

    user_data = ad_auth.authenticate_and_fetch_profile(ad_username, ad_password, username)

    if len(user_data) == 0:
        raise SuperdeskApiError.notFoundError('Username not found')

    # Check if User Profile already exists in Mongo
    user = superdesk.get_resource_service('users').find_one(username=username, req=None)

    if user:
        superdesk.get_resource_service('users').patch(user.get('_id'), user_data)
    else:
        add_default_values(user_data, username, user_type=user_type)
        superdesk.get_resource_service('users').post([user_data])

    return user_data


superdesk.command('users:copyfromad', ImportUserProfileFromADCommand())
"""Gets auth token. Generate an authorization token to be able to authenticate against the REST api without starting the client the copy the authorization header. """ option_list = (superdesk.Option('--username', '-u', dest='username', required=True), superdesk.Option('--password', '-p', dest='password', required=True)) def run(self, username, password): credentials = {'username': username, 'password': password} service = superdesk.get_resource_service('auth') id = str(service.post([credentials])[0]) print('Session ID:', id) creds = service.find_one(req=None, _id=id) token = creds.get('token').encode('ascii') encoded_token = b'basic ' + b64encode(token + b':') print('Generated token: ', encoded_token) return encoded_token superdesk.command('users:create', CreateUserCommand()) superdesk.command('users:hash_passwords', HashUserPasswordsCommand()) superdesk.command('users:get_auth_token', GetAuthTokenCommand())
        if app.config.get('LEGAL_ARCHIVE'):
            legal_archive_items = superdesk.get_resource_service('legal_archive').get_from_mongo(None, query)
            self.__add_existing_files(used_images, legal_archive_items)

            legal_archive_version_items = superdesk.get_resource_service('legal_archive_versions').\
                get_from_mongo(None, query)
            self.__add_existing_files(used_images, legal_archive_version_items)

        print('Number of used files: ', len(used_images))
        superdesk.app.media.remove_unreferenced_files(used_images)

    def __add_existing_files(self, used_images, items):
        for item in items:
            if 'media' in item:
                used_images.add(str(item['media']))

            if item.get('renditions', {}):
                used_images.update([str(rend.get('media')) for rend in item.get('renditions', {}).values()
                                    if rend.get('media')])

            associations = [assoc.get('renditions') for assoc in (item.get(ASSOCIATIONS) or {}).values()
                            if assoc and assoc.get('renditions')]
            for renditions in associations:
                used_images.update([str(rend.get('media')) for rend in renditions.values() if rend.get('media')])


superdesk.command('app:clean_images', CleanImages())
        logger.info('Starting to fulfill assignments.')
        lock_name = get_lock_id('planning', 'fulfill_assignments')
        if not lock(lock_name, expire=610):
            logger.info('{} Fulfill Assignments task is already running'.format(lock_name))
            return

        # Get a list of the outstanding photo assignments
        assignments = list(self._get_outstanding_photo_assignments())

        # query for any images available from the image site API with those assignment id's
        completed_assignments = self._check_complete(assignments)
        self._mark_as_complete(completed_assignments)

        complete = [c.get('assignment').get('_id') for c in completed_assignments]

        # check if any of the outstanding assignments are in either the picedit or aapimage pools
        in_progress_assignments = self._check_in_progress(assignments, complete)
        self._mark_as_in_progress(in_progress_assignments)

        unlock(lock_name)
        logger.info('Finished fulfilling assignments')


superdesk.command('app:fullfill_image_assignments', FullfillImageAssignments())
class UpdateIngest(superdesk.Command):
    """Update ingest providers."""

    option_list = (superdesk.Option("--provider", "-p", dest="provider_type"),)

    def run(self, provider_type=None):
        for provider in app.data.find_all("ingest_providers"):
            if not provider_type or provider_type == provider.get("type"):
                try:
                    update_provider.delay(provider)
                except Exception as err:
                    logger.exception(err)


superdesk.command("ingest:update", UpdateIngest())


@celery.task()
def update_provider(provider):
    """Update given provider."""
    if provider.get("type") in providers:
        for items in providers[provider.get("type")].update(provider):
            ingest_items(provider, items)
        push_notification("ingest:update")


def ingest_items(provider, items):
    start = utcnow()
    ingested_count = provider.get("ingested_count", 0)
    for item in items:
import superdesk
from superdesk.io.ingest_provider_model import DAYS_TO_KEEP


class AddProvider(superdesk.Command):
    """Add ingest provider."""

    option_list = {
        superdesk.Option('--provider', '-p', dest='provider'),
    }

    def run(self, provider=None):
        if provider:
            data = superdesk.json.loads(provider)
            data.setdefault('name', data['type'])
            data.setdefault('source', data['type'])
            data.setdefault('days_to_keep', DAYS_TO_KEEP)
            db = superdesk.get_db()
            db['ingest_providers'].save(data)
            return data


superdesk.command('ingest:provider', AddProvider())
        superdesk.Option('--provider', '-p', dest='provider_type'),
    )

    def run(self, provider_type=None):
        for provider in app.data.find_all('ingest_providers'):
            if not provider_type or provider_type == provider.get('type'):
                try:
                    remove_expired_data(provider)
                except Exception as err:
                    logger.exception(err)
                finally:
                    push_notification('ingest:cleaned')


superdesk.command('ingest:clean_expired', RemoveExpiredContent())


def remove_expired_data(provider):
    """Remove expired data for provider"""
    print('Removing expired content for provider: %s' % provider['_id'])
    days_to_keep_content = provider.get('days_to_keep', DAYS_TO_KEEP)
    expiration_date = utcnow() - timedelta(days=days_to_keep_content)
    items = get_expired_items(str(provider['_id']), expiration_date)

    while items.count() > 0:
        for item in items:
            print('Removing item %s' % item['_id'])
            app.data.remove('ingest', {'_id': str(item['_id'])})
        items = get_expired_items(str(provider['_id']), expiration_date)
    if not lock(lock_name, expire=300):
        return

    try:
        remove_expired_data(provider)
        push_notification("ingest:cleaned")
    except Exception as err:
        logger.exception(err)
        raise ProviderError.expiredContentError(err, provider)
    finally:
        unlock(lock_name)


superdesk.command("ingest:clean_expired", RemoveExpiredContent())


def remove_expired_data(provider):
    """Remove expired data for provider"""
    logger.info("Removing expired content for provider: %s" % provider.get("_id", "Detached items"))

    try:
        feeding_service = get_feeding_service(provider["feeding_service"])
        ingest_collection = feeding_service.service if hasattr(feeding_service, "service") else "ingest"
    except KeyError:
        ingest_collection = "ingest"

    ingest_service = superdesk.get_resource_service(ingest_collection)
    ]

    def run(self, republish):
        # update themes
        theme_service = get_resource_service('themes')
        created, updated = theme_service.update_registered_theme_with_local_files()
        print('\n* %d themes updated from local files\n' % (len(created) + len(updated)))

        # retrieves all opened blogs
        blogs_service = get_resource_service('blogs')
        blogs = blogs_service.get(req=None, lookup=dict(blog_status='open'))

        print('* Update the theme for every blog\n')
        for blog in blogs:
            theme = blogs_service.get_theme_snapshot(blog['blog_preferences']['theme'])
            try:
                blogs_service.system_update(ObjectId(blog['_id']), {'theme': theme}, blog)
            except eve.io.base.DataLayer.OriginalChangedError:
                print(u'! an error occurred during saving blog "%s".' % (blog['title']),
                      'Can be a broken relationship (with user for instance)')
            else:
                print('- Blog "%s"\'s theme was updated to %s %s' % (
                    blog['title'], theme['name'], theme['version']))

        # republish on s3
        if republish:
            print('\n* Republishing blogs:\n')
            for blog in blogs:
                url = publish_blog_embed_on_s3(blog_id=str(blog['_id']), safe=False)
                print(' - Blog "%s" republished: %s' % (blog['title'], url))


superdesk.command('update_blogs_themes', UpdateThemesBlogsCommand())
            lookup = {
                '$and': [
                    self.not_item_entry_query,
                    {'_updated': {'$lte': date_to_str(self.expiry)}}
                ]
            }
            if current_id:
                lookup['$and'].append({'_id': {'$gt': current_id}})

            req = ParsedRequest()
            req.sort = '[("_id", 1)]'
            req.projection = '{"_id": 1}'
            req.max_results = 1000

            audits = service.get_from_mongo(req=req, lookup=lookup)
            items = list(item.get('_id') for item in audits)
            if len(items) == 0:
                logger.info('Finished purging audit logs of non content items at {}'.format(utcnow()))
                return

            logger.info('Found {} audit items at {}'.format(len(items), utcnow()))
            current_id = items[len(items) - 1]

            logger.info('Deleting {} old audit items'.format(len(items)))
            service.delete_ids_from_mongo(items)


superdesk.command('audit:purge', PurgeAudit())
        logger.info(' - file imported successfully: %s', file_name)

    if index_params:
        for index in index_params:
            crt_index = list(index) if isinstance(index, list) else index
            options = crt_index.pop() if isinstance(crt_index[-1], dict) and isinstance(index, list) else {}
            collection = app.data.mongo.pymongo(resource=entity_name).db[entity_name]
            options['background'] = True
            index_name = collection.create_index(crt_index, **options)
            logger.info(' - index: %s for collection %s created successfully.', index_name, entity_name)


def fillEnvironmentVariables(item):
    variables = {}
    text = json.dumps(item)

    for variable in re.findall('#ENV_([^#"]+)#', text):
        value = os.environ.get(variable, None)
        if not value:
            return None
        else:
            variables[variable] = value

    for name in variables:
        text = text.replace('#ENV_%s#' % name, variables[name])

    return json.loads(text)


superdesk.command('app:initialize_data', AppInitializeWithDataCommand())
        for doc in docs:
            if doc.get('remove_first'):
                drop_elastic(superdesk.app)
                drop_mongo(superdesk.app)
            user = get_resource_service('users').find_one(
                username=get_default_user()['username'], req=None)
            if not user:
                get_resource_service('users').post([get_default_user()])
            prepopulate_data(doc.get('profile') + '.json', get_default_user())
        return ['OK']


class AppPrepopulateCommand(superdesk.Command):
    option_list = [
        superdesk.Option('--file', '-f', dest='prepopulate_file', default='app_prepopulate_data.json')
    ]

    def run(self, prepopulate_file):
        user = get_resource_service('users').find_one(
            username=get_default_user()['username'], req=None)
        if not user:
            get_resource_service('users').post([get_default_user()])
        prepopulate_data(prepopulate_file, get_default_user())


superdesk.command('app:prepopulate', AppPrepopulateCommand())
def get_app(config=None):
    """App factory.

    :param config: configuration that can override config from `settings.py`
    :return: a new SuperdeskEve app instance
    """
    if config is None:
        config = {}

    config['APP_ABSPATH'] = os.path.abspath(os.path.dirname(__file__))
    config['CONTENT_STATE'] = 'state'

    for key in dir(settings):
        if key.isupper():
            config.setdefault(key, getattr(settings, key))

    media_storage = SuperdeskGridFSMediaStorage
    if config['AMAZON_CONTAINER_NAME']:
        from superdesk.storage.amazon.amazon_media_storage import AmazonMediaStorage
        from superdesk.storage.amazon.import_from_amazon import ImportFromAmazonCommand

        media_storage = AmazonMediaStorage
        superdesk.command('import:amazon', ImportFromAmazonCommand())

    config['DOMAIN'] = {}

    app = eve.Eve(
        data=superdesk.SuperdeskDataLayer,
        auth=TokenAuth,
        media=media_storage,
        settings=config,
        json_encoder=MongoJSONEncoder,
        validator=SuperdeskValidator)

    superdesk.app = app

    custom_loader = jinja2.ChoiceLoader([
        app.jinja_loader,
        jinja2.FileSystemLoader(['superdesk/templates'])
    ])
    app.jinja_loader = custom_loader

    # cache
    app.cache = Cache(app, config={'CACHE_TYPE': 'simple'})
    app.blog_cache = BlogCache(cache=app.cache)

    # mail
    app.mail = Mail(app)

    @app.errorhandler(SuperdeskError)
    def client_error_handler(error):
        """Return json error response.

        :param error: an instance of :attr:`superdesk.SuperdeskError` class
        """
        return send_response(None, (error.to_dict(), None, None, error.status_code))

    @app.errorhandler(500)
    def server_error_handler(error):
        """Log server errors."""
        app.sentry.captureException()
        logger.exception(error)
        return_error = SuperdeskApiError.internalError()
        return client_error_handler(return_error)

    init_celery(app)

    for module_name in app.config['INSTALLED_APPS']:
        app_module = importlib.import_module(module_name)
        try:
            app_module.init_app(app)
        except AttributeError:
            pass

    for resource in superdesk.DOMAIN:
        app.register_resource(resource, superdesk.DOMAIN[resource])

    for blueprint in superdesk.BLUEPRINTS:
        prefix = app.api_prefix or None
        app.register_blueprint(blueprint, url_prefix=prefix)

    # s3
    s3.init_app(app)

    # embed feature
    app.register_blueprint(embed_blueprint)

    # we can only put mapping when all resources are registered
    app.data.elastic.put_mapping(app)

    app.sentry = sentry
    sentry.init_app(app)
    return app
        # get all the items that are not moved to legal
        lookup = {
            '$and': [
                {'item_id': {'$in': list(items_to_expire.keys())}},
                {'moved_to_legal': False}
            ]
        }

        items_not_moved_to_legal = list(get_resource_service('publish_queue').get(req=None, lookup=lookup))

        if len(items_not_moved_to_legal) > 0:
            publish_queue_items = set([item.get('item_id') for item in items_not_moved_to_legal])
            items_not_moved.update({item_id: items_to_expire[item_id] for item_id in publish_queue_items})
            logger.warning('{} Items are not moved to legal publish queue {}.'.format(
                self.log_msg, publish_queue_items))

        return items_not_moved


superdesk.command('archive:remove_expired', RemoveExpiredContent())