def handle_get(self):
    """Delete every Library and Author entity, then purge the search index.

    Deletes datastore entities in batches of 10 (re-querying until a full
    pass removes nothing), then drains all documents from the 'repo'
    search index. Progress is written to ``self.response`` for logging.
    """
    while True:
        deleted_something = False
        # Batch of 10 keeps each delete_* call's work bounded; the outer
        # loop re-queries until both kinds are exhausted.
        for library_key in Library.query().fetch(keys_only=True, limit=10):
            delete_library(library_key, response_for_logging=self.response)
            deleted_something = True
        for author_key in Author.query().fetch(keys_only=True, limit=10):
            delete_author(author_key, response_for_logging=self.response)
            deleted_something = True
        if not deleted_something:
            break
    # Delete any remaining entries in the search index.
    index = search.Index('repo')
    while True:
        docs = [document.doc_id
                for document in index.get_range(ids_only=True)]
        if not docs:
            break
        # BUG FIX: original used '+' with a '%s' format string, which wrote
        # the literal "search docs: %s\n" followed by the repr instead of
        # interpolating; use '%' formatting.
        self.response.write('search docs: %s\n' % repr(docs))
        index.delete(docs)
    self.response.write('Finished')
def handle_get(self):
    """Rebuild the 'elements', 'collections' and 'authors' Sitemap entities.

    Each sitemap stores the ids of the matching ready, fully-ingested
    libraries (or all authors) and the count is logged.
    """
    # Ready, fully-ingested elements.
    element_query = Library.query().filter(Library.kind == 'element')
    # pylint: disable=singleton-comparison
    element_query = element_query.filter(Library.shallow_ingestion == False)
    element_query = element_query.filter(Library.status == Status.ready)
    element_keys = element_query.fetch(
        keys_only=True, read_policy=ndb.EVENTUAL_CONSISTENCY)
    element_sitemap = Sitemap(id='elements')
    element_sitemap.pages = [k.id() for k in element_keys]
    element_sitemap.put()
    logging.info('%d elements', len(element_sitemap.pages))

    # Ready, fully-ingested collections.
    collection_query = Library.query().filter(Library.kind == 'collection')
    # pylint: disable=singleton-comparison
    collection_query = collection_query.filter(Library.shallow_ingestion == False)
    collection_query = collection_query.filter(Library.status == Status.ready)
    collection_keys = collection_query.fetch(
        keys_only=True, read_policy=ndb.EVENTUAL_CONSISTENCY)
    collection_sitemap = Sitemap(id='collections')
    collection_sitemap.pages = [k.id() for k in collection_keys]
    collection_sitemap.put()
    logging.info('%d collections', len(collection_sitemap.pages))

    # All authors, unfiltered.
    author_keys = Author.query().fetch(
        keys_only=True, read_policy=ndb.EVENTUAL_CONSISTENCY)
    author_sitemap = Sitemap(id='authors')
    author_sitemap.pages = [k.id() for k in author_keys]
    author_sitemap.put()
    logging.info('%d authors', len(author_sitemap.pages))
def handle_get(self):
    """Enqueue an update task for every Library and every Author.

    Bails out early (writing a notice to the response) if the 'update'
    queue still has tasks from a previous run.
    """
    queue = taskqueue.Queue('update')
    if queue.fetch_statistics().tasks > 0:
        self.response.write('update already in progress')
        return

    def enqueue_all(query, make_task_url):
        # Page through the query 50 keys at a time, enqueueing one task
        # per entity; returns how many tasks were created.
        count = 0
        cursor = None
        more = True
        while more:
            keys, cursor, more = query.fetch_page(
                50, keys_only=True, start_cursor=cursor)
            for key in keys:
                count += 1
                util.new_task(make_task_url(key.id()),
                              target='manage', queue_name='update')
        return count

    library_count = enqueue_all(Library.query(), util.update_library_task)
    logging.info('triggered %d library updates', library_count)

    author_count = enqueue_all(Author.query(), util.update_author_task)
    logging.info('triggered %d author updates', author_count)
def handle_get(self):
    """Wipe all Library/Author entities and empty the 'repo' search index.

    Repeatedly fetches up to 10 keys of each kind and deletes them until a
    full pass finds nothing, then deletes search documents in batches.
    Writes progress to ``self.response``.
    """
    while True:
        deleted_something = False
        for library_key in Library.query().fetch(keys_only=True, limit=10):
            delete_library(library_key, response_for_logging=self.response)
            deleted_something = True
        for author_key in Author.query().fetch(keys_only=True, limit=10):
            delete_author(author_key, response_for_logging=self.response)
            deleted_something = True
        # A pass that deleted nothing means the datastore is clean.
        if not deleted_something:
            break
    # Delete any remaining entries in the search index.
    index = search.Index('repo')
    while True:
        docs = [document.doc_id
                for document in index.get_range(ids_only=True)]
        if not docs:
            break
        # BUG FIX: was "'search docs: %s\n' + repr(docs)" — string
        # concatenation left the '%s' placeholder unexpanded in the output.
        self.response.write('search docs: %s\n' % repr(docs))
        index.delete(docs)
    self.response.write('Finished')
def handle_get(self):
    """Regenerate the element, collection and author Sitemap entities."""

    def ready_library_keys(kind):
        # Keys of ready, fully-ingested libraries of the given kind.
        # pylint: disable=singleton-comparison
        query = (Library.query()
                 .filter(Library.kind == kind)
                 .filter(Library.shallow_ingestion == False)
                 .filter(Library.status == Status.ready))
        return query.fetch(keys_only=True,
                           read_policy=ndb.EVENTUAL_CONSISTENCY)

    elements = Sitemap(id='elements')
    elements.pages = [key.id() for key in ready_library_keys('element')]
    elements.put()
    logging.info('%d elements', len(elements.pages))

    collections = Sitemap(id='collections')
    collections.pages = [key.id() for key in ready_library_keys('collection')]
    collections.put()
    logging.info('%d collections', len(collections.pages))

    author_keys = Author.query().fetch(keys_only=True,
                                       read_policy=ndb.EVENTUAL_CONSISTENCY)
    authors = Sitemap(id='authors')
    authors.pages = [key.id() for key in author_keys]
    authors.put()
    logging.info('%d authors', len(authors.pages))