def handle(self, *args, **options):
    """Copy design documents into temporary ('tmp') design docs.

    Overridden so that the copy step covers preindex plugins as well
    as plain couchapps.
    """
    # Plain couchdbkit apps first.
    for application in get_apps():
        couchdbkit_handler.copy_designs(application, temp='tmp', verbosity=2)
    # Then any custom preindex plugins.
    for preindex_plugin in get_preindex_plugins():
        preindex_plugin.copy_designs(temp='tmp')
def handle(self, **options):
    """Sync design docs for every registered preindex plugin.

    Prints a progress line per plugin; non-default plugins are labelled
    with their class name.
    """
    for plugin in get_preindex_plugins():
        cls_name = type(plugin).__name__
        if cls_name == 'DefaultPreindexPlugin':
            message = "Syncing design docs for {}".format(plugin.app_label)
        else:
            message = "Syncing design docs for {} (using {})".format(
                plugin.app_label, cls_name)
        print(message)
        plugin.sync_design_docs()
def handle(self, *args, **options):
    """Preindex all couchapp views in a greenlet pool, then email the admins.

    Positional args (both optional): pool size (defaults to POOL_SIZE) and
    the initiating username. The ``no_mail`` option suppresses the
    completion email.

    Fix: converted Python 2 ``print`` statements to the ``print()``
    function used elsewhere in this file.
    """
    start = datetime.utcnow()
    # Optional positional overrides: pool size, then initiating username.
    num_pool = int(args[0]) if args else POOL_SIZE
    username = args[1] if len(args) > 1 else '******'
    no_email = options['no_mail']

    pool = Pool(num_pool)
    apps = get_apps()

    # NOTE(review): the original kept a 'completed' set that was never
    # populated, so every app was always scheduled exactly once; the dead
    # bookkeeping is dropped here with identical scheduling behavior.
    for app_id in range(len(apps)):
        print("Trying to preindex view (%d/%d) %s" % (
            app_id, len(apps), apps[app_id]))
        pool.spawn(do_sync, app_id)

    for plugin in get_preindex_plugins():
        print("Custom preindex for plugin %s" % plugin.app_label)
        pool.spawn(plugin.sync_design_docs, temp='tmp')

    print("All apps loaded into jobs, waiting...")
    pool.join()

    # Re-raise any error captured in a greenlet so failures are not silent.
    for greenlet in pool.greenlets:
        greenlet.get()
    print("All apps reported complete.")

    message = "Preindex results:\n"
    message += "\tInitiated by: %s\n" % username
    delta = datetime.utcnow() - start
    message += "Total time: %d seconds" % delta.seconds

    if not no_email:
        print(message)
        send_mail(
            '%s CouchDB Preindex Complete' % settings.EMAIL_SUBJECT_PREFIX,
            message,
            settings.SERVER_EMAIL,
            [x[1] for x in settings.ADMINS],
            fail_silently=True,
        )
def handle(self, *args, **options):
    """Preindex every couchapp's views concurrently and report by email.

    Optional positional args: pool size (default POOL_SIZE), then the
    username recorded in the report. ``no_mail`` disables the email.

    Fix: replaced Python 2 ``print`` statements with ``print()`` calls
    to match the Python 3 code elsewhere in this file.
    """
    start = datetime.utcnow()
    num_pool = int(args[0]) if args else POOL_SIZE
    username = args[1] if len(args) > 1 else '******'
    no_email = options['no_mail']

    pool = Pool(num_pool)
    apps = get_apps()

    # NOTE(review): the previous 'completed' set was never written to, so
    # each app was scheduled exactly once; the no-op difference() is gone.
    for app_id in range(len(apps)):
        print("Trying to preindex view (%d/%d) %s" % (
            app_id, len(apps), apps[app_id]))
        pool.spawn(do_sync, app_id)

    for plugin in get_preindex_plugins():
        print("Custom preindex for plugin %s" % plugin.app_label)
        pool.spawn(plugin.sync_design_docs, temp='tmp')

    print("All apps loaded into jobs, waiting...")
    pool.join()

    # Surface any greenlet exception instead of swallowing it.
    for greenlet in pool.greenlets:
        greenlet.get()
    print("All apps reported complete.")

    message = "Preindex results:\n"
    message += "\tInitiated by: %s\n" % username
    delta = datetime.utcnow() - start
    message += "Total time: %d seconds" % delta.seconds

    if not no_email:
        print(message)
        send_mail(
            '%s CouchDB Preindex Complete' % settings.EMAIL_SUBJECT_PREFIX,
            message,
            settings.SERVER_EMAIL,
            [x[1] for x in settings.ADMINS],
            fail_silently=True,
        )
def handle(self, **options):
    """Prune couch design docs that no preindex plugin expects.

    Lists stale design docs per database, asks for confirmation (unless
    ``noinput``), purges conflicted revisions, then deletes the docs.
    """
    # Map each database URI to the design-doc names expected there.
    expected_by_db = defaultdict(set)
    for plugin in get_preindex_plugins():
        for design in plugin.get_designs():
            if design.design_path:
                expected_by_db[design.db.uri].add(design.app_label)

    designs_to_delete = {}
    for db_uri, expected_designs in expected_by_db.items():
        db = Database(db_uri)
        design_docs = get_design_docs(db)
        found_designs = {dd.name for dd in design_docs}
        stale = found_designs - expected_designs
        if stale:
            designs_to_delete[db] = [
                ddoc._doc for ddoc in design_docs if ddoc.name in stale
            ]
            print('\ndeleting from {}:\n---------------------'.format(
                db.dbname))
            print('\n'.join(sorted(stale)))

    if not designs_to_delete:
        print('database already completely pruned!')
        return

    prompt = '\n'.join([
        '\n\nReally delete all the above design docs?',
        'If any of these views are actually live, bad things will happen. '
        '(Type "delete designs" to continue):',
        '',
    ])
    if options['noinput'] or input(prompt).lower() == 'delete designs':
        for db, docs in designs_to_delete.items():
            for design_doc in docs:
                # If we don't delete conflicts, then they take the place of
                # the document when it's deleted (that's how couch works),
                # causing a huge reindex of an old conflicted version of a
                # design doc we don't even want anymore.
                delete_conflicts(db, design_doc['_id'])
            db.delete_docs(docs)
    else:
        print('aborted!')
def handle(self, *args, **options):
    """Delete couch design docs not expected by any app or preindex plugin.

    Builds the expected set from both plain couchdbkit apps and preindex
    plugins, then deletes anything extra after confirmation (skipped with
    ``noinput``).

    Fix: converted Python 2 ``print`` statements and ``raw_input`` to the
    ``print()``/``input()`` used by the Python 3 code elsewhere in this file.
    """
    # Map each database URI to the set of design-doc names expected there.
    db_label_map = defaultdict(set)

    # Designs from plain couchdbkit apps.
    for info in (get_app_sync_info(app) for app in get_apps()):
        for design in info.designs:
            if design.design_path:
                db_label_map[design.db.uri].add(design.app_label)

    # Designs from preindex plugins.
    for plugin in get_preindex_plugins():
        for design in plugin.get_designs():
            if design.design_path:
                db_label_map[design.db.uri].add(design.app_label)

    designs_to_delete = {}
    for db_uri, expected_designs in db_label_map.items():
        db = Database(db_uri)
        design_docs = get_design_docs(db)
        found_designs = set(dd.name for dd in design_docs)
        to_delete = found_designs - expected_designs
        if to_delete:
            designs_to_delete[db] = [
                ddoc._doc for ddoc in design_docs if ddoc.name in to_delete
            ]
            print('\ndeleting from {}:\n---------------------'.format(
                db.dbname))
            print('\n'.join(sorted(to_delete)))

    if designs_to_delete:
        if options['noinput'] or input('\n'.join([
            '\n\nReally delete all the above design docs?',
            'If any of these views are actually live, bad things will happen. '
            '(Type "delete designs" to continue):',
            '',
        ])).lower() == 'delete designs':
            # NOTE(review): unlike the sibling prune command, conflicted
            # revisions are not purged before deletion here; if conflicts
            # exist, couch will resurface the doc — confirm whether
            # delete_conflicts should be called first.
            for db, design_docs in designs_to_delete.items():
                db.delete_docs(design_docs)
        else:
            print('aborted!')
    else:
        print('database already completely pruned!')
def handle(self, *args, **options):
    """Prune couch design docs that no preindex plugin expects.

    For each database, compares the design docs present against those the
    preindex plugins declare; extras are listed, confirmed (unless
    ``noinput``), conflict-purged, and deleted.

    Fix: converted Python 2 ``print`` statements and ``raw_input`` to the
    ``print()``/``input()`` used by the Python 3 code elsewhere in this file.
    """
    # Map each database URI to the set of design-doc names expected there.
    db_label_map = defaultdict(set)
    for plugin in get_preindex_plugins():
        for design in plugin.get_designs():
            if design.design_path:
                db_label_map[design.db.uri].add(design.app_label)

    designs_to_delete = {}
    for db_uri, expected_designs in db_label_map.items():
        db = Database(db_uri)
        design_docs = get_design_docs(db)
        found_designs = set(dd.name for dd in design_docs)
        to_delete = found_designs - expected_designs
        if to_delete:
            designs_to_delete[db] = [
                ddoc._doc for ddoc in design_docs if ddoc.name in to_delete
            ]
            print('\ndeleting from {}:\n---------------------'.format(
                db.dbname))
            print('\n'.join(sorted(to_delete)))

    if designs_to_delete:
        if options['noinput'] or input('\n'.join([
            '\n\nReally delete all the above design docs?',
            'If any of these views are actually live, bad things will happen. '
            '(Type "delete designs" to continue):',
            '',
        ])).lower() == 'delete designs':
            for db, design_docs in designs_to_delete.items():
                for design_doc in design_docs:
                    # If we don't delete conflicts, then they take the place
                    # of the document when it's deleted. (That's how couch
                    # works.) This results in a huge reindex for an old
                    # conflicted version of a design doc we don't even want
                    # anymore.
                    delete_conflicts(db, design_doc['_id'])
                db.delete_docs(design_docs)
        else:
            print('aborted!')
    else:
        print('database already completely pruned!')
def handle(self, *args, **options):
    """Copy each preindex plugin's design docs into temp ('tmp') design docs.

    Fix: converted the Python 2 ``print`` statement to the ``print()``
    function used elsewhere in this file.
    """
    for plugin in get_preindex_plugins():
        print("Copying design docs for plugin {}".format(plugin.app_label))
        plugin.copy_designs(temp='tmp')