def command(cls, config_ini, cache_dir, delete_files):
    common.load_config(config_ini)
    common.register_translator()

    #rev = model.repo.new_revision()
    #rev.author = 'fix_secondary_theme.py'

    files_to_delete = []
    count = 0
    for f in glob.iglob(os.path.join(cache_dir, '*/*/*')):
        a = model.Session.query(Archival).filter(
            Archival.cache_filepath == f).first()
        size = os.path.getsize(f)
        if a is None:
            print stats.add('Not in archival table', f.decode('utf8'), size)
            files_to_delete.append(f)
        else:
            res = model.Resource.get(a.resource_id)
            if not res:
                print stats.add('No matching resource', f.decode('utf8'), size)
                files_to_delete.append(f)
            elif res.state == 'deleted':
                print stats.add('Resource is deleted', f.decode('utf8'), size)
                files_to_delete.append(f)
            else:
                pkg = res.resource_group.package
                if pkg.state == 'deleted':
                    print stats.add('Package is deleted', f.decode('utf8'), size)
                    files_to_delete.append(f)
                else:
                    stats.add('OK', f.decode('utf8'), size)
        count += 1
        if count % 250 == 0:
            print '\n\nProgress after %s:' % count
            print stats.report()

    print stats.report()

    if delete_files:
        print 'Deleting %s files' % len(files_to_delete)
        for f in files_to_delete:
            try:
                os.unlink(f)
            except OSError:
                print stats.add('ERROR Deleting', f.decode('utf8'), 0)
    print 'Done'
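# The `stats` helper used throughout these commands is not defined in the
# snippets above. As a rough illustration only (an assumption, not the actual
# ckanext helper), it behaves roughly like this minimal sketch: add() files an
# item under a category and returns a printable one-line message, and
# report() summarises how many items ended up in each category.
from collections import defaultdict


class StatsSketch(object):
    def __init__(self):
        self.categories = defaultdict(list)

    def add(self, category, item, size=None):
        # record the item and hand back a line suitable for `print`
        self.categories[category].append((item, size))
        return '%s: %s' % (category, item)

    def report(self):
        # one line per category with its item count
        return '\n'.join('%s: %i' % (category, len(items))
                         for category, items in sorted(self.categories.items()))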
def fix(config_ini, write):
    common.load_config(config_ini)
    common.register_translator()

    if write:
        rev = model.repo.new_revision()
        rev.author = 'script-' + __file__
        # the rev.author "script-" prefix stops the changes appearing in the
        # publisher_activity report

    packages = model.Session.query(model.Package) \
                    .filter_by(state='active') \
                    .join(model.PackageExtra) \
                    .filter_by(state='active') \
                    .filter(or_(model.PackageExtra.key == 'theme-secondary',
                                model.PackageExtra.key == 'themes-secondary'))\
                    .all()
    for pkg in packages:
        withs = pkg.extras.get('themes-secondary')
        withouts = pkg.extras.get('theme-secondary')
        if withs and not withouts:
            print stats.add('Dropped the s', pkg.name)
            if write:
                pkg.extras['theme-secondary'] = withs
                del pkg.extras['themes-secondary']
        elif withouts and not withs:
            print stats.add('Already without an s', pkg.name)
        elif withouts and withs:
            print stats.add('Merged the two', pkg.name)
            withouts = json.loads(withouts)
            if isinstance(withouts, basestring):
                withouts = [withouts]
            withs = set(json.loads(withs))
            mix = json.dumps(withouts + list(withs - set(withouts)))
            if write:
                pkg.extras['theme-secondary'] = mix
                del pkg.extras['themes-secondary']

    if write:
        model.Session.commit()

    print stats.report()
def command(cls, config_ini, cache_dir, delete):
    common.load_config(config_ini)
    common.register_translator()

    #rev = model.repo.new_revision()
    #rev.author = 'fix_secondary_theme.py'

    no_archival = []
    deleted_res = []
    for f in glob.glob(os.path.join(cache_dir, '*/*/*')):
        a = model.Session.query(Archival).filter(
            Archival.cache_filepath == f).first()
        if a is None:
            stats.add('No archival', f.decode('utf8'))
            no_archival.append(f)
        else:
            res = model.Resource.get(a.resource_id)
            if res.state == 'deleted':
                stats.add('Deleted Resource', f.decode('utf8'))
                deleted_res.append(f)
            else:
                stats.add('OK', f.decode('utf8'))

    if delete:
        for f in chain(deleted_res, no_archival):
            try:
                os.unlink(f)
            except OSError:
                stats.add('Error Deleting', f.decode('utf8'))

    #with open('no-archival.txt', 'w') as outfile:
    #    for f in no_archival:
    #        outfile.write("%s\n" % f)

    #with open('deleted-res.txt', 'w') as outfile:
    #    for f in deleted_res:
    #        outfile.write("%s\n" % f)

    print stats.report()
def command(cls, config_ini, package_zip_dir, delete_files):
    common.load_config(config_ini)
    common.register_translator()

    files_to_delete = []
    count = 0
    for f in glob.iglob(os.path.join(package_zip_dir, '*')):
        a = model.Session.query(PackageZip) \
                 .filter(PackageZip.filepath == f) \
                 .first()
        size = os.path.getsize(f)
        if a is None:
            print stats.add('Not in packagezip table', f.decode('utf8'), size)
            files_to_delete.append(f)
        else:
            pkg = model.Package.get(a.package_id)
            if pkg.state == 'deleted':
                print stats.add('Package is deleted', f.decode('utf8'), size)
                files_to_delete.append(f)
            else:
                stats.add('OK', f.decode('utf8'), size)
        count += 1
        if count % 250 == 0:
            print '\n\nProgress after %s:' % count
            print stats.report()

    print stats.report()

    if delete_files:
        print 'Deleting %s files' % len(files_to_delete)
        for f in files_to_delete:
            try:
                os.unlink(f)
            except OSError:
                print stats.add('ERROR Deleting', f.decode('utf8'), 0)
    print 'Done'
warnings = []
log = None


def warn(msg, *params):
    global warnings
    warnings.append(msg % params)
    global_log.warn(msg, *params)


def usage():
    print """
Coupled Resources tool

Usage:
  python coupled_resources.py <CKAN config ini filepath> detect
      - finds datasets that are coupled and adds their harvest_source_reference
"""


if __name__ == '__main__':
    if len(sys.argv) != 3:
        print 'Wrong number of arguments %i' % len(sys.argv)
        usage()
        sys.exit(0)
    cmd, config_ini, action = sys.argv
    common.load_config(config_ini)
    CoupledResources.setup_logging(config_ini)
    common.register_translator()

    if action == 'detect':
        CoupledResources.detect()
    else:
        raise NotImplementedError
def remove_admins():
    admin_ids = [i[0] for i in
                 model.Session.query(model.User.id).filter_by(sysadmin=True).all()]
    members = model.Session.query(model.Member)\
        .filter_by(table_name='user', capacity='admin', state='active')\
        .filter(model.Member.table_id.in_(admin_ids))
    print "There are %s memberships to be deleted" % members.count()
    members.update({'state': 'deleted'}, synchronize_session='fetch')
    model.Session.commit()


if __name__ == '__main__':
    usage = __doc__ + """
    usage: %prog <ckan.ini>
    """
    if len(sys.argv) < 2:
        print usage
        sys.exit("Wrong number of args")
    config_filepath = sys.argv[1]
    print 'Loading CKAN config...'
    common.load_config(config_filepath)
    common.register_translator()
    print 'Done'
    remove_admins()
usage = __doc__ + """ usage: %prog [-w] [YY-MM-DD|all] <ckan.ini> """ parser = OptionParser(usage=usage) #parser.add_option("-w", "--write", # action="store_true", dest="write", # help="write the theme to the datasets") parser.add_option('-f', '--filename', dest='filename') parser.add_option('-o', '--organization', dest='organization') (options, args) = parser.parse_args() if len(args) != 2: parser.error('Wrong number of arguments (%i)' % len(args)) date_str, config_filepath = args print 'Loading CKAN config...' common.load_config(config_filepath) common.register_translator() print 'Done' if date_str == 'all': now = datetime.date.today() date = datetime.date(2012, 6, 1) while date <= now: dump_groups(date, options) date += datetime.timedelta(days=31) while date.day != 1: date -= datetime.timedelta(days=1) else: date = datetime.date(*[int(chunk) for chunk in date_str.split('-')]) dump_groups(date, options)