def update_collection_from_remote(url_remote_couchdb, url_api_collection, delete_first=True):
    '''Pull a collection's documents from a remote couchdb into the
    current environment's couchdb.

    :param url_remote_couchdb: URL of the source (remote) couchdb
    :param url_api_collection: registry API URL identifying the collection
    :param delete_first: when True, delete the local copy of the
        collection before copying
    :returns: tuple of (total doc ids seen, updated count, created count)
    :raises Exception: when running against a "prod" DATA_BRANCH and the
        collection is not flagged ready_for_publication
    '''
    if delete_first:
        delete_collection(url_api_collection.rsplit('/', 2)[1])
    collection = Collection(url_api_collection)
    # Refuse to touch production data unless the collection has been
    # flagged ready_for_publication.
    if 'prod' in environ.get('DATA_BRANCH', '') and \
            not collection.ready_for_publication:
        raise Exception(
            'In PRODUCTION ENV and collection {} not ready for '
            'publication'.format(collection.id))
    ids = get_collection_doc_ids(collection.id, url_remote_couchdb)
    source_db = get_couchdb(url_remote_couchdb)
    target_db = get_couchdb()
    num_created = 0
    num_updated = 0
    for remote_id in ids:
        result = update_from_remote(
            remote_id, couchdb_remote=source_db, couchdb_env=target_db)
        if 'created' in result:
            num_created += 1
        else:
            num_updated += 1
    return len(ids), num_updated, num_created
def rollback_collection_docs(collection_key, dry_run=False, auth=''):
    '''Roll each document of a collection on the staging couchdb back to
    its grandparent revision.

    Before rewriting a document, its current revision is snapshotted to
    ``<collection_key>/<doc id>_old.json`` so the rollback can be undone
    by hand.

    :param collection_key: collection id used to look up the doc ids
    :param auth: ``user:password@`` fragment spliced into the couch URL
    :param dry_run: when True, only print/snapshot; do not write anything
        back to couchdb
    '''
    dids = get_collection_doc_ids(collection_key, couch_stg.format(''))
    cserver_stg = couchdb.Server(couch_stg.format(auth))
    cdb_stg = cserver_stg['ucldc']
    try:
        os.mkdir(collection_key)
    except OSError:
        # snapshot directory already exists -- fine, reuse it
        pass
    for did in dids:
        print(did)
        doc = cdb_stg[did]
        # Collect the three most recent revisions; index 0 is the
        # current one, index 2 the grandparent.
        rlist = []
        for i, rev in enumerate(cdb_stg.revisions(doc['_id'])):
            print(rev['_rev'])
            rlist.append(rev)
            if i > 1:
                break
        if len(rlist) < 3:
            # Bug fix: rlist[2] used to IndexError on docs with fewer
            # than 3 revisions; skip them instead.
            print('{} has fewer than 3 revisions, skipping'.format(did))
            continue
        rev_doc = rlist[2]
        # Stamp the grandparent content with the *current* _rev so the
        # update below is accepted by couchdb's MVCC check.
        rev_doc['_rev'] = rlist[0]['_rev']
        # Bug fix: was doc['id'], which raises KeyError -- couchdb
        # documents store their id under '_id'.
        with open('{}/{}_old.json'.format(collection_key, doc['_id']),
                  'w') as foo:
            json.dump(rlist[0], foo)
        # Revert by updating. Bug fix: the write used to happen even
        # when dry_run was requested.
        if not dry_run:
            cdb_stg[did] = rev_doc
def update_collection_from_remote(url_remote_couchdb, url_api_collection, delete_first=True):
    '''Update a collection from a remote couchdb.

    NOTE(review): this is a byte-for-byte duplicate of an identical
    ``update_collection_from_remote`` defined earlier in this file; the
    later definition silently shadows the earlier one at import time.
    Consider deleting one copy.

    :param url_remote_couchdb: URL of the source (remote) couchdb
    :param url_api_collection: registry API URL identifying the collection
    :param delete_first: when True, delete the local collection first
    :returns: tuple of (total doc ids, updated count, created count)
    :raises Exception: in a "prod" DATA_BRANCH when the collection is not
        ready_for_publication
    '''
    if delete_first:
        delete_collection(url_api_collection.rsplit('/', 2)[1])
    collection = Collection(url_api_collection)
    # guard against updating production for not ready_for_publication
    # collections
    if 'prod' in environ.get('DATA_BRANCH', ''):
        if not collection.ready_for_publication:
            raise Exception(
                'In PRODUCTION ENV and collection {} not ready for '
                'publication'.format(collection.id))
    doc_ids = get_collection_doc_ids(collection.id, url_remote_couchdb)
    couchdb_remote = get_couchdb(url_remote_couchdb)
    couchdb_env = get_couchdb()
    created = 0
    updated = 0
    # Copy each doc; update_from_remote reports whether it created or
    # updated the local copy.
    for doc_id in doc_ids:
        msg = update_from_remote(doc_id, couchdb_remote=couchdb_remote,
                                 couchdb_env=couchdb_env)
        if 'created' in msg:
            created += 1
        else:
            updated += 1
    return len(doc_ids), updated, created
def create_redis_deletion_script(url_remote_couchdb, collection_id):
    '''Write a script of redis HDEL commands, one line per document in
    the collection, for clearing entries out of the image cache hash.

    Output goes to a file named ``delete_image_cache-<collection_id>``
    in the current directory.

    :param url_remote_couchdb: couchdb URL used to look up the doc ids
    :param collection_id: id of the collection to generate commands for
    '''
    doc_ids = get_collection_doc_ids(collection_id, url_remote_couchdb)
    # Hoist the invariant command prefix out of the loop.
    base_cmd = 'HDEL {}'.format(redis_hash_key)
    outname = 'delete_image_cache-{}'.format(collection_id)
    with open(outname, 'w') as outfile:
        for doc_id in doc_ids:
            # Same byte layout as the original ' '.join of
            # (base, doc_id, '\n'): "HDEL <key> <doc_id> \n"
            outfile.write('{} {} \n'.format(base_cmd, doc_id))