def dump_db(dburl, username=None, password=None, boundary=None):
    """Dump every document in the database at *dburl* as one MIME multipart message.

    Each document becomes an ``application/json`` part; documents with
    attachments become a nested ``multipart/mixed`` part holding the JSON
    plus one part per attachment.

    :param dburl: URL of the CouchDB database to dump
    :param username: optional basic-auth user name
    :param password: optional basic-auth password
    :param boundary: optional MIME boundary for the outer envelope
    :return: the complete multipart message as a string
    """
    envelope = MIMEMultipart('mixed', boundary)
    db = Database(dburl)
    if username is not None and password is not None:
        db.resource.http.add_credentials(username, password)
    for docid in db:
        doc = db.get(docid, attachments=True)
        # py3 print() for consistency with the other dump/load functions
        # in this file (the original `print >>` form is py2-only syntax).
        print('Dumping document %r' % doc.id, file=sys.stderr)
        attachments = doc.pop('_attachments', {})
        part = MIMEBase('application', 'json')
        part.set_payload(json.dumps(doc, sort_keys=True, indent=2))
        if attachments:
            # Wrap the JSON part plus all attachments in a nested multipart.
            inner = MIMEMultipart('mixed')
            inner.attach(part)
            for name, info in attachments.items():
                content_type = info.get('content_type')
                if content_type is None:  # CouchDB < 0.8 used a dashed key
                    content_type = info.get('content-type')
                maintype, subtype = content_type.split('/', 1)
                subpart = MIMEBase(maintype, subtype)
                subpart['Content-ID'] = name
                subpart.set_payload(b64decode(info['data']))
                inner.attach(subpart)
            part = inner
        part['Content-ID'] = doc.id
        part['ETag'] = doc.rev
        envelope.attach(part)
    return envelope.as_string()
def load_db(fileobj, dburl, username=None, password=None, ignore_errors=False):
    """Load a multipart dump (as produced by ``dump_db``) into a database.

    :param fileobj: file-like object containing the multipart dump
    :param dburl: URL of the target CouchDB database
    :param username: optional basic-auth user name
    :param password: optional basic-auth password
    :param ignore_errors: when True, log per-document failures and continue
        instead of raising
    """
    db = Database(dburl)
    if username is not None and password is not None:
        db.resource.credentials = (username, password)
    for headers, is_multipart, payload in read_multipart(fileobj):
        docid = headers['content-id']
        if is_multipart:  # doc has attachments
            for headers, _, payload in payload:
                if 'content-id' not in headers:
                    # First inner part carries the JSON document itself.
                    doc = json.decode(payload)
                    doc['_attachments'] = {}
                else:
                    doc['_attachments'][headers['content-id']] = {
                        'data': b64encode(payload),
                        'content_type': headers['content-type'],
                        'length': len(payload),
                    }
        else:  # no attachments, just the JSON
            doc = json.decode(payload)
        # The dump includes the source revision; drop it so the target
        # database assigns its own.
        del doc['_rev']
        # py3 print() for consistency with the other dump/load functions
        # in this file (the original `print >>` form is py2-only syntax).
        print('Loading document %r' % docid, file=sys.stderr)
        try:
            db[docid] = doc
        except Exception as e:
            if not ignore_errors:
                raise
            print('Error: %s' % e, file=sys.stderr)
def find_max_upload_size(db_uri):
    """Probe the server for the largest document size it will accept.

    Phase 1 doubles the upload size until a failure; phase 2 binary-searches
    between the last success and the first failure.

    :param db_uri: URI of the CouchDB database to probe
    :return: the largest size (in the units used by ``upload``) that succeeded
    """
    db = CouchDatabase.open_database(db_uri, False)
    couch_db = Database(db_uri)
    logger.debug('Database URI: %s' % db_uri)
    # delete eventual leftover from last run
    if 'largedoc' in couch_db:
        delete_doc(couch_db)
    # phase 1: increase upload size exponentially
    logger.info('Starting phase 1: increasing size exponentially.')
    size = 1
    while True:
        if upload(db, size, couch_db):
            size *= 2
        else:
            break
    # phase 2: binary search for maximum value
    unable = size
    # Floor division: `/` would produce floats on Python 3 and the sizes
    # must stay integral.
    able = size // 2
    logger.info('Starting phase 2: binary search for maximum value.')
    while unable - able > 1:
        size = able + (unable - able) // 2
        if upload(db, size, couch_db):
            able = size
        else:
            unable = size
    return able
def create_couchdb_client(self, db_url, require_exists=True, **args):
    """Return a couchdb ``Database`` client for *db_url*.

    :param db_url: URL of the target database
    :param require_exists: when True, probe the database and fail fast if
        it is not reachable
    :raises Exception: if *require_exists* is set and no database exists
        at *db_url*
    """
    client = Database(db_url)
    if not require_exists:
        return client
    try:
        client.info()
    except ResourceNotFound:
        raise Exception('No database found at %s' % client.resource.url)
    return client
def dump_db(dburl, username=None, password=None, boundary=None,
            output=sys.stdout, bulk_size=BULK_SIZE):
    """Stream every document in the database at *dburl* to *output* as a
    multipart dump, fetching documents in batches of *bulk_size*.

    :param dburl: URL of the CouchDB database to dump
    :param username: optional basic-auth user name
    :param password: optional basic-auth password
    :param boundary: optional MIME boundary for the envelope
    :param output: writable stream receiving the dump
    :param bulk_size: number of documents fetched per ``_all_docs`` request
    """
    db = Database(dburl)
    if username is not None and password is not None:
        db.resource.credentials = username, password
    envelope = write_multipart(output, boundary=boundary)
    total = db.info()['doc_count']
    offset = 0
    while offset < total:
        view_opts = {'limit': bulk_size, 'skip': offset, 'include_docs': True}
        batch = [row.doc for row in db.view('_all_docs', **view_opts)]
        dump_docs(envelope, batch)
        offset += bulk_size
    envelope.close()
def get_couch_database(self, url, dbname):
    """
    Build a couchdb.Database instance for *dbname* on the server at *url*.

    :param url: CouchDB's server url with credentials
    :type url: str

    :param dbname: Database name
    :type dbname: str

    :return: couch library database instance
    :rtype: couchdb.Database

    :raise DatabaseDoesNotExist: Raised if database does not exist.
    """
    db_url = urljoin(url, dbname)
    try:
        database = Database(db_url, self._session)
    except ResourceNotFound:
        raise DatabaseDoesNotExist()
    return database
def load_db(fileobj, dburl, username=None, password=None, ignore_errors=False):
    """Load a multipart dump from *fileobj* into the database at *dburl*.

    For databases whose name contains ``-db``, document ids in certain
    categories are rewritten to embed the server hostname before loading.
    Missing target documents are created as stubs first so their ``_rev``
    can be attached to the incoming document.

    :param fileobj: file-like object containing the multipart dump
    :param dburl: URL of the target CouchDB database
    :param username: optional basic-auth user name
    :param password: optional basic-auth password
    :param ignore_errors: when True, log per-document failures and continue
        instead of raising
    """
    db = Database(dburl)
    if username is not None and password is not None:
        db.resource.credentials = (username, password)
    hostname = Is_Server(dburl, db.name)
    # docid categories that get renamed per-host in '-db' databases
    renamed_markers = ('credential', 'flow', 'setting', 'functions')
    for headers, is_multipart, payload in read_multipart(fileobj):
        docid = headers['content-id']
        # Fixed per-document pause; presumably throttles writes to avoid
        # overloading the server -- TODO confirm this is still needed.
        time.sleep(1)
        if '-db' in db.name:
            if any(marker in docid for marker in renamed_markers):
                docid = Rename_docid(docid, hostname)
        obj = db.get(docid)
        if obj is None:
            # Target doc does not exist yet: create a stub so it has a
            # revision we can read back below.
            new_doc = {'_id': docid}
            db.save(new_doc)
            obj = db.get(docid)
        if is_multipart:  # doc has attachments
            for headers, _, payload in payload:
                if 'content-id' not in headers:
                    # First inner part carries the JSON document itself.
                    doc = json.decode(payload)
                    doc['_attachments'] = {}
                else:
                    doc['_attachments'][headers['content-id']] = {
                        'data': b64encode(payload).decode('ascii'),
                        'content_type': headers['content-type'],
                        'length': len(payload),
                    }
            # NOTE(review): the multipart branch never copies obj's _rev/_id
            # onto doc, so these saves rely on the except handler below when
            # the target already exists -- confirm this is intentional.
        else:  # no attachments, just the JSON
            doc = json.decode(payload)
            doc['_rev'] = obj['_rev']
            doc['_id'] = obj['_id']
        print('Loading document %r' % docid, file=sys.stderr)
        try:
            db[docid] = doc
        except Exception as e:
            if not ignore_errors:
                raise
            print('Error: %s' % e, file=sys.stderr)
def dump_db(dburl, username=None, password=None, boundary=None,
            output=sys.stdout):
    """Stream every document in the database at *dburl* to *output* as a
    MIME multipart dump.

    Documents without attachments become a single ``application/json``
    part; documents with attachments become a nested multipart holding the
    JSON plus one part per attachment.

    :param dburl: URL of the CouchDB database to dump
    :param username: optional basic-auth user name
    :param password: optional basic-auth password
    :param boundary: optional MIME boundary for the envelope
    :param output: writable stream receiving the dump
    """
    db = Database(dburl)
    if username is not None and password is not None:
        db.resource.credentials = username, password
    envelope = write_multipart(output, boundary=boundary)
    for docid in db:
        doc = db.get(docid, attachments=True)
        print('Dumping document %r' % doc.id, file=sys.stderr)
        attachments = doc.pop('_attachments', {})
        jsondoc = json.encode(doc)
        doc_headers = {'Content-ID': doc.id, 'ETag': '"%s"' % doc.rev}
        if not attachments:
            envelope.add('application/json', jsondoc, doc_headers)
            continue
        parts = envelope.open(doc_headers)
        parts.add('application/json', jsondoc)
        for name, info in list(attachments.items()):
            ctype = info.get('content_type')
            if ctype is None:  # CouchDB < 0.8 used a dashed key
                ctype = info.get('content-type')
            parts.add(ctype, b64decode(info['data']), {'Content-ID': name})
        parts.close()
    envelope.close()
from datetime import datetime
import json

from django.contrib.auth.models import User
from django.contrib.auth.models import UserManager as Manager
from django.db.models.signals import class_prepared
from django.db.models.signals import post_save
from couchdb.client import Server, Database
from couchdb.http import PreconditionFailed

COUCHDB_HOST = 'http://211.101.12.224'
COUCHDB_PORT = '5984'
COUCHDB_ROOT_USERNAME = '******'
COUCHDB_ROOT_PASSWORD = '******'

# Module-level handles to the CouchDB server and its _users database,
# authenticated with the root credentials above.
server = Server('{0}:{1}'.format(COUCHDB_HOST, COUCHDB_PORT))
server.resource.credentials = (COUCHDB_ROOT_USERNAME, COUCHDB_ROOT_PASSWORD)
userdb = Database('{0}:{1}/_users'.format(COUCHDB_HOST, COUCHDB_PORT))
userdb.resource.credentials = (COUCHDB_ROOT_USERNAME, COUCHDB_ROOT_PASSWORD)


class UserManager(Manager):
    """automatically create corresponding user syncable database in couchdb"""

    def create_user(self, username, email=None, password=None):
        """Create a Django user and a matching per-user sync database.

        If the CouchDB database already exists (``PreconditionFailed``),
        the Django user is rolled back and the exception propagates.
        """
        user = super(UserManager, self).create_user(username, email, password)
        if user:
            try:
                server.create('user_sync_db_{0}'.format(user.pk))
            except PreconditionFailed:
                user.delete()
                # Bare `raise` re-raises the caught exception, preserving
                # its message and traceback; `raise PreconditionFailed`
                # would raise a fresh, empty instance instead.
                raise
        return user
def __init__(self, mountpoint, uri=None, *args, **kwargs):
    """Set up the filesystem for a single CouchDB document.

    :param mountpoint: path where the filesystem is mounted
    :param uri: URL of the form ``<db-url>/<doc-id>``; the final path
        segment is taken as the (percent-encoded) document id
    """
    fuse.Fuse.__init__(self, *args, **kwargs)
    db_uri, encoded_id = uri.rsplit('/', 1)
    self.doc_id = unquote(encoded_id)
    self.db = Database(db_uri)
def __init__(self, mountpoint, db_uri=None, *args, **kwargs):
    """Set up the filesystem for a CouchDB database.

    :param mountpoint: path where the filesystem is mounted
    :param db_uri: URL of the CouchDB database to expose
    """
    # print() call instead of the py2-only `print` statement, matching the
    # Python 3 style used elsewhere in this file.
    print("db_uri: " + repr(db_uri))
    fuse.Fuse.__init__(self, *args, **kwargs)
    self.db = Database(db_uri)