def init_db():
    server = CouchDBServer(db_addr)
    try:
        server.create(db_name)
    except PreconditionFailed:
        del server[db_name]
        server.create(db_name)
def main(global_config, **settings):
    config = Configurator(settings=settings)
    config.set_authentication_policy(AuthenticationPolicy(None))
    config.set_authorization_policy(AuthorizationPolicy())
    config.add_renderer('prettyjson', JSON(indent=4))
    config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
    config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
    config.add_subscriber(set_renderer, NewRequest)
    config.include("cornice")
    config.route_prefix = '/api/{}'.format(VERSION)
    config.scan("openprocurement.api.views")
    # CouchDB connection
    server = Server(settings.get('couchdb.url'))
    config.registry.couchdb_server = server
    db_name = os.environ.get('DB_NAME', settings['couchdb.db_name'])
    if db_name not in server:
        server.create(db_name)
    config.registry.db = server[db_name]
    # sync couchdb views
    sync_design(config.registry.db)
    # migrate data
    migrate_data(config.registry.db)
    # S3 connection
    if 'aws.access_key' in settings and 'aws.secret_key' in settings and 'aws.bucket' in settings:
        connection = S3Connection(settings['aws.access_key'], settings['aws.secret_key'])
        config.registry.s3_connection = connection
        bucket_name = settings['aws.bucket']
        if bucket_name not in [b.name for b in connection.get_all_buckets()]:
            connection.create_bucket(bucket_name, location=Location.EU)
        config.registry.bucket_name = bucket_name
    return config.make_wsgi_app()
def create_test_users_db(name=TEST_USERS_DB, server=TEST_SERVER):
    try:
        couchdb = Server(server)
        return couchdb.create(name)
    except PreconditionFailed:
        del couchdb[name]
        return couchdb.create(name)
def __init__(self, convoy_conf):
    LOGGER.info('Init Convoy...')
    self.convoy_conf = convoy_conf
    self.stop_transmitting = False
    self.transmitter_timeout = self.convoy_conf.get('transmitter_timeout', 10)
    self.documents_transfer_queue = Queue()
    self.timeout = self.convoy_conf.get('timeout', 10)
    self.api_client = APIClient(**self.convoy_conf['cdb'])
    self.lots_client = LotsClient(**self.convoy_conf['lots_db'])
    self.assets_client = AssetsClient(**self.convoy_conf['assets_db'])
    self.keys = ['classification', 'additionalClassifications', 'address',
                 'unit', 'quantity', 'location', 'id']
    self.document_keys = ['hash', 'description', 'title', 'url', 'format',
                          'documentType']
    user = self.convoy_conf['couchdb'].get('user', '')
    password = self.convoy_conf['couchdb'].get('password', '')
    if user and password:
        server = Server(
            "http://{user}:{password}@{host}:{port}".format(
                **self.convoy_conf['couchdb']),
            session=Session(retry_delays=range(10)))
    else:
        server = Server(
            "http://{host}:{port}".format(
                **self.convoy_conf['couchdb']),
            session=Session(retry_delays=range(10)))
    self.db = server[self.convoy_conf['couchdb']['db']] if \
        self.convoy_conf['couchdb']['db'] in server else \
        server.create(self.convoy_conf['couchdb']['db'])
    push_filter_doc(self.db)
    LOGGER.info('Added filters doc to db.')
def initialize(self):
    couch = Server()
    if "stories" not in couch:
        couch.create("stories")
    self.db_stories = couch['stories']
    if "posts" not in couch:
        couch.create("posts")
    self.db_posts = couch['posts']
def couchdb_connection(config):
    LOGGER = getLogger("BILLING")
    LOGGER.info("Start database initialization")
    # CouchDB connection
    db_name = config.get('db').get('name')
    db_host = config.get('db').get('host')
    db_port = config.get('db').get('port')
    admin_name = config.get('admin').get('username')
    admin_pass = config.get('admin').get('password')
    aserver = Server(
        create_db_url(db_host, db_port, admin_name, admin_pass),
        session=Session(retry_delays=range(10)))
    users_db = aserver['_users']
    username = config.get('user').get('username')
    password = config.get('user').get('password')
    user_doc = users_db.get(
        'org.couchdb.user:{}'.format(username),
        {'_id': 'org.couchdb.user:{}'.format(username)}
    )
    if not user_doc.get('derived_key', '') or \
            PBKDF2(password, user_doc.get('salt', ''),
                   user_doc.get('iterations', 10)).hexread(
                       int(len(user_doc.get('derived_key', '')) / 2)) != \
            user_doc.get('derived_key', ''):
        user_doc.update({
            "name": username,
            "roles": [],
            "type": "user",
            "password": password
        })
        LOGGER.info(
            "Updating api db main user",
            extra={'MESSAGE_ID': 'update_api_main_user'}
        )
        users_db.save(user_doc)
    if db_name not in aserver:
        aserver.create(db_name)
    db = aserver[db_name]
    SECURITY[u'members'][u'names'] = [username, ]
    if SECURITY != db.security:
        LOGGER.info(
            "Updating api db security",
            extra={'MESSAGE_ID': 'update_api_security'}
        )
        db.security = SECURITY
    auth_doc = db.get(VALIDATE_DOC_ID, {'_id': VALIDATE_DOC_ID})
    if auth_doc.get('validate_doc_update') != VALIDATE_DOC_UPDATE % username:
        auth_doc['validate_doc_update'] = VALIDATE_DOC_UPDATE % username
        LOGGER.info(
            "Updating api db validate doc",
            extra={'MESSAGE_ID': 'update_api_validate_doc'}
        )
        db.save(auth_doc)
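# The snippet above assigns into a module-level SECURITY mapping that is not
# shown. As a hedged sketch (an assumption, not the project's actual constant),
# a CouchDB _security object normally has the following shape, which is why the
# code can write to SECURITY[u'members'][u'names'] and compare it to db.security.
SECURITY = {
    u'admins': {u'names': [], u'roles': []},
    u'members': {u'names': [], u'roles': []},
}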
def get_db(self):
    try:
        server = Server("http://127.0.0.1:5984/")
        if 'cia_factbook' not in server:
            server.create('cia_factbook')
        db = server['cia_factbook']
    except:
        print("DB_ERROR")
        os._exit(-1)
    return db
def create_test_users_db(config=None):
    if config is None:
        config = load_test_config()
    name = config['couchdb.users_database']
    try:
        couchdb = Server(config['couchdb.address'])
        return couchdb.create(name)
    except PreconditionFailed:
        del couchdb[name]
        return couchdb.create(name)
def main(bulk_size, up_to, num_threads):
    global timer
    s = Server('http://localhost:5984')
    if 'test' in s:
        del s['test']
    db = s.create('test')
    stats_file = 'bulk-perf-%s-%s-%s.dat' % (bulk_size, up_to, num_threads)
    title_file = stats_file + '.meta'
    f = open(title_file, 'w')
    f.write('Bulk size: %s, num threads: %s' % (bulk_size, num_threads))
    f.close()
    stats_file = open(stats_file, 'w')
    stats_lock = Lock()
    exit_event = Event()
    chunks = Queue.Queue()

    def process_chunks():
        global count, timer, internal_counter
        s = Server('http://localhost:5984')
        db = s['test']
        while not exit_event.isSet():
            try:
                chunk = list(chunks.get(timeout=5))
                chunks.task_done()
                db.update(chunk)
                stats_lock.acquire()
                try:
                    count += bulk_size
                    internal_counter += bulk_size
                    if internal_counter >= max(num_threads * bulk_size, up_to / 1000):
                        end = time()
                        stats_file.write(
                            '%s %s\n' % (count, internal_counter / float(end - timer)))
                        stats_file.flush()
                        timer = end
                        internal_counter = 0
                        print '%.1f%%' % (float(count) / up_to * 100)
                finally:
                    stats_lock.release()
            except Queue.Empty:
                pass
            except Exception, e:
                print 'Exception: %r' % (e, )
                chunks.put(chunk)
                sleep(1)
def _prepare_couchdb(self):
    server = Server(self.couch_url, session=Session(retry_delays=range(10)))
    try:
        if self.db_name not in server:
            self.db = server.create(self.db_name)
        else:
            self.db = server[self.db_name]
    except Exception as e:
        LOGGER.error('Database error: {}'.format(repr(e)))
        raise

    by_date_modified_view = ViewDefinition(
        self.resource, 'by_dateModified', '''function(doc) {
            if (doc.doc_type == '%(resource)s') {
                var fields = ['%(doc_type)sID'], data = {};
                for (var i in fields) {
                    if (doc[fields[i]]) {
                        data[fields[i]] = doc[fields[i]]
                    }
                }
                emit(doc.dateModified, data);
            }
        }''' % dict(resource=self.resource[:-1].title(),
                    doc_type=self.resource[:-1])
    )
    by_date_modified_view.sync(self.db)

    validate_doc = self.db.get(VALIDATE_BULK_DOCS_ID,
                               {'_id': VALIDATE_BULK_DOCS_ID})
    if validate_doc.get('validate_doc_update') != VALIDATE_BULK_DOCS_UPDATE:
        validate_doc['validate_doc_update'] = VALIDATE_BULK_DOCS_UPDATE
        self.db.save(validate_doc)
        LOGGER.info('Validate document update view saved.')
    else:
        LOGGER.info('Validate document update view already exist.')
def test_start(self):
    service = self._start_service()
    couch = Server("http://%s:%d" % (service.host, service.port))
    database = couch.create("test-database")
    posted = {"this": "value"}
    identifier, revision = database.save(posted)
    self.assertEqual(posted, database[identifier])
class PostTestCase(unittest.TestCase):
    def setUp(self):
        self.server = Server(settings.COUCHDB_SERVER)
        try:
            self.db = self.server.create('comfy_blog_test')
        except:
            self.db = self.server['comfy_blog_test']
        self.post1 = Post(title=u"Hello, World!", slug=u"foo-bar",
                          published=datetime(2008, 8, 8),
                          author={'name': 'Myles Braithwaite',
                                  'email': '*****@*****.**'})
        self.post2 = Post(title=u"Hello, World!",
                          published=datetime(2007, 7, 7))
        self.post1.store()
        self.post2.store()

    def testURL(self):
        self.assertEquals(self.post1.get_absolute_url(),
                          '/blog/2008/8/8/foo-bar/')
        self.assertEquals(self.post2.get_absolute_url(),
                          '/blog/2007/7/7/hello-world/')

    def testSlugify(self):
        self.assertEquals(self.post2.slug, 'hello-world')

    #def testAddComment(self):
    #    post = Post.load(self.db, self.post1.id)
    #    coment = post.comments()
    #    comment.author = {'name': u"Myles Braithwaite", 'email': "*****@*****.**", 'url': u"http://mylesbraithwaite.com/"}
    #    comment.comment = u"Hello, World!"
    #    comment.time = datetime.now()
    #    comment.user_agent = u"Python Unit Test"
    #    comment.ip_address = u"127.0.0.1"
    #    comment.is_spam = False
    #    post.store()
    #    # TODO Still working on doing something here to see if the test actually worked.

    def tearDown(self):
        del self.server['comfy_blog_test']
def handle(self, *args, **options):
    verbosity = options['verbosity']
    if verbosity == '0':
        self.logger.setLevel(logging.ERROR)
    elif verbosity == '1':
        self.logger.setLevel(logging.WARNING)
    elif verbosity == '2':
        self.logger.setLevel(logging.INFO)
    elif verbosity == '3':
        self.logger.setLevel(logging.DEBUG)

    self.offset = int(options['offset'])
    self.limit = int(options['limit'])

    self.logger.info("Starting export!")

    server = Server()
    if 'maps-places' not in server:
        self.db = server.create('maps-places')
    else:
        self.db = server['maps-places']

    # places uri startup
    places_uri = "{0}/maps/places".format(settings.OP_API_URI)
    self.logger.debug("GET {0}".format(places_uri))

    # get all places, following the next link
    places_json = self.export_page(places_uri)
    while places_json and places_json['next']:
        places_json = self.export_page(places_json['next'])
class CouchStore(object):
    """ represents one couch DB instance """

    def __init__(self, instance, server='localhost', port=5984, username=None,
                 password=None, can_create=False, must_create=False, max_retry=2):
        self._url = 'http://%s:%s@%s:%d' % (username, password, server, port) \
            if username else 'http://%s:%d' % (server, port)
        self._db_name = instance
        self._server_name = server
        self._max_retry = max_retry
        self._server = Server(self._url)
        if can_create or must_create:
            try:
                log.debug('creating %s on %s', instance, self._url)
                self._db = self._server.create(self._db_name)
            except Exception, e:
                if must_create:
                    raise self._wrap_error('initialize', self._db_name, e)
                else:
def __init__(self, config):
    super(LogsCollector, self).__init__()
    self.config = config
    self.storage = self.config_get('storage')
    if not self.storage:
        raise LogsCollectorConfigError('Configuration Error: Missing logs'
                                       ' storage.')
    if self.storage == 'couchdb':
        self.couch_url = self.config_get('couch_url')
        self.db_name = self.config_get('log_db')
        if not self.couch_url:
            raise LogsCollectorConfigError('Configuration Error: Missing '
                                           'couch_url')
        else:
            couch_url = urlparse(self.couch_url)
            if couch_url.scheme == '' or couch_url.netloc == '':
                raise LogsCollectorConfigError('Configuration Error:'
                                               'Invalid couch_url')
        if not self.db_name:
            raise LogsCollectorConfigError('ConnectionError: Missing '
                                           'couchdb name')
        server = Server(self.couch_url,
                        session=Session(retry_delays=range(10)),
                        full_commit=False)
        try:
            if self.db_name not in server:
                self.db = server.create(self.db_name)
            else:
                self.db = server[self.db_name]
        except error as e:
            logger.error('Database error: {}'.format(e.message))
            raise LogsCollectorConfigError(e.message)
def setUp(self):
    user = self.config['main']['couch_url'].get('user', '')
    password = self.config['main']['couch_url'].get('password', '')
    if user and password:
        self.couch_url = "http://{user}:{password}@{host}:{port}".format(
            **self.config['main']['couch_url'])
    else:
        self.couch_url = "http://{host}:{port}".format(
            **self.config['main']['couch_url'])
    server = Server(self.couch_url)
    if self.config['main']['db_name'] in server:
        self.db = server[self.config['main']['db_name']]
    else:
        self.db = server.create(self.config['main']['db_name'])
    array_path = os.path.dirname(os.path.abspath(__file__)).split('/')
    app_path = ""
    for p in array_path[:-1]:
        app_path += p + '/'
    app_path += 'couch_views'
    # for resource in ('/lots'):
    #     push_views(couchapp_path=app_path + resource,
    #                couch_url=couchdb_url)
    validate_doc = {
        '_id': VALIDATE_BULK_DOCS_ID,
        'validate_doc_update': VALIDATE_BULK_DOCS_UPDATE
    }
    try:
        self.db.save(validate_doc)
    except Exception:
        pass
def __init__(self, config):
    """ Check CouchDB availability and set _db attribute """
    server, db = config.get("COUCH_DATABASE").rsplit('/', 1)
    server = Server(server, session=Session(retry_delays=range(10)))
    database = server[db] if db in server else server.create(db)
    self._db = database
class Connection(object):
    """
    Facade for the CouchDB client module Server object.
    https://pythonhosted.org/CouchDB/client.html#server

    Most DDL operations require Database Admin or Server Admin privilege,
    and they can also be done directly in the CouchDB Fauxton UI or through
    curl API calls, so only a minimal DDL implementation is required here.
    """

    def __init__(self, conn):
        self.conn = conn
        self.server = None
        self.establish()

    def establish(self):
        try:
            self.server = Server(url=self.conn)
        except Unauthorized:
            msg = 'Unauthorized error. Check connection string: {}'.format(
                self.conn)
            logging.error(msg)
        except ConnectionRefusedError:
            msg = 'Can not connect to {}'.format(self.conn)
            logging.error(msg)

    def spawn(self, name):
        try:
            new_db = self.server.create(name)
            if new_db is not None:
                msg = '[SPAWN DB] spawning new database --: {}'.format(name)
                logging.info(msg)
                return new_db
        except PreconditionFailed:
            msg = '[SPAWN DB] Existing database --: {}'.format(name)
            logging.warning(msg)
            return self.server[name]
        except Unauthorized:
            msg = 'Unauthorized error. You are not a server admin: {}'.format(
                self.conn)
            logging.error(msg)

    def drop(self, name):
        try:
            self.server.delete(name)
            msg = '[DROP DB] --: {}'.format(name)
            logging.warning(msg)
            return True
        except ResourceNotFound:
            msg = '[DROP DB] Database does not exist --: {}'.format(name)
            logging.warning(msg)
            return False
        except Unauthorized:
            msg = 'Unauthorized error. You are not a server admin: {}'.format(
                self.conn)
            logging.error(msg)
            return False
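# Minimal usage sketch for the Connection facade above, assuming a reachable
# CouchDB server; the connection string and database name are placeholders,
# not values from the original project.
conn = Connection('http://admin:secret@127.0.0.1:5984/')
db = conn.spawn('example_db')      # creates the db, or returns it if it already exists
if db is not None:
    db.save({'type': 'smoke_test'})
conn.drop('example_db')            # returns True on success, False otherwise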
def __init__(self):
    couchdb_host = getattr(settings, 'COUCHDB_HOST')
    server = Server(couchdb_host)
    try:
        self.db = server['wiki']
    except ResourceNotFound:
        self.db = server.create('wiki')
    Page.get_pages.sync(self.db)
    self.wiki_form = curry(PageForm, db=self.db)
def connect_to_db_server(url, retries, check_connection=True):
    """Connect to the CouchDB.

    :param retries: list like [1, 2, 3]
    """
    server = Server(url, session=Session(retry_delays=retries))
    if check_connection:
        db_name = ''.join(('test_', uuid4().hex))
        try:
            server.create(db_name)
        except socket.error:
            LOGGER.error("Cannot use DB due to socket error")
            return None
        except Exception:
            LOGGER.error("Cannot use DB due to some error")
            return None
        else:
            server.delete(db_name)
    return server
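# Hedged usage sketch for connect_to_db_server above; the URL is a placeholder.
# The retries list is passed to couchdb-python's Session as retry_delays, i.e.
# the delays (in seconds) between retries of failed requests; the throwaway
# test database exercises the connection before the server object is returned.
server = connect_to_db_server('http://127.0.0.1:5984', retries=[1, 2, 4])
if server is None:
    raise SystemExit('CouchDB is not reachable')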
def get_couchdb(dbname, uri=None):
    if uri:
        server = Server(uri)
    else:
        server = Server()
    if dbname in server:
        db = server[dbname]
    else:
        db = server.create(dbname)
    return db
def create_db(database):
    server = Server('http://localhost:5984/')
    try:
        db = server.create(database)
        logger.info('[DB] Database %s created' % database)
    except PreconditionFailed:
        db = server[database]
        logger.info('[DB] Database %s already exists.' % database)
    return db
class GeonamesLoader:
    total_inserts = 0

    def __init__(self):
        uri = 'http://localhost:5984/'
        self.cache_dir = tempfile.mkdtemp(prefix='couchdb')
        self.server = Server(uri, cache=self.cache_dir)
        try:
            self.db = self.server['geonames']
        except ResourceNotFound:
            self.db = self.server.create('geonames')

    def load_data(self, data_file_path, field_keys):
        data_file = open(data_file_path, 'r')
        docs = []
        max_docs_per_insert = 1000
        i = 0
        for line in data_file:
            values = line.decode('utf8')
            values = values.rstrip().split("\t")
            doc = dict(zip(field_keys, values))
            # remove blank values
            for k, v in doc.items():
                if len(v) == 0:
                    del doc[k]
            doc['_id'] = uuid4().hex
            docs.append(doc)
            if i == max_docs_per_insert:
                self.bulk_insert_docs(docs)
                docs = []
                i = 0
            i = i + 1
        self.bulk_insert_docs(docs)
        data_file.close()

    def bulk_insert_docs(self, docs):
        num_docs = len(docs)
        if num_docs != 0:
            self.db.update(docs)
            print "inserted %d docs" % num_docs
            self.total_inserts = self.total_inserts + num_docs

    def __del__(self):
        shutil.rmtree(self.cache_dir)
def prepare_couchdb(couch_url, db_name, logger):
    server = Server(couch_url, session=Session(retry_delays=range(10)))
    try:
        if db_name not in server:
            db = server.create(db_name)
        else:
            db = server[db_name]
    except error as e:
        logger.error('Database error: {}'.format(e.message))
        raise ConfigError(e.strerror)
    sync_design(db)
    return db
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    config = Configurator(settings=settings)
    config.set_root_factory(root_factory)

    # security policies
    authn_policy = AuthenticationPolicy()
    authz_policy = ACLAuthorizationPolicy()
    config.set_authentication_policy(authn_policy)
    config.set_authorization_policy(authz_policy)
    config.add_request_method(authenticated_role, reify=True)
    config.add_request_method(request_params, 'params', reify=True)
    config.add_request_method(extract_corporation, 'corporation', reify=True)
    config.add_request_method(corporation_from_data)
    config.registry.model = Corporation

    # database settings
    server = CouchdbServer(settings['couchdb.uri'])
    config.registry.couchdb_server = server
    if settings['couchdb.db'] not in server:
        server.create(settings['couchdb.db'])
    config.registry.db = server[settings['couchdb.db']]
    sync_design(config.registry.db)

    config.include('cornice')
    config.include('pyramid_jinja2')
    config.include('.routes')
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.scan()
    # config.add_view('messaging.traversal.root_factory', renderer='json')
    # config.add_view('messaging.views.my_view', renderer='json')
    # config.add_route('home', '/')
    # config.scan()
    return config.make_wsgi_app()
def create_db(name):
    ''' Create a new database with the given name. '''
    server = Server('http://localhost:5984/')
    try:
        db = server.create(name)
        logger.info('[DB] Database %s created' % name)
    except PreconditionFailed:
        db = server[name]
        logger.info('[DB] Database %s already exists.' % name)
    return db
def get_db():
    from couchdb import Server
    server = getenv("CDB_SERVER")
    db = getenv("JOB_CURRENT")
    db = str(db).strip().lower() + "_render_log"
    try:
        server = Server(server)
        if db in server:
            db = server[db]
        else:
            db = server.create(db)
    except:
        return 0
    return db
def setUp(self):
    user = self.config['main']['storage'].get('user', '')
    password = self.config['main']['storage'].get('password', '')
    if user and password:
        self.couch_url = "http://{user}:{password}@{host}:{port}".format(
            **self.config['main']['storage'])
    else:
        self.couch_url = "http://{host}:{port}".format(
            **self.config['main']['storage'])
    server = Server(self.couch_url)
    if self.config['main']['storage']['db_name'] in server:
        self.db = server[self.config['main']['storage']['db_name']]
    else:
        self.db = server.create(self.config['main']['storage']['db_name'])
class DatabaseBackend(object):
    def __init__(self, server=SERVER_HOST, database_name=DATABASE_NAME,
                 *args, **kwargs):
        self.url = server
        self.database_name = database_name  # kept so __unicode__ can report it
        self.server = Server(self.url)
        try:
            self.database = self.server[database_name]
        except ResourceNotFound:
            self.database = self.server.create(database_name)

    def save(self, document, obj):
        document.store(self.database)
        return obj

    def get_data_records_type(self, entity):
        endkey = [entity.uuid,
                  int(mktime(datetime.datetime.now().timetuple())) * 1000]
        return self.filter_rows_by_uuid(entity.uuid, 'data_types', endkey=endkey)

    def get_data_records_aggregated(self, entity, data_records_func, asof):
        aggregated_result = {}
        endkey = [entity.uuid, int(mktime(asof.timetuple())) * 1000]
        for name, aggregation_type in data_records_func.items():
            data = self.filter_rows_by_uuid(entity.uuid, aggregation_type, endkey)
            aggregated_result[name] = data[name] if name in data.keys() else None
        return aggregated_result

    def filter_rows_by_uuid(self, uuid, aggregation_type, endkey=None):
        view_url = '_design/' + DESIGN_DOCUMENT_NAME + '/_view/' + VIEWS[aggregation_type]
        rows = self.database.view(view_url, group=True, group_level=1,
                                  endkey=endkey).rows
        value = None
        for row in rows:
            if row.key[0] == uuid:
                value = row.value
                break
        return value

    def get(self, uuid, document):
        return document.load(self.database, uuid)

    def __unicode__(self):
        return u"Connected on %s - working on %s" % (self.url, self.database_name)

    def __str__(self):
        return unicode(self)

    def __repr__(self):
        return repr(self.database)
def init():
    log.info('Initialize database')
    from ctx import config
    from couchdb import Server
    global db, server
    server = Server(url=config.DB_URL)
    log.info('db server: {!r}'.format(config.DB_URL))
    try:
        db = server[config.DB_NAME]
    except couchdb.http.ResourceNotFound:
        db = server.create(config.DB_NAME)
        log.info('db {!r} created'.format(config.DB_NAME))
def main(global_config, **settings):
    config = Configurator(settings=settings)
    config.add_renderer('prettyjson', JSON(indent=4))
    config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
    config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
    config.add_subscriber(set_renderer, NewRequest)
    config.include("cornice")
    config.route_prefix = '/api/{}'.format(VERSION)
    config.scan("openprocurement.api.views")
    # CouchDB connection
    server = Server(settings.get('couchdb.url'))
    config.registry.couchdb_server = server
    db_name = os.environ.get('DB_NAME', settings['couchdb.db_name'])
    if db_name not in server:
        server.create(db_name)
    config.registry.db = server[db_name]
    # sync couchdb views
    sync_design(config.registry.db)
    # migrate data
    migrate_data(config.registry.db)
    return config.make_wsgi_app()
def load_database(entries):
    ''' Loads entries data into database '''
    # Open connection
    server = Server(DB_HOST)
    # Create a clean database
    if DB_NAME in server:
        del server[DB_NAME]
    db = server.create(DB_NAME)
    keys = []
    for entry in entries:
        # store each entry as its own document
        db.save(entry)
def init_clients(config):
    exceptions = []
    clients_from_config = {
        'api_client': {'section': 'cdb', 'client_instance': APIClient},
        'lots_client': {'section': 'lots_db', 'client_instance': LotsClient},
        'assets_client': {'section': 'assets_db', 'client_instance': AssetsClient},
    }
    result = ''
    for key, item in clients_from_config.items():
        try:
            client = item['client_instance'](**config[item['section']])
            clients_from_config[key] = client
            result = ('ok', None)
        except Exception as e:
            exceptions.append(e)
            result = ('failed', e)
        LOGGER.check('{} - {}'.format(key, result[0]), result[1])
    if not hasattr(clients_from_config['api_client'], 'ds_client'):
        LOGGER.warning("Document Service configuration is not available.")
    try:
        user = config['couchdb'].get('user', '')
        password = config['couchdb'].get('password', '')
        url = "http://{host}:{port}".format(**config['couchdb'])
        result = 'couchdb without user'
        if user and password:
            url = "http://{user}:{password}@{host}:{port}".format(**config['couchdb'])
            result = 'couchdb - authorized'
        LOGGER.info(result)
        server = Server(url, session=Session(retry_delays=range(10)))
        db = server[config['couchdb']['db']] if \
            config['couchdb']['db'] in server else \
            server.create(config['couchdb']['db'])
        clients_from_config['db'] = db
        result = ('ok', None)
        push_filter_doc(db)
        LOGGER.info('Added filters doc to db.')
    except Exception as e:
        exceptions.append(e)
        result = ('failed', e)
    LOGGER.check('couchdb - {}'.format(result[0]), result[1])
    if exceptions:
        raise exceptions[0]
    return clients_from_config
def create_database():
    server = Server('http://localhost:8888/')
    db = server.create('unit_tasks')
    #XXX: figure out how to run python view functions?
    db.create({
        "_id": "_design/users",
        "views": {
            "isvalid": """
                function(doc) {
                    if (doc.type == 'User') map([doc._id, doc.password], doc);
                }"""
        }
    })
    User(_id="llimllib", password=md5("tao").hexdigest()).store(db)
def insert_value(body):
    print("[X] Received time:" + str(body["t"]) + " and temperature: " + str(body["T"]))
    try:
        couch = Server(os.environ["COUCH_URI"])
        dbname = "plant"
        if dbname in couch:
            db = couch[dbname]
        else:
            db = couch.create(dbname)
        messurment = Record(host=body["h"], messurement=body["T"])
        db.save(messurment)
        print(messurment, "Record inserted successfully into weather table")
    except Exception as error:
        print("Error while connecting to CouchDB", error)
def prepare_couchdb(couch_url, db_name, logger, errors_doc, couchdb_filter):
    server = Server(couch_url, session=Session(retry_delays=range(10)))
    try:
        if db_name not in server:
            db = server.create(db_name)
        else:
            db = server[db_name]
        broken_lots = db.get(errors_doc, None)
        if broken_lots is None:
            db[errors_doc] = {}
        prepare_couchdb_filter(db, 'lots', 'status', couchdb_filter, logger)
    except error as e:
        logger.error('Database error: {}'.format(e.message))
        raise ConfigError(e.strerror)
    return db
def couchdb_initializer():
    try:
        server = Server(url=URL)
        try:
            db = server[DB_NAME]
        except:
            db = server.create(DB_NAME)
        with open(TWEETS_PATH, 'r') as initial_tweets:
            for line in initial_tweets:
                tweet = json.loads(line)
                tweet = reformattweet(tweet)
                db.save(tweet)
        callSync(db)
        return db
    except:
        logger.log_error("Cannot find CouchDB Server...")
        raise
def prepare_couchdb(couch_url, db_name, logger):
    server = Server(couch_url, session=Session(retry_delays=range(10)))
    try:
        if db_name not in server:
            db = server.create(db_name)
        else:
            db = server[db_name]
    except error as e:
        logger.error('Database error: {}'.format(e.message))
        raise DataBridgeConfigError(e.strerror)
    validate_doc = db.get(VALIDATE_BULK_DOCS_ID, {'_id': VALIDATE_BULK_DOCS_ID})
    if validate_doc.get('validate_doc_update') != VALIDATE_BULK_DOCS_UPDATE:
        validate_doc['validate_doc_update'] = VALIDATE_BULK_DOCS_UPDATE
        db.save(validate_doc)
        logger.info('Validate document update view saved.')
    else:
        logger.info('Validate document update view already exist.')
    return db
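# The VALIDATE_BULK_DOCS_UPDATE constant used above is defined elsewhere in the
# project. As an illustrative assumption only, such a constant typically holds
# the JavaScript source of a CouchDB validate_doc_update function, stored in the
# design-like document written above; a minimal sketch might look like this.
VALIDATE_BULK_DOCS_UPDATE = """
function(newDoc, oldDoc, userCtx) {
    // reject writes from anyone who is not a server admin
    if (userCtx.roles.indexOf('_admin') === -1) {
        throw({forbidden: 'Only admins may edit this database'});
    }
}
"""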
class CouchdbApiClient(ClientInterface):
    """.. note:: Prefer :class:`CloudantApiClient`."""

    def __init__(self, url):
        from couchdb import Server
        self.server = Server(url=url)

    # noinspection PyBroadException
    def get_database(self, db_name):
        try:
            return self.server[db_name]
        except Exception:
            return self.server.create(db_name)

    def get_database_interface(self, db_name_backend, db_name_frontend=None,
                               external_file_store=None, db_type=None):
        db_name_frontend_final = db_name_frontend if db_name_frontend is not None else db_name_backend
        return CouchdbApiDatabase(db=self.get_database(db_name=db_name_backend),
                                  db_name_frontend=db_name_frontend_final,
                                  external_file_store=external_file_store)

    def delete_database(self, db_name):
        self.server.delete(db_name)
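# Hedged usage sketch for CouchdbApiClient above; the URL and database names are
# placeholders, and CouchdbApiDatabase is the surrounding project's own wrapper,
# not part of couchdb-python.
client = CouchdbApiClient('http://127.0.0.1:5984/')
raw_db = client.get_database('example_backend')        # created on first access
iface = client.get_database_interface('example_backend',
                                      db_name_frontend='example')
client.delete_database('example_backend')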
class PresentationTestCase(unittest.TestCase):
    def setUp(self):
        self.server = Server(settings.COUCHDB_SERVER)
        try:
            self.db = self.server.create('comfy_blog_test')
        except:
            self.db = self.server['comfy_blog_test']
        self.presentation = Presentation(title=u"Hello, World!",
                                         slug=u"hello-world",
                                         published=datetime(2008, 8, 8),
                                         author={'name': 'Myles Braithwaite',
                                                 'email': '*****@*****.**'})
        self.presentation.store(self.db)

    def testAddSlides(self):
        self.presentation.slides = [{
            'title': u"First Slide",
            'body': u"Hello, World!",
            'notes': u"<p>Hello, World!",
        }]
        self.presentation.store(self.db)

    def tearDown(self):
        del self.server['comfy_blog_test']
def test_push_views(self):
    with self.assertRaises(Exception) as e:
        push_views()
    self.assertEqual(
        e.exception.message,
        'Can\'t push couchapp. Please check '
        '\'couchapp_path\' or \'couch_url\'.')
    server = Server(self.couch_url)
    db_name = 'test_' + uuid.uuid4().hex
    db = server.create(db_name)
    self.assertEqual(db.get('_design/auctions'), None)
    array_path = os.path.dirname(os.path.abspath(__file__)).split('/')
    app_path = ""
    for p in array_path[:-1]:
        app_path += p + '/'
    app_path += 'couch_views'
    push_views(couchapp_path=app_path + '/auctions',
               couch_url=self.couch_url + '/' + db_name)
    self.assertNotEqual(db.get('_design/auctions'), None)
    with self.assertRaises(DataBridgeConfigError) as e:
        push_views(couchapp_path='/haha', couch_url='')
    self.assertEqual(e.exception.message, 'Invalid path to couchapp.')
def OnLogin(self, event):
    self.user = User()
    with dialog(dict(dialog=LoginDialog, user=self.user)) as val:
        """ do validation here """
        if self.user.username == FAKE_USER and self.user.password == FAKE_PASSWORD:
            try:
                s = Server(self.URL)
                blog = s.create(BLOG)
                dlg = wx.MessageDialog(
                    self,
                    "Database {0} does not exist. Do you want to create it?".format(BLOG),
                    "Database not found", style=wx.YES_NO)
                if dlg.ShowModal() == wx.ID_YES:
                    from couchdb.design import ViewDefinition
                    ViewDefinition.sync_many(
                        blog, [Design.all, Design.by_date, Design.by_author,
                               Design.tags, Design.attachments])
                    p = Post()
                    p.author = self.user.username
                    p.subject = "Welcome Post"
                    p.content = "First Post. See that a <b>screenshot</b> of your computer is included as attachment."
                    p.date = datetime.now()
                    p.tags = ["GENERAL", "WELCOME"]
                    p.store(blog)
                    sfile = REGEXP.sub("", "screenshot{0}".format(datetime.now()))
                    sfile = "{0}.png".format(sfile)
                    screenshot = Screenshot(filename=sfile)
                    doc = blog[p.id]
                    f = open(sfile, "rb")
                    blog.put_attachment(doc, f, sfile)
                    f.close()
                else:
                    del s[BLOG]
                    dlg.Destroy()
                    self.Close()
                dlg.Destroy()
            except:
                pass
        else:
            self.user.username = None
    if not self.user.username:
        self.Close()
def setUp(self):
    server = Server(self.config['main']['couch_url'])
    if self.config['main']['db_name'] in server:
        self.db = server[self.config['main']['db_name']]
    else:
        self.db = server.create(self.config['main']['db_name'])
    array_path = os.path.dirname(os.path.abspath(__file__)).split('/')
    app_path = ""
    for p in array_path[:-1]:
        app_path += p + '/'
    app_path += 'couch_views'
    couchdb_url = self.config['main']['couch_url'] \
        + '/' + self.config['main']['db_name']
    for resource in ('/tenders', '/plans', '/contracts', '/auctions'):
        push_views(couchapp_path=app_path + resource,
                   couch_url=couchdb_url)
    validate_doc = {
        '_id': VALIDATE_BULK_DOCS_ID,
        'validate_doc_update': VALIDATE_BULK_DOCS_UPDATE
    }
    try:
        self.db.save(validate_doc)
    except Exception:
        pass
def buildup_test(self=test_couch_fuse):
    # Initialize database (drop any leftover test database first)
    server = Server()
    if 'test_couch_fuse' in server:
        del server['test_couch_fuse']
    db = server.create('test_couch_fuse')
    self.db = db
    self.db["_design/file"] = {
        "views": {
            "all": {
                "map": "function (doc) {\n emit(doc.id, doc) \n}"
            }
        }
    }

    # Add directory to test fuse
    self.directory = id_generator()
    os.system("mkdir " + self.directory)
    print "directory : %s" % self.directory

    # Add files in database
    doc1 = {"name": "file1.txt", "slug": "file1.txt"}
    doc_id = self.db.create(doc1)
    self.db.put_attachment(self.db[doc_id], "File1 :", filename="file1.txt")
    doc2 = {"name": "file2.txt", "slug": "file2.txt"}
    doc_id = self.db.create(doc2)
    self.db.put_attachment(self.db[doc_id], "File2 :", filename="file2.txt")

    # Run tests
    mname = 'test_Initialisation'

    def doTest(self):
        self._test_initialisation()
        self._test_read_file("file1.txt")
        self._test_add_empty_file("file3.txt")
        self._test_modify_file("file3.txt")
        self._test_create_directory("foo")
        self._test_add_empty_file("foo/test.txt")
        self._test_copy_file()
        self._test_remove_file_fs("file1.txt")
        self._test_remove_file_db("file2.txt")
        self._test_add_file_db("file_db.txt")

    setattr(self, mname, doTest)
    DB_Name = 'tweets_stream_' + sys.argv[1][0:3]
else:
    print("wrong city name should be (sydney or melbourne)")
    sys.exit()

auth_id = 0
if len(sys.argv) > 2:
    auth_id = int(sys.argv[2])

# connect to couchdb
# server = Server('http://*****:*****@127.0.0.1:5984/')
server = Server(SERVER_ADDR)
try:
    db = server[DB_Name]
except:
    db = server.create(DB_Name)

# instance to do sentiment analysis
analyzer = SentimentIntensityAnalyzer()


# time_label added
def time_label(tweet_time):
    time_parse = tweet_time.split(' ')[3]
    time_tag = time_parse[:2]
    return time_tag


class MyStreamListener(tweepy.StreamListener):
    def on_data(self, data):
        try:
class CouchDbPersister(Persister):
    """
    A basic couchDB persister.
    Note that the couchDB persister is designed not to overwrite the value of a
    key if the key already exists. You can subclass it and use update_one
    instead of insert_one if you want to be able to overwrite data.

    >>> s = CouchDbPersister()
    >>> for _id in s:  # deleting all docs in tmp
    ...     del s[_id]
    >>> k = {'_id': 'foo'}
    >>> v = {'val': 'bar'}
    >>> k in s  # see that key is not in store (and testing __contains__)
    False
    >>> len(s)
    0
    >>> s[k] = v
    >>> len(s)
    1
    >>> list(s)
    [{'_id': 'foo'}]
    >>> s[k]
    {'val': 'bar'}
    >>> s.get(k)
    {'val': 'bar'}
    >>> s.get({'not': 'a key'}, {'default': 'val'})  # testing s.get with default
    {'default': 'val'}
    >>> list(s.values())
    [{'val': 'bar'}]
    >>> k in s  # testing __contains__ again
    True
    >>> del s[k]
    >>> len(s)
    0
    >>>
    >>> s = CouchDbPersister(db_name='py2store', key_fields=('name',), data_fields=('yob', 'proj', 'bdfl'))
    >>> for _id in s:  # deleting all docs in tmp
    ...     del s[_id]
    >>> s[{'name': 'guido'}] = {'yob': 1956, 'proj': 'python', 'bdfl': False}
    >>> s[{'name': 'vitalik'}] = {'yob': 1994, 'proj': 'ethereum', 'bdfl': True}
    >>> for key, val in s.items():
    ...     print(f"{key}: {val}")
    {'name': 'guido'}: {'yob': 1956, 'proj': 'python', 'bdfl': False}
    {'name': 'vitalik'}: {'yob': 1994, 'proj': 'ethereum', 'bdfl': True}
    """

    def clear(self):
        raise NotImplementedError(
            "clear is disabled by default, for your own protection! "
            "Loop and delete if you really want to."
        )

    def __init__(
        self,
        user='******',
        password='******',
        url='http://127.0.0.1:5984',
        db_name='py2store',
        key_fields=('_id',),
        data_fields=None,
        couchdb_client_kwargs=None
    ):
        if couchdb_client_kwargs is None:
            couchdb_client_kwargs = {}
        if user and password:
            # put credentials in url if provided like https://username:[email protected]:5984/
            if '//' in url:  # if scheme present
                url = f'{url.split("//")[0]}//{user}:{password}@{url.split("//")[1]}'
            else:
                url = f'http://{user}:{password}@{url}'
        self._couchdb_server = Server(url=url, **couchdb_client_kwargs)
        self._db_name = db_name
        # if db not created
        if db_name not in self._couchdb_server:
            self._couchdb_server.create(db_name)
        self._cdb = self._couchdb_server[db_name]
        if isinstance(key_fields, str):
            key_fields = (key_fields,)
        # filter out _rev field on output
        if data_fields is None:
            self._data_fields = {k: False for k in key_fields}
            if '_rev' not in key_fields:
                self._data_fields['_rev'] = False
        elif not isinstance(data_fields, dict) and isinstance(data_fields, Iterable):
            self._data_fields = {k: True for k in data_fields}
            if '_id' not in data_fields:
                self._data_fields['_id'] = False
            if '_rev' not in self._data_fields:
                self._data_fields['_rev'] = False
        self._key_fields = key_fields

    def __getitem__(self, k):
        mango_q = {
            'selector': k,
        }
        docs = self._cdb.find(mango_q, self.__return_doc_filter)
        docs = list(docs)
        if len(docs) != 0:
            return docs[0]
        else:
            raise KeyError(f"No document found for query: {k}")

    def __setitem__(self, k, v):
        return self._cdb.save(dict(k, **v))

    def __delitem__(self, k):
        if len(k) > 0:
            mango_q = {
                'selector': k,
            }
            docs = self._cdb.find(mango_q)
            # to delete a document we need _rev and _id fields, so skip output filtering
            for doc in docs:
                self._cdb.delete(doc)
        else:
            raise KeyError(f"You can't remove that key: {k}")

    def __iter__(self):
        mango_q = {
            'selector': {},
            'fields': self._key_fields
        }
        yield from self._cdb.find(mango_q)

    def __len__(self):
        return self._cdb.info()['doc_count']

    def __return_doc_filter(self, doc):
        doc = dict(doc)
        for data_field in self._data_fields:
            if not self._data_fields[data_field]:
                del doc[data_field]
        return doc
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render_to_response
from couchdb import Server
from couchdb import ResourceNotFound
from django.template import RequestContext

SERVER = Server('http://*****:*****@programs.iriscouch.com/')
if len(SERVER) == 0:
    SERVER.create('programs')

map_tve1 = '''function(doc) {
    if (doc.canal=="tve1")
        emit(doc._id, (doc.name + " :"+doc.hinicio+"/"+doc.hfin));
}'''

map_clan = '''function(doc) {
    if (doc.canal=="clan")
        emit(doc._id, (doc.name + " :"+doc.hinicio+"/"+doc.hfin));
}'''

map_tve2 = '''function(doc) {
    if (doc.canal=="tve2")
        emit(doc._id, (doc.name + " :"+doc.hinicio+"/"+doc.hfin));
}'''

map_antena3 = '''function(doc) {
    if (doc.canal=="antena3")
        emit(doc._id, (doc.name + " :"+doc.hinicio+"/"+doc.hfin));
}'''
def main():
    parser = argparse.ArgumentParser(description="Initialize the database")
    parser.add_argument('--fixture', required=False)
    args = parser.parse_args()
    server = Server()

    # Copy the couchdb config file into the correct directory
    config_file_path = os.path.join(os.path.dirname(__file__), "couchdb.ini")
    if os.path.isdir("/etc/couchdb/default.d"):
        shutil.copy(config_file_path, "/etc/couchdb/default.d/openag.ini")
    elif os.path.isdir("/usr/local/etc/couchdb/default.d"):
        shutil.copy(
            config_file_path, "/usr/local/etc/couchdb/default.d/openag.ini"
        )
    else:
        raise RuntimeError("Failed to install couchdb configuration file")

    # Create all of the databases
    for k, v in DbName.__dict__.items():
        if k.isupper():
            try:
                server.create(v)
            except PreconditionFailed:
                pass

    # Push design documents to all of the databases
    design_path = os.path.dirname(_design.__file__)
    for db_name in os.listdir(design_path):
        if db_name.startswith('__'):
            continue
        db_path = os.path.join(design_path, db_name)
        db = server[db_name]
        doc = get_or_create(db, "_design/openag")
        update_doc(db, doc, folder_to_dict(db_path))

    # Create entries in the MODULE_TYPE database for all of the module types
    brain_dir = os.path.dirname(os.path.dirname(__file__))
    mod_dir = os.path.join(brain_dir, 'modules')
    for f_name in os.listdir(mod_dir):
        if not f_name.endswith('.py'):
            continue
        if f_name.startswith('__'):
            continue
        package_name = '.'.join(__package__.split('.')[:-1])
        py_mod_name = package_name + '.modules.' + f_name.split('.')[0]
        py_mod = import_module(py_mod_name)
        for name, cls in inspect.getmembers(py_mod, inspect.isclass):
            if issubclass(cls, Module) and cls.__module__ == py_mod_name:
                register_module_type(cls, server[DbName.MODULE_TYPE])

    # Create entries in the MODULE_GROUP database for all of the module groups
    mod_group_dir = os.path.join(brain_dir, 'module_groups')
    for f_name in os.listdir(mod_group_dir):
        if not f_name.endswith('.py'):
            continue
        if f_name.startswith('__'):
            continue
        package_name = '.'.join(__package__.split('.')[:-1])
        py_mod_name = package_name + '.module_groups.' + f_name.split('.')[0]
        py_mod = import_module(py_mod_name)
        for name, cls in inspect.getmembers(py_mod, inspect.isclass):
            if issubclass(cls, ModuleGroup) and cls.__module__ == py_mod_name:
                register_module_group(cls, server[DbName.MODULE_GROUP])

    # Create modules based on the fixture passed in from the command line
    if args.fixture:
        fixture_file_name = os.path.join(
            os.path.dirname(fixtures.__file__), args.fixture + ".json"
        )
        with open(fixture_file_name) as fixture_file:
            fixture = json.load(fixture_file)
        for db_name, items in fixture.items():
            db = server[db_name]
            for item in items:
                item_id = item["_id"]
                doc = get_or_create(db, item_id)
                update_doc(db, doc, item)
class Couch(object):
    """A class to hold the couchdb-python functionality used during ingestion.

    Includes methods to bulk post, load views from a view directory, backup
    and rollback ingestions, as well as track changes in documents between
    ingestions.
    """

    def __init__(self, config_file="akara.ini", **kwargs):
        """
        Default Args:
            config_file: The configuration file that includes the Couch server
                         url, dpla and dashboard database names, the views
                         directory path, and the batch size to use with
                         iterview

        Optional Args (if provided, config_file is not used):
            server_url: The server url with login credentials included.
            dpla_db_name: The name of the DPLA database.
            dashboard_db_name: The name of the Dashboard database.
            views_directory: The path where the view JavaScript files are
                             located.
            batch_size: The batch size to use with iterview
        """
        if not kwargs:
            config = ConfigParser.ConfigParser()
            config.readfp(open(config_file))
            server_url = config.get("CouchDb", "Server")
            dpla_db_name = config.get("CouchDb", "DPLADatabase")
            dashboard_db_name = config.get("CouchDb", "DashboardDatabase")
            views_directory = config.get("CouchDb", "ViewsDirectory")
            batch_size = config.get("CouchDb", "BatchSize")
            log_level = config.get("CouchDb", "LogLevel")
        else:
            server_url = kwargs.get("server_url")
            dpla_db_name = kwargs.get("dpla_db_name")
            dashboard_db_name = kwargs.get("dashboard_db_name")
            views_directory = kwargs.get("views_directory")
            batch_size = kwargs.get("batch_size")
            log_level = "DEBUG"

        self.server_url = server_url
        self.server = Server(server_url)
        self.dpla_db = self._get_db(dpla_db_name)
        self.dashboard_db = self._get_db(dashboard_db_name)
        self.views_directory = views_directory
        self.batch_size = int(batch_size)

        self.logger = logging.getLogger("couch")
        handler = logging.FileHandler("logs/couch.log")
        formatter = logging.Formatter(
            "%(asctime)s %(name)s[%(process)s]: [%(levelname)s] %(message)s",
            "%b %d %H:%M:%S")
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        self.logger.setLevel(log_level)

    def _get_db(self, name):
        """Return a database given the database name, creating the database
        if it does not exist.
        """
        try:
            db = self.server.create(name)
        except Exception:
            db = self.server[name]
        return db

    def _sync_views(self, db_name):
        """Fetches design documents from the views_directory, saves/updates
        them in the appropriate database, then builds the views.
        """
        build_views_from_file = ["dpla_db_all_provider_docs.js",
                                 # Uncomment when QA views have been built
                                 #"dpla_db_qa_reports.js",
                                 "dashboard_db_all_provider_docs.js",
                                 "dashboard_db_all_ingestion_docs.js"]
        if db_name == "dpla":
            db = self.dpla_db
        elif db_name == "dashboard":
            db = self.dashboard_db

        for file in os.listdir(self.views_directory):
            if file.startswith(db_name):
                fname = os.path.join(self.views_directory, file)
                with open(fname, "r") as f:
                    design_doc = json.load(f)

                # Check if the design doc has changed
                prev_design_doc = db.get(design_doc["_id"], {})
                prev_revision = prev_design_doc.pop("_rev", None)
                diff = DictDiffer(design_doc, prev_design_doc)
                if diff.differences():
                    # Save the design document
                    if prev_revision:
                        design_doc["_rev"] = prev_revision
                    db[design_doc["_id"]] = design_doc

                # Build views
                if file in build_views_from_file:
                    design_doc_name = design_doc["_id"].split("_design/")[-1]
                    for view in design_doc["views"]:
                        view_path = "%s/%s" % (design_doc_name, view)
                        start = time.time()
                        try:
                            for doc in db.view(view_path, limit=0):
                                pass
                            self.logger.debug("Built %s view %s in %s seconds"
                                              % (db.name, view_path,
                                                 time.time() - start))
                        except Exception, e:
                            self.logger.error("Error building %s view %s: %s"
                                              % (db.name, view_path, e))
from couchdb import Server, http
import json
from os import path

fname = "%s/../config.json" % path.dirname(path.abspath(__file__))
config = json.loads(open(fname).read())

couch = Server("%s:%d" % (config['host'], config['port']))
couch.resource.credentials = (config['auth']['username'],
                              config['auth']['password'])

try:
    db = couch.create(config['database'])
except http.PreconditionFailed:
    db = couch[config['database']]
parser.add_argument('--source', type=str, help='Source address')
parser.add_argument('--dest', type=str, help='Destination address')
args = parser.parse_args()

src = Server(args.source)
dest = Server(args.dest)

count = 0
for dbname in src:
    db = src[dbname]
    if (len(dbname) >= 4 and dbname[:4] == "mica") or dbname == "_users":
        try:
            newdb = dest[dbname]
        except couchdb.http.ResourceNotFound, e:
            dest.create(dbname)
            newdb = dest[dbname]

        security = db.security
        print "Copying " + str(dbname) + " security parameters: " + str(security)
        newdb.security = security

        if db.info()["doc_count"] != newdb.info()["doc_count"]:
            print "Replicating: " + str(dbname)
            src.replicate(args.source + "/" + dbname,
                          args.dest + "/" + dbname, continuous=True)
        else:
            print "Already replicated: " + str(dbname)
            continue

        while db.info()["doc_count"] > newdb.info()["doc_count"]:
            print "Source count: " + str(db.info()["doc_count"]) + " dest count: " + str(newdb.info()["doc_count"])
            sleep(5)
class Couch(object):
    """
    >>> config = {}
    >>> config['db_name'] = 'urldammit_doctest'
    >>> cdb = Couch(config)
    >>> del cdb.server['urldammit_doctest']
    >>> cdb = Couch(config)
    >>> print cdb.load("123abc")
    None
    >>> u = URI()
    >>> u.status = 200
    >>> u.uri = "http://local.ch/load_1.html"
    >>> cdb.insert(u)
    >>> u1 = cdb.load(u.id)
    >>> u1.uri == u.uri
    True
    >>> print u1.uri
    http://local.ch/load_1.html
    >>> cdb.delete(u.id)
    >>> cdb.insert(u)
    >>> u2 = cdb.load(u.id)
    >>> u2.uri == u.uri
    True
    >>> cdb.delete(u.id)
    >>> cdb.insert(u)
    >>> u.tags = ['foo','bar']
    >>> cdb.update(u)
    >>> u3 = cdb.load(u.id)
    >>> print u3.tags
    ['foo', 'bar']
    >>> del cdb.server['urldammit_doctest']
    """

    def __init__(self, config=None):
        self.config = self._default_config(config)
        self.server = Server(config['db_host'])
        self.bootstrap()
        self.db = self.server[config['db_name']]

    @db_cache.load
    def load(self, id):
        record = self.db.get(id, None)
        if not record:
            return None
        data = {}
        data['meta'] = {}
        for k, v in record.items():
            k = k.encode('utf-8')
            if k == 'tags':
                try:
                    data[k] = [tag.encode('utf-8') for tag in v]
                except:
                    data[k] = None
            elif k == 'pairs':
                data[k] = contract_dict(v)
            elif k == '_rev':
                data['meta']['_rev'] = v
            elif isinstance(v, unicode):
                data[k] = v.encode('utf-8')
            else:
                data[k] = v
        return URI.load(data)

    @db_cache.insert
    def insert(self, uri):
        self.db[uri.id] = uri.data()

    @db_cache.update
    def update(self, uri):
        stored_uri = self.load(uri.id)
        if not stored_uri:
            self.insert(uri)
            return
        data = {}
        try:
            data['_rev'] = stored_uri.meta['_rev']
        except:
            pass
        for k, v in uri.data().items():
            data[k] = v
        self.db[uri.id] = data

    @db_cache.delete
    def delete(self, id):
        del self.db[id]

    def bootstrap(self, **kwargs):
        dbname = self.config['db_name']
        if not dbname in self.server:
            self.server.create(dbname)

    def purge(self, **kwargs):
        pass

    def _default_config(self, config):
        """ Setup default values for configuration """
        if not config:
            config = {}
        config['db_host'] = config.get('db_host', 'http://localhost:5984')
        config['db_name'] = config.get('db_name', 'urldammit')
        return config

    def _load(self, id):
        return
from couchmail import archive_msg, headers, truly_unique_id

argparser = argparse.ArgumentParser()
argparser.add_argument('config_file', type=file,
                       help="Config INI file. See `config.sample.ini` for info.")
args = argparser.parse_args()

config = ConfigParser.RawConfigParser()
config.readfp(args.config_file)

# CouchDB/Cloudant setup
server = Server(config.get('couch', 'server'))
try:
    couch = server[config.get('couch', 'db')]
except ResourceNotFound:
    couch = server.create(config.get('couch', 'db'))

# IMAP setup
host = config.get('imap', 'host')
user = config.get('imap', 'user')
try:
    password = config.get('imap', 'password')
except ConfigParser.NoOptionError:
    password = getpass.getpass("Password for %s on %s: " % (user, host))
mailbox = config.get('imap', 'mailbox')

if __name__ == '__main__':
    print 'Connecting...'
    imapper = easyimap.connect(host, user, password, mailbox,
                               read_only=True, ssl=True, port=993)
    print 'Connected. Couple more questions...'
    amount = int(raw_input('How many mail items would you like to archive? '))