def run(self):
    source_server = CONF.getCouchURI()
    if not source_server:
        logger.info("""No DB configuration found. To upgrade your DB
            please configure a valid CouchDB URI in:
            ~/.faraday/config/user.xml configuration file.""")
        return
    serv = couchdbkit.Server(source_server)
    logger.info('We are about to upgrade dbs in Server [%s]' % source_server)
    dbs = filter(
        lambda x: not x.startswith("_") and 'backup' not in x and
        x not in CONST_BLACKDBS, serv.all_dbs())
    logger.info('Dbs to upgrade: %s' % (', '.join(dbs)))
    if not query_yes_no('Proceed?', 'no'):
        return
    logger.info('Preparing updates on Couchdbs')
    processed = 0
    logger.info('About to upgrade %d dbs' % len(dbs))
    for db_name in dbs:
        logger.info('Updating db %s' % db_name)
        try:
            self.update_db(db_name)
            processed = processed + 1
        except Exception as e:
            logger.error(e)
        logger.info('Updated DB [%s]. %d remaining' %
                    (db_name, len(dbs) - processed))
    logger.info("Update process finished, please review it.\n"
                "Backed-up databases won't be accessible.")

def main(uri, db):
    server = couchdbkit.Server(uri)
    database = server[db]
    # "_design0" sorts just after every "_design/..." id, so this range
    # covers all design documents.
    for row in database.all_docs(startkey="_design/", endkey="_design0",
                                 include_docs=True):
        did = row["id"]
        assert did.startswith("_design/")
        design = did[len("_design/"):]
        doc = row["doc"]
        if "views" not in doc:
            continue
        for key in doc["views"]:
            view = design + "/" + key
            print "Refreshing view", view
            if "reduce" in doc["views"][key]:
                r = database.view(view, reduce=True, group=True, limit=1)
            else:
                r = database.view(view, limit=1)
            # It's an iterator, and it's lazily loaded:
            list(r)

def upload(server, db, docs):
    """Upload all docs to couch, overwriting existing."""
    server = couchdbkit.Server(server)
    db = server[db]
    for doc in docs:
        db.save_doc(doc, force_update=True)

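# A minimal usage sketch for upload() above, assuming a local CouchDB on the
# default port; the URI, database name and documents are hypothetical.
docs = [
    {"_id": "example-1", "type": "note", "text": "first"},
    {"_id": "example-2", "type": "note", "text": "second"},
]
# force_update=True means docs that already exist with the same _id are
# overwritten instead of raising a conflict.
upload("http://127.0.0.1:5984", "example_db", docs)
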
def run(self):
    source_server = CONF.getCouchURI()
    if not source_server:
        logger.info("""No DB configuration found. To upgrade your DB
            please configure a valid CouchDB URI in:
            ~/.faraday/config/user.xml configuration file.""")
        return
    serv = couchdbkit.Server(source_server)
    logger.info('We are about to upload CouchdbViews in Server [%s]' %
                source_server)
    # if not query_yes_no("Faraday won't behave correctly with older versions, proceed?", 'no'):
    #     return
    dbs = filter(
        lambda x: not x.startswith("_") and 'backup' not in x and
        x not in CONST_BLACKDBS, serv.all_dbs())
    logger.info('Dbs to upgrade: %s' % (', '.join(dbs)))
    logger.info('Preparing updates on Couchdbs')
    processed = 0
    views_uploader = ViewsManager()
    for db_name in dbs:
        db_source = couchdbkit.Database("/".join((source_server, db_name)))
        views_uploader.addViews(db_source, force=True)

def __init__(self, config):
    """
    On construction, it will:

    * Use ``config[daemon_name]`` as ``self.config`` (defaults to 'parser').
    * Load modules from ``self.config["modules"]``.
    * Connect to CouchDB using ``self.config["couch_uri"]`` and
      ``config["couch_db"]``.
    """
    config = copy.deepcopy(config)
    parser_config = config["parser"]
    # loadable_manager is used by ParserFiltering and ParserModules.
    self.loadable_manager = loadable_manager.LoadableManager(config)
    self.filtering = ParserFiltering(config, self.loadable_manager)
    self.modules = []
    for module in parser_config["modules"]:
        m = dynamicloader.load(module["class"])
        dynamicloader.expecthasmethod(m, "pre_parse")
        dynamicloader.expecthasmethod(m, "parse")
        dynamicloader.expecthasnumargs(m.pre_parse, 1)
        dynamicloader.expecthasnumargs(m.parse, 2)
        module["module"] = m(self)
        self.modules.append(module)
    self.couch_server = couchdbkit.Server(config["couch_uri"])
    self.db = self.couch_server[config["couch_db"]]

def hook_connect(self, **kwargs):
    if self.username is None or self.password is None:
        connection_string = "http://%s:%s" % (self.host, self.port)
    else:
        connection_string = "http://%s:%s@%s:%s" % (
            self.username, self.password, self.host, self.port)
    self._server = couch.Server(uri=connection_string)

def run(self):
    logFile = '/tmp/tweetmonitor.log'
    handler = logging.handlers.RotatingFileHandler(
        logFile, maxBytes=5485760, backupCount=3)  # ~5 MB files
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    mainLogger = logging.getLogger('main')
    mainLogger.setLevel(logging.DEBUG)
    mainLogger.addHandler(handler)
    mainLogger.info('--')
    mainLogger.info('Tweetmonitor started')
    mainLogger.info('--')
    server = couchdbkit.Server(uri=COUCHDB_URI)
    db = server.get_or_create_db('tweetmonitor')
    try:
        with FilterStream(USER, PASS, track=words) as stream:
            for tweet in stream:
                db.save_doc(tweet)
                mainLogger.info(
                    "Got tweet from %-16s\t( tweet %d, rate %.1f tweets/sec)" % (
                        tweet["user"]["screen_name"], stream.count, stream.rate))
    except ConnectionError as e:
        mainLogger.error("Disconnected from twitter. Reason: %s", e.reason)
        sys.exit(1)

def __init__(self, server, user, password, database):
    energykit.DataSource.__init__(self)
    energykit.PubSub.__init__(self)
    self._user = user
    auth = restkit.BasicAuth(user, password)
    server = couchdbkit.Server(uri=server, filters=[auth])
    self.db = server.get_db(database)
    self._listening = False

def upload_views():
    """All wrongdoing is sin, but there is sin that does not lead to death.
    1 John 5:17
    """
    from managers.all import ViewsManager  # Blessed are the merciful, for they shall receive mercy.
    import couchdbkit  # for i have sinned and fallen short of the glory of god
    s = couchdbkit.Server(uri=CONF.getCouchURI())  # if we confess our sins
    db = s[workspace_name]  # he is faithful and just to forgive us
    views_manager = ViewsManager()  # and to cleanse us
    views_manager.addViews(db)  # from all unrighteousness

def __init__(self):
    super(Worker, self).__init__()
    self.go = False
    # server object
    server = couchdbkit.Server()
    # create database
    db = server.get_or_create_db('timelapse')
    StorageFile.set_db(db)

def __init__(self, config, daemon_name):
    self.tracker = config[daemon_name]["tracker"]
    server = couchdbkit.Server(config["couch_uri"])
    self.db = server[config["couch_db"]]
    self.recent_doc_ids = []
    self.recent_doc_receivers = {}
    self.upload_queue = Queue.Queue()
    self.recent_lock = threading.RLock()

def __init__(self, callsign, couch_uri="http://habhub.org/", couch_db="habitat", max_merge_attempts=20): self._callsign = callsign self._latest = {} self._max_merge_attempts = max_merge_attempts server = couchdbkit.Server(couch_uri) self._db = server[couch_db]
def __init__(self, config='config.json'):
    print("load config")
    try:
        os.stat(config)
        conf = json.loads(open(config).read())
        self.local_config = conf
    except:
        print("no config")
    print("connect to couch")
    try:
        self.server = couchdbkit.Server(conf['server'])
        current = self.server[conf['database']][conf['constructor']]
        self.db = self.server[current['databases'][0]]
        brokers = current['broker']
        #print(json.dumps(current, sort_keys=True, indent=4))
        self.config = current
    except:
        print("couchdb fail")
    try:
        print("connect to broker")
        credentials = pika.PlainCredentials(conf['broker_cred'][0],
                                            conf['broker_cred'][1])
        # brokers was set during the couch step above
        print(brokers)
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(credentials=credentials,
                                      host=str(brokers[0])))
        channel = connection.channel()
        channel.basic_qos(prefetch_count=1)
        self.channel = channel
        print("connected")
    except:
        print("broker failed")
    try:
        print("connect to redis")
        svr = self.local_config['redis']
        print(svr)
        r = redis.Redis(svr)
        self.redis = r
        print("connected")
        print("build queries")
        qs = self.config['redis_query']
        # dict access to queries
        self.queries = {}
        for i in qs.keys():
            print(' ' + i)
            qe = redis_query(self.db, self.redis, i, qs[i])
            self.queries[i] = qe
            self.__dict__[i] = qe
    except:
        print("redis failed")

def __init__(self, config, daemon_name="parserdaemon"): """ On construction, it will: * Connect to CouchDB using ``self.config["couch_uri"]`` and ``config["couch_db"]``. """ config = copy.deepcopy(config) self.couch_server = couchdbkit.Server(config["couch_uri"]) self.db = self.couch_server[config["couch_db"]] self.last_seq = self.db.info()["update_seq"] self.parser = parser.Parser(config)
def __init__(self, callsign, couch_uri="http://habitat.habhub.org/", couch_db="habitat", max_merge_attempts=20): # NB: update default options in /bin/uploader self._lock = threading.RLock() self._callsign = callsign self._latest = {} self._max_merge_attempts = max_merge_attempts server = couchdbkit.Server(couch_uri) self._db = server[couch_db]
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    settings.update(parse_settings(settings))
    authentication_policy = AuthTktAuthenticationPolicy('seekrit',
                                                        callback=groupfinder)
    authorization_policy = ACLAuthorizationPolicy()
    config = Configurator(settings=settings,
                          root_factory='scielobooks.resources.RootFactory',
                          authentication_policy=authentication_policy,
                          authorization_policy=authorization_policy,
                          request_factory=MyRequest,
                          renderer_globals_factory=renderer_globals_factory)
    engine = engine_from_config(config.registry.settings, prefix='sqlalchemy.')
    db_maker = sessionmaker(bind=engine)
    config.registry.settings['rel_db.sessionmaker'] = db_maker
    config.include(pyramid_zcml)
    config.load_zcml('configure.zcml')
    config.include('pyramid_mailer')
    config.include('pyramid_celery')
    config.registry['mailer'] = Mailer.from_settings(config.registry.settings)
    config.registry['app_version'] = APP_VERSION
    db_uri = config.registry.settings['db_uri']
    conn = couchdbkit.Server(db_uri)
    config.registry.settings['db_conn'] = conn
    config.add_subscriber(add_couch_db, NewRequest)
    config.scan('scielobooks.models')
    initialize_sql(engine)
    if config.registry.settings['serve_static_files'] is True:
        config.add_static_view(name='static', path='static')
        config.add_static_view('deform_static', 'deform:static')
        config.add_static_view('/'.join((config.registry.settings['db_uri'],
                                         config.registry.settings['db_name'])),
                               'scielobooks:database')
        config.add_static_view(config.registry.settings['fileserver_url'],
                               'scielobooks:fileserver')
    config.add_view(custom_forbidden_view, context=Forbidden)
    config.add_translation_dirs('scielobooks:locale/')
    config.set_locale_negotiator(custom_locale_negotiator)
    my_session_factory = UnencryptedCookieSessionFactoryConfig('itsaseekreet')
    config.set_session_factory(my_session_factory)
    return config.make_wsgi_app()

def receivers():
    couch_server = couchdbkit.Server(couch_settings["couch_uri"])
    couch_db = couch_server[couch_settings["couch_db"]]
    listeners = receivers_load(couch_db)
    response_data = []
    for callsign in listeners:
        l = listener_map(callsign, listeners[callsign])
        if l is not None:
            response_data.append(l)
    response = flask.make_response(json.dumps(response_data))
    set_expires(response, 10 * 60)
    response.headers["Content-type"] = "application/json"
    return response

def get_couch():
    exit = 1
    while exit:
        print('Enter couch url : http://user:[email protected]:<port>/')
        r = raw_input('>')
        u = urlparse.urlparse(r)
        print(u)
        if u.hostname is not None:
            s = couchdbkit.Server(u.geturl())
            try:
                info = s.info()
                if 'couchdb' in info:
                    exit = 0
            except:
                print('fail server ' + r)
    return s

def repair(self, auth=DEFAULT_AUTH):
    # Authenticate with the local store
    #user, password = auth
    server = couchdbkit.Server(uri=DEFAULT_STORE_ADMIN_URI)
    for dbname in self.all_dbs():
        # Recreate database if needed
        db = server.get_or_create_db(dbname)
        # Create index design doc if needed
        if not db.doc_exist('_design/index'):
            db.save_doc(index_doc())
    # Create state doc in the public database if needed
    if not self.public.doc_exist('_local/state'):
        self.public.save_doc(state_doc())

def __init__(self, db_uri, db_name, solr_uri, feed_type, callback=None):
    if feed_type not in ['continuous', 'longpoll']:
        raise ValueError(
            "allowed values are 'continuous' or 'longpoll'. received %s"
            % feed_type)
    if callback is None:
        callback = self.handle_notification
    self.__db_uri = db_uri
    self.__db_name = db_name
    self.__server = couchdbkit.Server(db_uri)
    self.__db = self.__server[db_name]
    self.__consumer = couchdbkit.Consumer(self.__db)
    self.__solr_uri = solr_uri
    self.__feed_type = feed_type
    self.__callback = callback
    self.__last_activity = None

def get_payloads(couch_uri, couch_db):
    server = couchdbkit.Server(couch_uri)
    db = server[couch_db]
    results = db.view("payload_configuration/callsign_time_created_index",
                      include_docs=True)
    payloads = {}
    # payload_config will be sorted, newest last. New docs will therefore
    # overwrite:
    for result in results:
        callsign, time_created, index = result["key"]
        metadata, sentence = result["value"]
        doc = result["doc"]  # need include_docs to get transmissions
        if not doc.get("transmissions", []):
            continue
        payloads[callsign] = [doc["transmissions"], sentence]
    return payloads

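# A hypothetical usage sketch for get_payloads() above, pointed at the public
# habitat database (the URI is borrowed from the uploader snippets); the loop
# body is illustrative only.
payloads = get_payloads("http://habitat.habhub.org/", "habitat")
for callsign, (transmissions, sentence) in payloads.items():
    print callsign, len(transmissions), "transmission(s)"
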
def __init__(self, config, daemon_name="parser"): """ Uses config[daemon_name] as self.config (defaults to 'parser'). Loads a LoadableManager, passing it config. Loads modules from self.config["modules"]. Scans self.config["certs_dir"] for CA and developer certificates. Connects to CouchDB using self.config["couch_uri"] and \ config["couch_db"]. """ config = copy.deepcopy(config) parser_config = config[daemon_name] self.loadable_manager = loadable_manager.LoadableManager(config) self.modules = [] for module in parser_config["modules"]: m = dynamicloader.load(module["class"]) dynamicloader.expecthasmethod(m, "pre_parse") dynamicloader.expecthasmethod(m, "parse") dynamicloader.expecthasnumargs(m.pre_parse, 1) dynamicloader.expecthasnumargs(m.parse, 2) module["module"] = m(self) self.modules.append(module) self.certificate_authorities = [] self.cert_path = parser_config["certs_dir"] ca_path = os.path.join(self.cert_path, 'ca') for f in os.listdir(ca_path): ca = M2Crypto.X509.load_cert(os.path.join(ca_path, f)) if ca.check_ca(): self.certificate_authorities.append(ca) else: raise ValueError("CA certificate is not a CA: " + os.path.join(ca_path, f)) self.loaded_certs = {} self.couch_server = couchdbkit.Server(config["couch_uri"]) self.db = self.couch_server[config["couch_db"]] self.last_seq = self.db.info()["update_seq"]
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    config = Configurator(root_factory=Root, settings=settings)
    config.add_route('insert', '/insert', view=views.insert_entry)
    config.add_route('list', '/list', view=views.list_entries)
    config.add_route('main', '', view=views.list_entries)
    config.add_route('edit', '/edit/{id}', view=views.edit_entry)
    config.add_route('view', '/view/{id}', view=views.view_entry)
    db_uri = settings['db_uri']
    conn = couchdbkit.Server(db_uri)
    config.registry.settings['db_conn'] = conn
    config.add_subscriber(add_couch_db, NewRequest)
    attachs_uri = settings['db_uri'] + '/' + settings['db_name']
    config.add_static_view(attachs_uri, 'pyramidattachs:attachments')
    config.add_static_view('static', 'pyramidattachs:static')
    return config.make_wsgi_app()

def __init__(self, db, collections, host='localhost', port=5984, clear=False, **_):
    server = couchdbkit.Server(uri='http://%s:%d' % (host, port),
                               uuid_batch_count=20000)
    # CouchDB has no collections, so we create one db for each language,
    # instead of one collection.
    dbnames = [('docs_%s' % coll, '%s_docs_%s' % (db, coll))
               for coll in collections]
    if clear:
        all_dbs = server.all_dbs()
        for _, name in dbnames:
            if name in all_dbs:
                server.delete_db(name)
    update(self,
           server=server,
           databases=dict((coll, server.get_or_create_db(name))
                          for coll, name in dbnames))

def migration():
    parser = argparse.ArgumentParser()
    parser.add_argument('--conf', help='wsgi conf file')
    parser.add_argument('--database', help='sqlite database')
    parser.add_argument('--upload-dir', help='directory where files are stored')
    args = parser.parse_args()
    config = ConfigParser.RawConfigParser()
    config.read(args.conf)
    server = couchdbkit.Server(config.get('app:main', 'couchdb.url'))
    db = server.get_or_create_db(config.get('app:main', 'couchdb.db'))
    ToStore.set_db(db)
    sqlDB = sqlite3.connect(args.database)
    cursor = sqlDB.execute(
        "select id, fdescr, fpath, fid, fname from tasks where closed = 0")
    for row in cursor.fetchall():
        id_, fdescr, fpath, fid, fname = row
        todo = ToStore(_id=str(id_), description=fdescr, filename=fname,
                       dtInserted=datetime.datetime.now())
        todo.save()
        with open(os.path.join(args.upload_dir, fid), 'rb') as attachment:
            with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as guess:
                mime = guess.id_buffer(attachment.read(1024))
            attachment.seek(0)
            todo.put_attachment(attachment, 'attachment', content_type=mime)

def main():
    """Purge old pastes."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--conf', help='paulla.paste conf file')
    args = parser.parse_args()
    config = ConfigParser.RawConfigParser()
    config.read(args.conf)
    logging.config.fileConfig(args.conf)
    logger = logging.getLogger('purge')
    server = couchdbkit.Server(config.get('app:main', 'couchdb.url'))
    db = server.get_or_create_db(config.get('app:main', 'couchdb.db'))
    Paste.set_db(db)
    oldPastes = Paste.view('old/all').all()
    for paste in oldPastes:
        logger.info("deleting %s", paste._id)
        paste.delete()

def db():
    global _db
    if _db is None:
        server = couchdbkit.Server(config["couchdb_server_url"])
        _db = server.get_or_create_db(config["couchdb_db"])
    return _db

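# A usage sketch for the lazy db() accessor above, assuming a module-level
# _db = None and a config dict such as the following (both hypothetical):
#   config = {"couchdb_server_url": "http://127.0.0.1:5984",
#             "couchdb_db": "example_db"}
handle = db()          # first call creates the Server and the database
assert handle is db()  # later calls return the cached handle
handle.save_doc({"type": "example"})
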
from .utils import *
from . import twitter, sentiment

import couchdbkit

server = couchdbkit.Server("http://{0}:{1}@{2}:{3}"
                           .format(env("COUCHDB_USERNAME"),
                                   env("COUCHDB_PASSWORD"),
                                   env("COUCHDB_HOST"),
                                   env("COUCHDB_PORT")))
db_data = server.get_or_create_db(env("DATA_DB"))
db_tweets = server.get_or_create_db(env("TWEETS_DB"))


def _get_data():
    return db_data.view("moodmap_data/words")


def _save_tweet(id, username, tweet, latitude, longitude, timestamp, rating):
    doc = {
        "id": id,
        "username": username,
        "tweet": tweet,
        "latitude": latitude,
        "longitude": longitude,
        "timestamp": timestamp,
        "rating": rating
    }
    if not db_tweets.save_doc(doc):
        log("Failed to save doc #{0}".format(id))

def connect_db():
    server = couchdbkit.Server(app.config['COUCHDB_URL'])
    return server.get_or_create_db(app.config['COUCHDB_DATABASE'])

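# A minimal sketch of wiring connect_db() above into a Flask app; the config
# values and the before_request hook are illustrative, not from the original
# code.
app.config['COUCHDB_URL'] = 'http://127.0.0.1:5984'
app.config['COUCHDB_DATABASE'] = 'example_db'

@app.before_request
def before_request():
    flask.g.db = connect_db()
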
import couchdbkit

db = couchdbkit.Server('https://garbados.cloudant.com')['hey_listen']
# or
db = couchdbkit.Server('https://<username>:<password>@username.cloudant.com')['hey_listen']

vr = db.view('ngrams/1gram', key=['Ichiro'])
print vr.first()['value']

vr = db.view('ngrams/1gram', key=['Ichiro'], limit=100, reduce=False,
             include_docs=True)
for row in vr:
    print row['key'], row['doc']['geo']['coordinates'], row['value']