def main(db_name, db_user, db_pass, db_host, sequence_file, zone_dir, **tls_args):
    # Starting sequence for the change stream
    sequence = sequence_read(sequence_file)
    click.echo('Skipping %s changes.' % sequence)

    # CouchDB connection
    tls_args['cert_reqs'] = ssl.CERT_REQUIRED
    tls_args['ssl_version'] = ssl.PROTOCOL_TLSv1_2
    auth = CouchdbResource(filters=[BasicAuth(db_user, db_pass)], **tls_args)
    server = Server(uri=db_host, resource_instance=auth)
    db = server[db_name]

    if sequence == 0:
        click.echo('Fast track syncing all zones...')
        c = Consumer(db)
        result = c.fetch(descending=True, limit=1)
        # Fast track to this sequence
        sequence = result['last_seq']
        # Go get all the current zones.
        zones = c.fetch()
        for zone in zones['results']:
            domain = zone['id']
            try:
                doc = db.get(docid=domain)
            except ResourceNotFound:
                click.echo('%s not found (this is normal if the zone was deleted)' % domain)
            else:
                zone_update(domain, doc['data'], zone_dir)
        sequence_write(sequence_file, sequence)  # Keep track of our sync point
        click.echo('Fast track syncing done')
def wait(self, callback, **params):
    if 'since' not in params:
        params['since'] = 0
    self._params = params
    consumer = Consumer(db)

    def process(change):
        seq = change.get('seq')
        last_seq = change.get('last_seq')
        if seq is not None:
            if seq > self._params['since']:
                self._params['since'] = seq
            callback(change)
        elif last_seq is not None:
            self._params['since'] = last_seq

    while True:
        try:
            consumer.wait(process, **self._params)
        except NoMoreData:
            pass
def poll_changes():
    db = connect_db()
    last_change = db.info()["update_seq"]
    c = Consumer(db, backend='gevent')
    changes = c.wait_once(since=last_change, feed='longpoll',
                          include_docs='true', filter="app/posts")
    return jsonify(changes)
def main():
    s = Server('http://*****:*****@127.0.0.1:5984')
    db = s['datadb']
    c = Consumer(db)
    # start listening since = current update sequence.
    # callback function is run.main
    # heartbeat every minute to keep the connection alive.
    c.wait(callback, since=db.info()['update_seq'], filter='proc/newproc0',
           feed='continuous', heartbeat=60000)
def main():
    s = Server('http://edwdbik.fzk.de:5984')
    db = s['datadb']
    c = Consumer(db)
    # start listening since = current update sequence.
    # callback function is run.main
    # heartbeat every minute to keep the connection alive.
    c.wait(remoterun.main, since=db.info()['update_seq'], filter='proc/statusgood',
           feed='continuous', heartbeat=60000)
def get_recent_changes(db, limit=500):
    c = Consumer(db)
    changes = c.fetch(limit=limit, descending=True, include_docs=True)['results']
    for row in changes:
        yield {
            'id': row['id'],
            'rev': row['changes'][0]['rev'],
            'domain': row['doc'].get('domain', '[no domain]'),
            'doc_type': row['doc'].get('doc_type', '[no doc_type]'),
        }
def resolveForever(self):
    consumer = Consumer(self.db)
    consumer.register_callback(self.resolve)
    while True:
        try:
            consumer.wait(heartbeat=True, filter="sessiondoc/scanconflicts")
            self.logger.info('Changes feed closed connection. Restarting.')
        except Exception as e:
            self.logger.info('Error connecting to CouchDB for changes:')
            self.logger.info('%s' % e)
            time.sleep(1)
def get_recent_changes(db, limit):
    c = Consumer(db)
    changes = c.fetch(limit=limit, descending=True, include_docs=True)["results"]
    for row in changes:
        yield {
            "id": row["id"],
            "rev": row["changes"][0]["rev"],
            "domain": row["doc"].get("domain", "[no domain]"),
            "doc_type": row["doc"].get("doc_type", "[no doc_type]"),
            "date": _guess_date(row["doc"]),
        }
def process_couchdb_changes():
    server = Server(settings.COUCHDB_SERVER)
    db = server.get_or_create_db('openelm')
    consumer = Consumer(db)
    sequence = SyncSequenceCache.objects.get(pk=1)
    changes = consumer.fetch(filter='record/new_records',
                             since=sequence.last_sequence_id)
    if changes:
        for change in changes['results']:
            record_id = change['id']
            copy_photo_for_record.delay(record_id)
            send_new_record_email.delay(record_id)
        sequence.last_sequence_id = changes['last_seq']
        sequence.save()
def old_changes(self):
    """
    Couchdbkit < 0.6.0 changes feed listener
    http://couchdbkit.org/docs/changes_consumer.html
    """
    from couchdbkit import Consumer
    c = Consumer(self.couch_db, backend='gevent')
    while True:
        try:
            c.wait(self.parsing_processor, since=self.since, filter=self.couch_filter,
                   heartbeat=WAIT_HEARTBEAT, feed='continuous', timeout=30000,
                   **self.extra_args)
        except Exception as ex:
            pillow_logging.exception(
                "Exception in form listener: %s, sleeping and restarting" % ex)
            gevent.sleep(RETRY_INTERVAL)
def get_objects(db, since=0, limit=1, timeout=1000):
    consumer = Consumer(db)
    while True:
        resp = consumer.wait_once(since=since, limit=limit, timeout=timeout,
                                  include_docs=True, filter='slugs/slug_objects')
        results = resp['results']
        if not results:
            break
        for res in results:
            cls = (PodcastGroup, Podcast, Episode)
            classes = dict((c._doc_type, c) for c in cls)
            doc = res['doc']
            doc_type = doc['doc_type']
            seq = res['seq']
            c = classes[doc_type]
            yield seq, c.wrap(doc)
        since = resp['last_seq']
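# A short usage sketch for the generator above: iterate over the yielded
# (seq, object) pairs and remember the last sequence number so a later call
# can resume from it. `db` is assumed to be an open couchdbkit database and
# `save_checkpoint` is a hypothetical persistence helper, not original code.
last_seq = 0
for seq, obj in get_objects(db, since=last_seq):
    print(seq, obj)
    last_seq = seq
save_checkpoint(last_seq)  # hypothetical: persist the resume point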
    # ... tail of the ClientLogin auth helper (enclosing def elided in the source):
    lines = result.split('\n')
    authToken = ""
    for line in lines:
        if line.startswith("Auth="):
            authToken = line[5:len(line)]
            return authToken
    logging.error("error code: " + str(result.status_code) +
                  "; error message: " + result.content)
    return ""


# server object
server = Server('http://*****:*****@localhost:5984')
# create database
db = server.get_or_create_db("coconut")
c = Consumer(db)


def sendCouchMessage(line):
    # print "got %s" % line
    print(json.dumps(line))
    id = line['id']
    print("id: " + id)
    doc = db.get(id)
    if 'phone' in doc:
        phone = doc['phone']
        print(json.dumps(doc))
        message = "Message from " + phone
        print(message)
        status = sendMessage(accountName, registrationId, message)


c.wait(sendCouchMessage, since=5, heartbeat=True)  # Go into receive loop
    # ... body of the PDF-generating callback produced by generate_pdf(db)
    # (enclosing defs elided in the source):
    pdfname = pl_name
    jobname = uuid.uuid4().hex
    f = open('/tmp/%s.tex' % jobname, 'w+')
    f.write(data)
    f.close()
    try:
        for i in range(2):
            p = subprocess.Popen([
                'pdflatex',
                '-output-directory', '/tmp',
                '-interaction', 'nonstopmode',
                '-jobname', jobname,
                '/tmp/%s.tex' % jobname,
            ], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            p.communicate()
        filename = '/tmp/%s.pdf' % jobname
        data = open(filename).read()
        db.put_attachment(doc, data, name=pdfname, content_type='application/pdf')
    except IOError:
        pass
    return f


if __name__ == '__main__':
    while True:
        db = Database(sys.argv[1])
        c = Consumer(db)
        last_seq = c.wait_once()['last_seq']
        c.register_callback(generate_pdf(db))
        c.wait(filter='aimpl/pl', since=last_seq, heartbeat=True)
        time.sleep(5)
def wait_for_changes(db_url, since):
    db = Database(db_url)
    consumer = Consumer(db)
    consumer.wait(callback, since=since, heartbeat=1000, include_docs=True)
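# `callback` is not defined in the snippet above. Because the feed is started
# with include_docs=True, each change line carries the full document, so a
# minimal callback might look like the sketch below (`process_doc` is a
# hypothetical handler, not part of the original code):
def callback(change):
    doc = change.get('doc')
    if doc is not None:
        process_doc(doc)  # hypothetical: handle the changed document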
def queue_once():
    c = Consumer(self.db)
    items = c.fetch(since=0, **item_kwargs)['results']
    if items:
        item_function(items)
def process(self, db, since):
    consumer = Consumer(db)
    params = self.get_query_params()
    consumer.wait(self.callback, since=since, heartbeat=10000, **params)
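# The example above pulls its extra changes-feed options from
# self.get_query_params(). A minimal sketch of what such a method might
# return, using only keyword arguments that already appear in these examples
# (the filter name is a placeholder, not from the original code):
def get_query_params(self):
    return {
        'feed': 'continuous',
        'include_docs': True,
        'filter': 'app/my_filter',  # placeholder design-document filter
    }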