class CouchdbPoller(InputModule):
    """Wishbone input module that follows a CouchDB continuous changes feed.

    Each change with an ``id`` is fetched as a full document; documents that
    look like test records are filtered out, the rest are submitted to the
    ``outbox`` queue as events.  The last seen sequence number is persisted
    to ``seqfile`` on shutdown and restored on startup.
    """

    def __init__(self, actor_config, couchdb_url, native_events=False,
                 seqfile="seqfile", destination="data", since=0, **kw):
        InputModule.__init__(self, actor_config)
        self.pool.createQueue("outbox")
        self.since = since
        self.seqfile = seqfile
        self.kw = kw
        try:
            self.couchdb = Database(couchdb_url)
        except HTTPError:
            # Best-effort: log and carry on without a handle.
            self.logging.error("Invalid database name")
            # TODO: create db

    def _get_doc(self, doc_id):
        # Fetch the raw document body through the low-level resource API
        # and decode the JSON payload ourselves.
        return loads(self.couchdb.resource.get(doc_id)[2].read())

    def preHook(self):
        # Resume from the sequence number persisted by a previous run, if any.
        if os.path.exists(self.seqfile):
            with open(self.seqfile) as seqfile:
                self.since = seqfile.read()
            self.logging.info('Restoring from seq: {}'.format(self.since))
        self.sendToBackground(self.produce)

    def postHook(self):
        # Persist the current position so the next run can resume.
        with open(self.seqfile, 'w+') as seqfile:
            seqfile.write(str(self.since))

    def is_test_doc(self, doc):
        """Return True when *doc* is marked as a test record."""
        if doc.get('mode', False) == "test":
            return True
        title = doc.get('title', False)
        if not title:
            return False
        upper_title = title.upper()
        # "ТЕСТУВАННЯ" is the Ukrainian marker for test records.
        return "TESTING" in upper_title or "ТЕСТУВАННЯ" in upper_title

    def produce(self):
        """Consume the changes feed until the module's loop is stopped."""
        while self.loop():
            for change in self.couchdb.changes(feed="continuous",
                                               since=self.since):
                # A heartbeat/terminator row carries 'last_seq' instead of 'seq'.
                self.since = change.get('seq', change.get('last_seq', "now"))
                self.logging.debug("Change event {}".format(change))
                if 'id' in change:
                    document = self._get_doc(change['id'])
                    if not self.is_test_doc(document):
                        self.submit(Event(document), "outbox")
                # Yield control to the cooperative scheduler.
                sleep(0)
        self.logging.info("Stopping changes feed from couchdb")
class CouchdbChangesInput(InputModule):
    """Wishbone input module streaming documents from a CouchDB changes feed.

    Unlike a plain poller this asks CouchDB for ``include_docs=True``, so each
    change row already carries the full document and no extra GET per change
    is needed.  The feed position is persisted to ``seqfile`` across runs.
    """

    def __init__(
            self,
            actor_config,
            couchdb_url,
            native_events=False,
            seqfile="seqfile",
            destination="data",
            since=0,
    ):
        InputModule.__init__(self, actor_config)
        self.pool.createQueue("outbox")
        self.since = since
        self.seqfile = seqfile
        try:
            self.couchdb = Database(couchdb_url)
        except HTTPError:
            # Best-effort: log and carry on without a handle.
            self.logging.error("Invalid database name")
            # TODO: create db

    def preHook(self):
        # Resume from the sequence number persisted by a previous run, if any.
        if os.path.exists(self.seqfile):
            with open(self.seqfile) as seqfile:
                self.since = seqfile.read()
            self.logging.info('Restoring from seq: {}'.format(self.since))
        self.sendToBackground(self.produce)

    def postHook(self):
        # Persist the current position so the next run can resume.
        with open(self.seqfile, 'w+') as seqfile:
            seqfile.write(str(self.since))

    def produce(self):
        """Consume the changes feed until the module's loop is stopped."""
        while self.loop():
            feed = self.couchdb.changes(
                feed="continuous", since=self.since, include_docs=True
            )
            for change in feed:
                # A heartbeat/terminator row carries 'last_seq' instead of 'seq'.
                self.since = change.get('seq', change.get('last_seq', "now"))
                self.logging.debug("Change event {}".format(change))
                if change and 'doc' in change:
                    self.submit(Event(change['doc']), "outbox")
        self.logging.info("Stopping changes feed from couchdb")
class AuctionsDataBridge(object):
    """AuctionsDataBridge

    Polls the tenders API for auctions that need planning, schedules them via
    an external ``auction_worker`` subprocess, and (optionally) drives the
    same planning from a CouchDB changes feed instead of HTTP polling.
    """

    def __init__(self, config):
        # config: dict with a 'main' section holding API/couch endpoints
        # (see config_get below).
        super(AuctionsDataBridge, self).__init__()
        self.config = config
        self.tenders_url = urljoin(
            self.config_get('tenders_api_server'),
            '/api/{}/tenders'.format(
                self.config_get('tenders_api_version')
            )
        )
        self.tz = tzlocal()
        self.couch_url = urljoin(
            self.config_get('couch_url'),
            self.config_get('auctions_db')
        )
        # Retry failed couch requests up to 10 times with increasing delays.
        self.db = Database(self.couch_url,
                           session=Session(retry_delays=range(10)))
        self.url = self.tenders_url

    def config_get(self, name):
        """Return the value of *name* from the 'main' section of the config."""
        return self.config.get('main').get(name)

    def tender_url(self, tender_id):
        """Build the auction URL for a single tender."""
        return urljoin(self.tenders_url, 'tenders/{}/auction'.format(tender_id))

    def get_teders_list(self, re_planning=False):
        """Yield tenders that should be planned, following API pagination.

        Iterates the tenders feed from ``self.offset`` (set by the caller,
        e.g. run()/run_re_planning()), yielding items in status
        'active.auction' that are not already planned, and cancelling the
        stored auction document for items in status 'cancelled'.
        Loops until an empty page is returned, updating ``self.offset``
        from ``next_page`` as it goes.
        """
        while True:
            params = {'offset': self.offset, 'opt_fields': 'status,auctionPeriod', 'mode': '_all_'}
            request_id = generate_request_id(prefix=b'data-bridge-req-')
            logger.debug('Start request to {}, params: {}'.format(
                self.url, params), extra={"JOURNAL_REQUEST_ID": request_id})
            response = requests.get(self.url, params=params,
                                    headers={'content-type': 'application/json',
                                             'X-Client-Request-ID': request_id})
            logger.debug('Request response: {}'.format(response.status_code))
            if response.ok:
                response_json = response.json()
                if len(response_json['data']) == 0:
                    # Empty page: remember the new offset and stop iterating.
                    logger.info("Change offset date to {}".format(response_json['next_page']['offset']),
                                extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                    self.offset = response_json['next_page']['offset']
                    break
                for item in response_json['data']:
                    # Candidate for planning: has a started-but-not-ended
                    # auctionPeriod and is in the active auction phase.
                    if 'auctionPeriod' in item \
                            and 'startDate' in item['auctionPeriod'] \
                            and 'endDate' not in item['auctionPeriod'] \
                            and item['status'] == "active.auction":
                        start_date = iso8601.parse_date(item['auctionPeriod']['startDate'])
                        start_date = start_date.astimezone(self.tz)
                        # View key is the start date as epoch milliseconds.
                        auctions_start_in_date = startDate_view(
                            self.db,
                            key=(mktime(start_date.timetuple()) + start_date.microsecond / 1E6) * 1000
                        )
                        if datetime.now(self.tz) > start_date:
                            logger.info("Tender {} start date in past. Skip it for planning".format(item['id']),
                                        extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                            continue
                        if re_planning and item['id'] in self.tenders_ids_list:
                            logger.info("Tender {} already planned while replanning".format(item['id']),
                                        extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                            continue
                        elif not re_planning and [row.id for row in auctions_start_in_date.rows if row.id == item['id']]:
                            # Already planned on the same start date.
                            logger.info("Tender {} already planned on same date".format(item['id']),
                                        extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                            continue
                        yield item
                    if item['status'] == "cancelled":
                        # Only cancel auctions that have not yet ended.
                        future_auctions = endDate_view(
                            self.db, startkey=time() * 1000
                        )
                        if item["id"] in [i.id for i in future_auctions]:
                            logger.info("Tender {} canceled".format(item["id"]),
                                        extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                            auction_document = self.db[item["id"]]
                            # current_stage == -100 marks a cancelled auction
                            # (see handle_continuous_feed filter below).
                            auction_document["current_stage"] = -100
                            auction_document["endDate"] = datetime.now(self.tz).isoformat()
                            self.db.save(auction_document)
                            logger.info("Change auction {} status to 'canceled'".format(item["id"]),
                                        extra={"JOURNAL_REQUEST_ID": request_id,
                                               'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                logger.info(
                    "Change offset date to {}".format(response_json['next_page']['offset']),
                    extra={"JOURNAL_REQUEST_ID": request_id, 'MESSAGE_ID': DATA_BRIDGE_PLANNING}
                )
                self.offset = response_json['next_page']['offset']
            else:
                # API error/unavailable: back off before retrying.
                sleep(10)

    def start_auction_worker(self, tender_item):
        """Spawn the external auction worker in 'planning' mode for a tender.

        do_until_success retries the subprocess call until it succeeds.
        """
        result = do_until_success(
            check_output,
            args=([self.config_get('auction_worker'), 'planning', str(tender_item['id']),
                   self.config_get('auction_worker_config')],),
        )
        logger.info("Auction planning command result: {}".format(result),
                    extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING_PROCESS})

    def planning_with_couch(self):
        """Plan auctions driven by the CouchDB changes feed instead of polling."""
        logger.info('Start Auctions Bridge with feed to couchdb',
                    extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
        logger.info('Start data sync...', extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
        # Map of tender id -> planned start date, used for de-duplication.
        self.planned_tenders = {}
        self.last_seq_id = 0
        while True:
            # Reconnect and resume from last_seq_id whenever the feed drops.
            do_until_success(self.handle_continuous_feed)

    def handle_continuous_feed(self):
        """Consume one pass of the filtered continuous changes feed.

        Skips cancelled (current_stage == -100) and test-mode auctions, and
        tenders already planned for the same start date; plans the rest.
        """
        change = self.db.changes(feed='continuous', filter="auctions/by_startDate",
                                 since=self.last_seq_id, include_docs=True)
        for tender_item in change:
            if 'id' in tender_item:
                # NOTE(review): assumes the doc always has a non-empty
                # 'stages' list — presumably guaranteed by the couch
                # filter; verify against the design doc.
                start_date = tender_item['doc']['stages'][0]['start']
                if tender_item['doc'].get("current_stage", "") == -100:
                    continue
                if tender_item['doc'].get("mode", "") == "test":
                    logger.info('Sciped test auction {}'.format(tender_item['id']),
                                extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                    continue
                if tender_item['id'] in self.planned_tenders and \
                        self.planned_tenders[tender_item['id']] == start_date:
                    logger.debug('Tender {} filtered'.format(tender_item['id']))
                    continue
                logger.info('Tender {} selected for planning'.format(tender_item['id']),
                            extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
                self.start_auction_worker(tender_item)
                self.planned_tenders[tender_item['id']] = start_date
            elif 'last_seq' in tender_item:
                # Feed terminator: remember where to resume.
                self.last_seq_id = tender_item['last_seq']
        logger.info('Resume data sync...', extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})

    def run(self):
        """Main polling loop: repeatedly fetch and plan tenders via HTTP."""
        logger.info('Start Auctions Bridge',
                    extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
        self.offset = ''
        logger.info('Start data sync...',
                    extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
        while True:
            for tender_item in self.get_teders_list():
                logger.debug('Tender {} selected for planning'.format(tender_item))
                self.start_auction_worker(tender_item)
                # Throttle subprocess spawning.
                sleep(2)
            logger.info('Sleep...', extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})
            sleep(100)
            logger.info('Resume data sync...',
                        extra={'MESSAGE_ID': DATA_BRIDGE_PLANNING})

    def run_re_planning(self):
        """One-shot pass that re-plans all upcoming auctions from offset ''."""
        self.re_planning = True
        # Track ids already re-planned so get_teders_list can skip them.
        self.tenders_ids_list = []
        self.offset = ''
        logger.info('Start Auctions Bridge for re-planning...',
                    extra={'MESSAGE_ID': DATA_BRIDGE_RE_PLANNING})
        for tender_item in self.get_teders_list(re_planning=True):
            logger.debug('Tender {} selected for re-planning'.format(tender_item))
            self.start_auction_worker(tender_item)
            self.tenders_ids_list.append(tender_item['id'])
            sleep(1)
        logger.info("Re-planning auctions finished",
                    extra={'MESSAGE_ID': DATA_BRIDGE_RE_PLANNING})