async def __process_subscriber(self, index, subscriber):
    """Poll one subscriber forever, persisting every streamed response to disk.

    Each response is written to ``self.data_response_dirpath`` under a
    timestamped filename. A failure to write one response file is logged
    and the stream continues; a failure of the subscription itself is
    counted and retried after ``self.retry_delay_seconds``.
    """
    failure_total = 0
    file_prefix = subscriber.handler_filename
    while True:
        try:
            Log.i('invoking subscriber {}', subscriber.handler_filename)
            async for response_text in subscriber.subscribe():
                response_text_md5hash = StringExpert.md5hash(response_text)
                try:
                    # Filename pattern: <handler>.<epoch-seconds>.<extension>
                    target_path = os.path.join(
                        self.data_response_dirpath,
                        '{}.{}.{}'.format(file_prefix, int(time.time()),
                                          FetchApp.RESPONSE_EXTENSION))
                    with open(target_path, 'w') as out:
                        out.write(response_text)
                except Exception as e:
                    # Best-effort persistence: keep consuming the stream.
                    Log.e('Failed to save response to file, message: {}', e)
                Log.d('stored api response for subcriber {} (hash {})',
                      subscriber.handler_filename, response_text_md5hash)
        except Exception:
            failure_total += 1
            Log.e('failed to invoke subscriber {} ({} failures so far)',
                  subscriber.handler_filename, failure_total)
            Log.d('exception stack:\n{}', traceback.format_exc())
        # Delay before re-subscribing, whether the stream ended or failed.
        Log.i('retrying in {} seconds..', self.retry_delay_seconds)
        await asyncio.sleep(self.retry_delay_seconds)
async def alert_continuously(self, alert_interval_seconds):
    """Run alert checks in an endless loop, pausing between iterations.

    Sleeps ``alert_interval_seconds`` after every check, whether it
    succeeded or failed.
    """
    is_triggered = False
    # NOTE(review): the loop never consults is_triggered; the original
    # carried a commented-out `is_triggered == False` condition, so the
    # result is intentionally kept but currently unused.
    while True:
        try:
            is_triggered = await self.check_for_alert_match()
        except Exception:
            Log.e('Failed to run alert check, stacktace:\n{}',
                  OsExpert.stacktrace())
        await asyncio.sleep(alert_interval_seconds)
def parse_and_persist_as_transaction_maybe(datafetch_api_response, parser, db):
    """Parse an api_response and persist the resulting transaction.

    Args:
        datafetch_api_response: response row/dict to parse.
        parser: parser handed through to the internal parse helper.
        db: storage backend performing the insert.

    Returns:
        True when the transaction was parsed and stored, False when the
        database rejected it as a duplicate.

    Raises:
        Exception: re-raised for any failure other than a duplicate insert.
    """
    try:
        # Fix: drop the unused `transaction = ...` binding — the helper's
        # return value was never read.
        ParseUtil.__parse_and_persist_as_transaction(
            datafetch_api_response, parser, db)
    except DuplicateInsertException:
        Log.w('db rejected transaction as a duplicate: {}', datafetch_api_response)
        return False
    except Exception:
        Log.e(
            'Failed to parse and store transaction from api_response: {}',
            datafetch_api_response)
        # Fix: bare `raise` instead of `raise e` to re-raise with the
        # original traceback intact.
        raise
    return True
def watch_continuously(self, watch_interval_seconds):
    """Verify datafetch api write frequency forever at a fixed interval.

    Tracks how many checks in a row have failed; the counter resets to
    zero on the first successful check.
    """
    Log.i('continuous watching activated with interval of {} seconds',
          watch_interval_seconds)
    consecutive_error_count = 0
    while True:
        try:
            self.__verify_datafetch_apis_write_frequency()
        except Exception:
            consecutive_error_count += 1
            Log.e('fail during watcher check ({} consecutive errors)',
                  consecutive_error_count)
            Log.d('stacktrace:\n{}', OsExpert.stacktrace())
        else:
            # Success clears the consecutive-failure streak.
            consecutive_error_count = 0
        time.sleep(watch_interval_seconds)
def emailMaybe(sender, receiver, title, text, smtp_host=None, smtp_user=None, smtp_password=None, smtp_port=587):
    """Best-effort email send.

    Args:
        sender: from-address.
        receiver: to-address.
        title: subject line.
        text: message body.
        smtp_host, smtp_user, smtp_password, smtp_port: SMTP settings
            forwarded to NetworkExpert.email (port defaults to 587).

    Returns:
        True when the send succeeded, False on any failure.
    """
    try:
        NetworkExpert.email(sender, receiver, title, text, smtp_host,
                            smtp_user, smtp_password, smtp_port)
        return True
    except Exception as e:
        # Fix: the original `except Error:` named a class not visible in
        # this file — if NetworkExpert.email raised, the handler itself
        # would raise NameError instead of returning False. Catch
        # Exception, consistent with every other handler in this module,
        # and record the cause instead of swallowing it silently.
        # (If a project-level `Error` base exists, confirm and narrow.)
        Log.e('Failed to send email')
        Log.d('email send failure cause: {}', e)
        return False
def process_api_response_file(self, filepath, subscriber, datafetch_api_response=None):
    """Read one on-disk api response file and persist it via the store.

    Args:
        filepath: path to the response file written by the fetcher.
        subscriber: handler whose filename prefix the file must match.
        datafetch_api_response: optional pre-built partial response dict;
            when None a fresh partial is requested from ParseUtil.

    Returns:
        False when the file is missing/not this subscriber's, or when the
        db rejects the response as a duplicate; True once the response is
        stored and handed to the transaction parser.

    Raises:
        Exception: re-raised when storing the api_response fails for any
        reason other than a duplicate insert.
    """
    db = self.store
    filename = os.path.basename(filepath)
    # Only regular files produced for this subscriber's handler qualify.
    if not os.path.isfile(filepath) or not filename.startswith(
            subscriber.handler_filename):
        return False
    receive_timestamp = int(ParseUtil.extractTimestampText(filename))
    with open(filepath, 'r') as disk_file:
        response_text = disk_file.read()
    response_text_md5hash = StringExpert.md5hash(response_text)
    if datafetch_api_response is None:
        datafetch_api_response = ParseUtil.partial_datafetch_api_response(
            subscriber, db)
    datafetch_api_response = {
        **datafetch_api_response,
        'response': response_text,
        'response_md5hash': response_text_md5hash,
        'epoch_receive_time': receive_timestamp,
        'response_filename': filename
    }
    try:
        # Fix: the returned id and the `transaction = None` placeholder
        # were never used — drop both unused bindings.
        db.create_datafetch_api_response(datafetch_api_response)
    except DuplicateInsertException:
        Log.d('db rejected api_response_id as a duplicate: {}',
              response_text_md5hash)
        return False
    except Exception:
        Log.e('Failed to store api_response ({})', response_text_md5hash)
        # Fix: bare `raise` preserves the original traceback (`raise e`
        # appended this frame).
        raise
    ParseUtil.parse_and_persist_as_transaction_maybe(
        datafetch_api_response, subscriber, db)
    return True