def initialize_in_dirs(appconfig_dirpath, logconfig_dirpath):
    """Initialize the App from two directories using the conventional
    file names: ``config.ini`` in the app-config dir and
    ``logconfig.json`` in the log-config dir.

    Both directories must already exist as absolute paths; fail fast
    otherwise.
    """
    OsExpert.ensure_abs_dirpath_exists(appconfig_dirpath)
    OsExpert.ensure_abs_dirpath_exists(logconfig_dirpath)
    config_path = os.path.join(appconfig_dirpath, 'config.ini')
    logconfig_path = os.path.join(logconfig_dirpath, 'logconfig.json')
    App.initialize(
        appconfig_filepath=config_path,
        logconfig_filepath=logconfig_path,
    )
def __init__(self, appFilepath, isToNotifyStartup=True):
    """Bootstrap the application rooted at *appFilepath*.

    The log config is taken from the file's own directory and the app
    config from its parent. Optionally sends a startup notification
    e-mail (only when *isToNotifyStartup* is exactly True).
    """
    print('App start: {}'.format(appFilepath))
    app_dirpath = OsExpert.path_backstep(appFilepath)
    config_dirpath = OsExpert.path_backstep(app_dirpath)
    App.initialize_in_dirs(
        logconfig_dirpath=app_dirpath,
        appconfig_dirpath=config_dirpath,
    )
    # Deliberate identity check: anything other than the True singleton
    # suppresses the notification (preserves original semantics).
    if isToNotifyStartup is True:
        NetworkExpert.tryAppNotifyByEmail(appFilepath, 'service is starting')
def __init__(self):
    """Construct the data-fetch app: read retry/output settings from
    AppConfig, validate the output directory, and load all subscribers.
    """
    super().__init__(__file__)
    Log.d('construct')
    delay = int(AppConfig.setting('DATAFETCH_API_RETRY_DELAY_SECONDS'))
    out_dirpath = AppConfig.setting('DATA_RESPONSE_DIRPATH')
    Log.d('data response dirpath is: {}', out_dirpath)
    self.retry_delay_seconds = delay
    self.data_response_dirpath = out_dirpath
    # Fail fast if the configured response directory is absent.
    OsExpert.ensure_abs_dirpath_exists(out_dirpath)
    self.subscribers = subscribe.all()
def __init__(self, h5_filepath, version):
    """Prepare a prediction run over an input HDF5 store.

    The output file mirrors the input file name inside
    PREDICTOR_DATA_DIRPATH; an already-existing output file is removed
    after interactive confirmation (the process exits if the user
    declines). Exactly one key is allowed in the input store; an
    EnsemblePredictor is registered for it, and a file watcher is
    attached that forwards epochs to ``handle_job_epoch``.

    NOTE(review): *version* is accepted but not used here — confirm
    whether callers rely on it.
    """
    warnings.simplefilter('ignore', NaturalNameWarning)
    in_file = Path(h5_filepath)
    out_dirpath = AppConfig.setting('PREDICTOR_DATA_DIRPATH')
    self.h5_out_filepath = os.path.join(out_dirpath, in_file.name)
    out_file = Path(self.h5_out_filepath)
    if out_file.exists():
        Log.i('overwrite file?: {}', out_file)
        confirmed = OsExpert.prompt_confirm(
            'File already exists, overwrite? {}'.format(out_file))
        if not confirmed:
            Log.d('user aborted, exiting')
            exit()
        Log.w('removing file: {}', out_file)
        os.remove(self.h5_out_filepath)
    self.predictors_map = {}
    base_filepath = out_dirpath  # retained from original; unused below
    with pd.HDFStore(h5_filepath, mode='r') as h5:
        keys = h5.keys()
        Log.i('h5 input keys: {}', keys)
        assert len(keys) == 1, 'harcoded restriction on single key was violated'
        for key in keys:
            Log.i('row count for {}: {}', key, h5.get_storer(key).nrows)
            self.predictors_map[key] = [
                EnsemblePredictor(
                    min_predict_generator_size=2000,
                    max_train_size=5000,
                )
            ]
    self.h5_watcher = H5FileWatcher(
        h5_filepath, self.handle_job_epoch, {'is_simulated': 0})
async def alert_continuously(self, alert_interval_seconds):
    """Poll for alert matches forever, sleeping *alert_interval_seconds*
    between checks. Failures are logged and the loop keeps running.
    """
    triggered = False
    while True:
        try:
            triggered = await self.check_for_alert_match()
        except Exception:
            # Best-effort loop: record the failure and carry on.
            trace = OsExpert.stacktrace()
            Log.e('Failed to run alert check, stacktace:\n{}', trace)
        await asyncio.sleep(alert_interval_seconds)
def test_file_contents_md5hash(self):
    """md5 of a file containing 'sample text' matches the known digest."""
    temp_path = os.path.join(AppConfig.setting('TEMP_DIRPATH'), 'file.txt')
    with open(temp_path, 'w') as handle:
        handle.write('sample text')
    self.assertEqual(
        OsExpert.md5(temp_path),
        '70ee1738b6b21e2c8a43f3a5ab0eee71',
    )
    os.remove(temp_path)
def watch_continuously(self, watch_interval_seconds):
    """Run the write-frequency check forever at a fixed interval,
    tracking (and logging) how many consecutive checks have failed.
    """
    Log.i('continuous watching activated with interval of {} seconds',
          watch_interval_seconds)
    error_streak = 0
    while True:
        try:
            self.__verify_datafetch_apis_write_frequency()
            error_streak = 0  # success resets the streak
        except Exception:
            error_streak += 1
            Log.e('fail during watcher check ({} consecutive errors)',
                  error_streak)
            Log.d('stacktrace:\n{}', OsExpert.stacktrace())
        time.sleep(watch_interval_seconds)
def initialize(appconfig_filepath, logconfig_filepath):
    """Initialize logging and app configuration from explicit file paths.

    The app-config path is validated first so a missing config fails
    before any subsystem is touched; logging is set up before AppConfig
    so config loading can be logged.
    """
    OsExpert.ensure_abs_filepath_exists(appconfig_filepath)
    Log.initialize(logconfig_filepath)
    AppConfig.initialize(appconfig_filepath)
df_to_append, format='table', data_columns=True) row_count = h5.get_storer(job.uid).nrows Log.d('...h5 key {}, row count is {}', job.uid, row_count) except Exception as append_error: raise append_error Log.d('...time spent adding to h5: {:.2f}s', time.time() - h5_process_start_time) row_processing_time = time.time() - subset_process_start_time Log.d('...total time spent on subset: {:.2f}s ({:.2f}s per row)', row_processing_time, row_processing_time / row_process_count) return transaction_min_timestamp if __name__ == '__main__': file_dirpath = OsExpert.path_backstep(__file__) pd.options.display.float_format = '{:.2f}'.format try: app = GeneratorApp() app.feed_jobs_forever( job_changed_handler=lambda job: None #Log.w('dummy callback for job: {}', job.uid) ) except KeyboardInterrupt: print('\n\nKeyboardInterrupt\n') except Exception as e: Log.c('app failed: {}', e) stacktrace = OsExpert.stacktrace() Log.d('stacktrace:\n{}', stacktrace)
import sys; sys.path.append('..') import os import asyncio from applogging import Log from core import AppConfig, OsExpert, Timeout import traceback import time AppConfig.initialize_in_file_dir( OsExpert.path_backstep(__file__) ) class Parser(): def __init__(self): wfPath = "/tmp/my_fifo2" wp = None try: if not os.path.exists(wfPath): os.mkfifo(wfPath) while True: is_sent = False try: with Timeout(1): with open(wfPath, 'w') as wp: print('sending..') wp.write("a write!\n") print('sent') is_sent = True time.sleep(1) except TimeoutError: if not is_sent:
import sys
sys.path.append('../../src/fetch')
sys.path.append('../../src/python')

from app import App
import unittest
from core import OsExpert
from db import DatabaseGateway


class TestCase(unittest.TestCase):
    """Integration tests for the watcher/alert pipeline."""

    def setUp(self):
        # A fresh gateway for every test.
        self.db = DatabaseGateway()

    def test_watch_triggers_alert(self):
        # Placeholder: the watch/alert scenario is not written yet.
        raise NotImplementedError()


if __name__ == '__main__':
    parent_dirpath = OsExpert.path_backstep(__file__, backsteps=2)
    # NOTE(review): other code in this project calls
    # App.initialize_in_dirs (plural, two dirs) — confirm that a
    # single-dir App.initialize_in_dir actually exists.
    App.initialize_in_dir(parent_dirpath)
    unittest.main()
class PredictApp(App):
    """Application entry point that runs a Predictor over a generated h5 file."""

    def __init__(self, version=None):
        # BUGFIX: *version* had no default, but the __main__ block below
        # constructs PredictApp() with no arguments, which raised
        # TypeError before the app could start. A default of None is
        # backward-compatible with any caller that does pass a version.
        # NOTE(review): version is currently unused — confirm intent.
        super().__init__(__file__)

    def handle_change(self):
        # Called when the watched input is modified.
        Log.d('modified')

    def run(self, h5_filepath):
        """Run prediction on *h5_filepath* and block until the worker
        thread finishes."""
        predictor = Predictor(h5_filepath)
        thread = predictor.run_async()
        thread.join()


if __name__ == '__main__':
    try:
        app = PredictApp()
        # Exactly one CLI argument: the h5 file name inside the
        # generator data directory.
        assert len(sys.argv) == 2, 'not exactly two parameters (i.e. one argument)'
        h5_filename = sys.argv[1]
        h5_filepath = os.path.join(
            AppConfig.setting('GENERATOR_DATA_DIRPATH'), h5_filename)
        assert Path(h5_filepath).is_file(), 'is not a file: {}'.format(h5_filepath)
        app.run(h5_filepath)
    except KeyboardInterrupt:
        print('\n\nKeyboardInterrupt\n')
    except Exception as e:
        Log.c('app failed: {}', e)
        stacktrace = OsExpert.stacktrace()
        Log.d('stacktrace:\n{}', stacktrace)
# BUGFIX: the original called sys.path.append('..') without ever importing
# sys (NameError), and only *after* the project imports that the path
# append is meant to enable. Import sys and adjust the path first.
import sys
sys.path.append('..')

import json
import time
import configparser
import io
import os
import hashlib
import requests
import hmac
import websocket
# NOTE: deliberately shadows the stdlib json imported above (original behavior).
import simplejson as json
from applogging import Log
from core import AppConfig, OsExpert
from db import DatabaseGateway

AppConfig.initialize_in_file_dir(OsExpert.path_backstep(__file__))

file_path = os.path.realpath(__file__)
print(file_path)
db = DatabaseGateway()
datafetch_api_id = db.datafetch_api_id_by_handler_filepath(file_path)
print(datafetch_api_id)
# NOTE(review): this early exit makes everything below unreachable —
# presumably a debugging leftover; confirm before removing.
exit()


def prettyJson(jsonData):
    """Render *jsonData* as indented, key-sorted JSON text."""
    return json.dumps(jsonData, indent=4, sort_keys=True)


publicKey = AppConfig.setting("BITCOINAVERAGE_PUBLIC_KEY")
secretKey = AppConfig.setting("BITCOINAVERAGE_SECRET_KEY")
url = "https://apiv2.bitcoinaverage.com/websocket/get_ticket"