def do():
    """Watch the configuration file forever, reloading it when it changes.

    Polls the file's mtime once per second; the configuration is reloaded on
    the first pass and again any time the mtime moves forward.
    """
    last_reload = None
    while True:
        mtime = os.stat(config.config_path).st_mtime
        changed = last_reload is None or last_reload < mtime
        if changed:
            config.load()
            last_reload = mtime
        time.sleep(1)
def main():
    """Run one SNS backup: build an rsync command from settings and execute it."""
    # Load configuration file
    settings.load()
    settings.set_dryrun(False)

    # Set up logger
    log = src.log.Log(JOBNAME, print_terminal=True, asynchronous=False)

    # Build the rsync invocation from the configured settings.
    target = '%s@%s:%s' % (settings.ServerUser, settings.ServerHostname, DEST_DIR)
    rsync_cmd = ['rsync', '-a', SOURCE_DIR, target]
    if settings.DryRun:
        rsync_cmd.append('-n')
    if settings.RsyncDelete:
        rsync_cmd.append('--delete')
    if settings.RsyncVerbose:
        rsync_cmd.append('-v')
    rsync_cmd.extend('--exclude=' + ignore for ignore in IGNORE_FILES)

    # Cleanup hook: if the process exits before the flag below is set, the
    # atexit handler reports the premature termination.
    finished = False

    def on_exit_cleanup():
        # Closure only reads `finished`, so no nonlocal declaration is needed.
        if not finished:
            shortmsg = 'Unknown failure during SNS backup'
            longmsg = 'Script terminated prematurely (caught by atexit function.)'
            log.notify_all(shortmsg, longmsg, iserror=True)

    atexit.register(on_exit_cleanup)

    # TODO: add multi-process semaphore locking. To ensure that two backup jobs
    # dont conflict, we should try to obtain a lock here before doing anything
    # else.

    # Start backup
    shortmsg = 'SNS backup started'
    longmsg = 'Copying data from %s to %s' % (SOURCE_DIR, target)
    log.notify_all(shortmsg, longmsg)
    print(' '.join(rsync_cmd))
    code = call(rsync_cmd)
    if code == 0:
        shortmsg = 'SNS backup finished successfully'
        longmsg = 'Successfully copied data from %s to %s' % (SOURCE_DIR, target)
        log.notify_all(shortmsg, longmsg)
    else:
        shortmsg = 'SNS backup failed with code %d' % code
        longmsg = 'Failed to backup data from %s to %s\nFailed command: %s' % (
            SOURCE_DIR, target, ' '.join(rsync_cmd))
        log.notify_all(shortmsg, longmsg, iserror=True)
    finished = True
def setUp(self):
    """
    Load the config
    :return:
    """
    # Build the tests Logger: a single stream handler with a bare-message format.
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(fmt='%(message)s'))
    self.log = logging.Logger('Tests')
    self.log.addHandler(handler)
    # Fresh bottle application configured from the tests' config directory.
    self.app = bottle.Bottle()
    config.load(self.app, self.log, join('tests', 'config'), log_output=False)
def load_model(run_name: str, model_file: str) -> Tuple[VecEnv, PPO]:
    """Restore a saved PPO model plus a matching single-env evaluation env.

    The env is rebuilt with the run's recorded preprocessing settings
    (frame skip / frame stack) so observations match what the model saw
    during training.
    """
    run_dir = get_run_dir(run_name)
    preprocess_cfg = load(run_dir)['preprocess']
    env = make_env(
        seed=123,
        n_envs=1,
        run_dir=run_dir,
        frame_skip=preprocess_cfg['frame_skip'],
        frame_stack=preprocess_cfg['frame_stack'],
        is_eval=True,
    )
    return env, PPO.load(os.path.join(run_dir, model_file))
def init(configfile):
    """Load configuration and load IRC connections.

    Reads the config file, resolves a handler callable for each configured
    server, and returns a (servers, handlers) pair of parallel lists.
    """
    cfg = config.load(configfile)
    defaultuser = cfg['user'] if 'user' in cfg else None
    if 'servers' not in cfg:
        print('No servers specified in %s - cannot proceed!' % (configfile,))
        exit(1)
    servers = []
    handlers = []
    for server in cfg['servers']:
        # BUG FIX: the per-server user must come from the server entry itself;
        # the original read cfg['user'] here, silently ignoring any override.
        if 'user' in server:
            user = server['user']
        else:
            user = defaultuser
        # Default to the null handler so `handler` is always bound, even when
        # no 'handler' entry exists or its type is unrecognised (the original
        # could leave it unbound / stale from a previous iteration).
        handler = irc.null_handler
        if 'handler' in server:
            spec = server['handler']
            if 'module' == spec['type']:
                # Load the named plugin module and use its `main` callable.
                modname = spec['name']
                fptr, pathname, description = imp.find_module(
                    modname, ['plugins/'])
                try:
                    modtop = imp.load_module(modname, fptr, pathname,
                                             description)
                finally:
                    # Close the file even if load_module raises, per imp docs.
                    fptr.close()
                handler = modtop.main
            elif 'function' == spec['type']:
                # SECURITY: eval() of a config-supplied name executes arbitrary
                # code — acceptable only if the config file is fully trusted.
                handler = eval(spec['name'])
        servers.append(setup_connection(server, user))
        handlers.append(handler)
    return servers, handlers
"""Redis connection class""" from contextlib import ContextDecorator import redis from src.config import load CONFIG = load() class Redis(ContextDecorator): """Redis connection class context""" def __init__(self): redis_db = 0 if "REDIS_DB" in CONFIG.keys(): redis_db = int(CONFIG["REDIS_DB"]) self._connection = redis.Redis(host=CONFIG["REDIS_HOST"], password=CONFIG["REDIS_PASS"], db=redis_db) def __enter__(self): """Enter as a context object""" return self def __exit__(self, exc_type, exc_val, exc_tb): """Exit from context""" self._connection.close()
server = WSGILogger(app, handlers, ApacheFormatter()) # Build the application Logger log = logging.Logger('ALP') rotating_file = TimedRotatingFileHandler('logs/alp-{}.log'.format(log_time), when='midnight') stream = logging.StreamHandler() formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(message)s', datefmt='%y-%m-%d %H:%M:%S') stream.setFormatter(formatter) rotating_file.setFormatter(formatter) log.addHandler(rotating_file) log.addHandler(stream) # Load the config config.load(app, log, os.getenv("CONFIG_DIR", 'config'), log_output=True) # Install the Postgres plugin if os.getenv("DATABASE_URL", None) is not None: urlparse.uses_netloc.append("postgres") url = urlparse.urlparse(os.environ["DATABASE_URL"]) app.install(bottle_pgsql.Plugin('dbname={} user={} password={} ' 'host={} port={}'.format(url.path[1:], url.username, url.password, url.hostname, url.port))) else: app.install(bottle_pgsql.Plugin('dbname={} user={} password={} ' 'host={} port={}'.format(app.config['db.name'], app.config['db.user'],
import json
import urllib

import googlemaps

from src import config

# The Google Maps API key lives in the project's 'auth' credentials store.
credentials = config.load('auth')
secret_key = credentials['googlemaps']
gmaps = googlemaps.Client(key=secret_key)


def dist_matrix(origin, dest, departure_time):
    """Query the Google Distance Matrix API for origin → dest at the given
    departure time and return the raw API response."""
    return gmaps.distance_matrix(origin, dest,
                                 departure_time=departure_time)
def __init__(self):
    """Initialise the base class, load configuration, then run self.init()."""
    super().__init__()
    # Configuration is loaded once per instance and cached on the object.
    self._config = config.load()
    # Subclass/instance hook — presumably completes setup using self._config;
    # NOTE(review): confirm against the definition of init() elsewhere.
    self.init()
import os

from flask import Flask, request
from flask_restful import Resource, Api, reqparse
from flask_jwt import JWT, jwt_required

import src.config as cfg
from src.security import authenticate, identity
from src.resources.user import UserRegister
from src.resources.item import Item, ItemList
from src.resources.store import Store, StoreList
from src.db import db

# Application wiring — order matters: the config must be loaded onto `app`
# before JWT and the resources are attached.
app = Flask(__name__)
cfg.load(app)
api = Api(app)


@app.before_first_request
def create_table():
    # Create all SQLAlchemy tables lazily, just before the first request.
    db.create_all()


# JWT wires /auth using the project's authenticate/identity callbacks.
jwt = JWT(app, authenticate, identity)

# REST endpoints.
api.add_resource(Item, '/items/<string:name>')
api.add_resource(ItemList, '/items')
api.add_resource(UserRegister, '/register')
api.add_resource(Store, '/stores/<string:name>')
api.add_resource(StoreList, '/stores')
def setUp(self):
    """
    Set up the database with some orders ready for a credit.

    Builds a test logger and bottle app, resets the orders/credits tables,
    inserts 5 test users with a 100-unit order for every
    exchange/unit/side/rank combination, then precomputes the expected
    liquidity totals and per-rank rewards used by the tests.
    :return:
    """
    # Build the tests Logger
    self.log = logging.Logger('Tests')
    stream = logging.StreamHandler()
    formatter = logging.Formatter(fmt='%(message)s')
    stream.setFormatter(formatter)
    self.log.addHandler(stream)
    # set us up a bottle application with correct config
    self.app = bottle.Bottle()
    config.load(self.app, self.log, join('tests', 'config'), log_output=False)
    # build the database if it doesn't exist
    database.build(self.app, self.log, log_output=False)
    # clear any existing orders in the database
    conn = database.get_db(self.app)
    c = conn.cursor()
    c.execute("DELETE FROM orders")
    c.execute("DELETE FROM credits")
    conn.commit()
    # create test data
    # 5 test users each with 100 NBT on each exchange/pair/side/rank
    self.test_data = {}
    test_order_number = 1
    for i in xrange(0, 5):
        for unit in ['btc', 'ppc']:
            for side in ['ask', 'bid']:
                for rank in ['rank_1', 'rank_2']:
                    # %s placeholders: assumes a paramstyle='format' driver
                    # (e.g. psycopg2/MySQLdb) — TODO confirm.
                    c.execute("INSERT INTO orders (key,rank,order_id,order_amount,"
                              "side,exchange,unit,credited) VALUES "
                              "(%s,%s,%s,%s,%s,%s,%s,%s)",
                              ('TEST_USER_{}'.format(i + 1), rank,
                               test_order_number, 100, side,
                               'test_exchange', unit, 0))
                    test_order_number += 1
    conn.commit()
    conn.close()
    # setup test data for test_get_total_liquidity
    # get the orders from the database
    conn = database.get_db(self.app)
    c = conn.cursor()
    c.execute("SELECT * FROM orders")
    orders = c.fetchall()
    # get the liquidity as calculated by the main function
    self.total_liquidity = credit.get_total_liquidity(self.app, orders)
    # setup data for test_calculate_rewards
    # target for btc is 2500. total for btc is 2000.0 which is 0.8 of target
    # so the reward for btc is 0.02 instead of 0.025.
    # ask and bid are 50:50 so each gets 0.01. rank_1 ratio is 1.0 and rank_2
    # is 0 for both.
    #
    # target for ppc is 1500. total for ppc is 2000.0 so full reward of 0.0250
    # ask is 0.6 * 0.025 = 0.015
    # bid is 0.4 * 0.025 = 0.010
    # ask rank_1 is 1
    # bid rank_1 is 0.8 * 0.010 = 0.008
    # bid rank_2 is 0.2 * 0.010 = 0.002
    self.rewards = {'test_exchange': {'btc': {'ask': {'rank_1': 0.01,
                                                      'rank_2': 0.0},
                                              'bid': {'rank_1': 0.01,
                                                      'rank_2': 0.0}},
                                      'ppc': {'ask': {'rank_1': 0.015,
                                                      'rank_2': 0.0},
                                              'bid': {'rank_1': 0.008,
                                                      'rank_2': 0.002}}}}
def credit(app, log): """ This runs every minute and calculates the total liquidity on order (rank 1) and each users proportion of it. :param log: :param rpc: :param app: :return: """ # Set the timer going again credit_timer = Timer( 60.0, credit, kwargs={'app': app, 'log': log} ) credit_timer.name = 'credit_timer' credit_timer.daemon = True credit_timer.start() log_output = False # reload the config config.load(app, log, app.config['config_dir'], log_output) # calculate the credit time credit_time = int(time.time()) conn = database.get_db(app) db = conn.cursor() # Get all the orders from the database. db.execute("SELECT * FROM orders WHERE credited=0") all_orders = db.fetchall() if len(all_orders) > 0: log_output = True log.info('Start credit') # store the credit time in the info table db.execute("UPDATE info SET value=%s WHERE key=%s", ( credit_time, 'last_credit_time' )) # set up for some stats # build the blank meta stats object meta = {'last-credit-time': credit_time, 'number-of-users-active': 0, 'number-of-orders': 0} db.execute("SELECT value FROM info WHERE key=%s", ('next_payout_time',)) meta['next-payout-time'] = int(db.fetchone()[0]) db.execute("SELECT COUNT(id) FROM users") meta['number-of-users'] = int(db.fetchone()[0]) # create a list of active users active_users = [] # de-duplicate the orders deduped_orders = deduplicate_orders(all_orders, db) # calculate the liquidity totals totals = get_total_liquidity(app, deduped_orders) # We've calculated the totals so submit them as liquidity_info Thread( target=liquidity_info, kwargs={'app': app, 'totals': totals, 'log': log} ).start() # calculate the round rewards based on percentages of target and ratios of side and # rank rewards = calculate_reward(app, totals) # parse the orders for order in deduped_orders: # save some stats meta['number-of-orders'] += 1 if order[1] not in active_users: meta['number-of-users-active'] += 1 active_users.append(order[1]) # calculate the details reward, percentage = 
calculate_order_reward(order, totals, rewards) # and save to the database db.execute( "INSERT INTO credits (time,key,exchange,unit,rank,side,order_id,provided," "percentage,reward,paid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", (credit_time, order[1], order[8], order[9], order[2], order[5], order[0], order[4], (percentage * 100), reward, 0) ) # update the original order too to indicate that it has been credited db.execute("UPDATE orders SET credited=%s WHERE id=%s", (1, order[0])) # write the stats to the database stats_config = {} for ex in app.config['exchanges']: stats_config[ex] = {} for unit in app.config['{}.units'.format(ex)]: stats_config[ex][unit] = { 'target': app.config['{}.{}.target'.format(ex, unit)], 'reward': app.config['{}.{}.reward'.format(ex, unit)] } for side in ['ask', 'bid']: stats_config[ex][unit][side] = { 'ratio': app.config['{}.{}.{}.ratio'.format( ex, unit, side )] } for rank in app.config['{}.{}.{}.ranks'.format(ex, unit, side)]: stats_config[ex][unit][side][rank] = { 'ratio': app.config['{}.{}.{}.{}.ratio'.format( ex, unit, side, rank )] } db.execute("INSERT INTO stats (time,meta,totals,rewards,config) VALUES (%s,%s,%s," "%s,%s)", (credit_time, json.dumps(meta), json.dumps(totals), json.dumps(rewards), json.dumps(stats_config))) conn.commit() conn.close() if log_output: log.info('End credit') return