def main(): """ main function to insert options to db """ parser = argparse.ArgumentParser() parser.add_argument('--symbol', default='spy') parser.add_argument('--cnf', help='config file for db') parser.add_argument('--log-file', help='log file') parser.add_argument('--log-mode', default='a', help='log file mode (a or w)') parser.add_argument('--log-level', default='debug',help='log level') opts = parser.parse_args() if opts.log_file: set_logger(level=opts.log_level, filename=opts.log_file, mode=opts.log_mode) else: set_logger(level=opts.log_level, out=sys.stdout) logging.info("========== %s ==========", str(datetime.datetime.now())) session = create_mysql_session(opts.cnf) rows = getOptionMW(opts.symbol) price = rows[0].getKey('price') rows = filter_price_range(rows, int(price*0.85), int(price*1.15)) rows = filter_date_range(rows, 0, 60) rows = filter_out_zero(rows) insert_multiple(session, rows)
import argparse
import sys


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--symbol', default='aapl')
    parser.add_argument('--log-file', help='log file')
    parser.add_argument('--log-mode', default='a', help='log file mode (a or w)')
    parser.add_argument('--log-level', default='debug', help='log level')
    opts = parser.parse_args()

    if opts.log_file:
        set_logger(level=opts.log_level, filename=opts.log_file, mode=opts.log_mode)
    else:
        set_logger(level=opts.log_level, out=sys.stdout)
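# set_logger is shared project code; a minimal sketch matching the call sites
# in these snippets (level string, optional file target, or an output stream);
# the format string is an assumption:
import logging


def set_logger(level='debug', filename=None, mode='a', out=None):
    """Configure the root logger to log to a file or to a stream."""
    log_level = getattr(logging, level.upper(), logging.DEBUG)
    fmt = '%(asctime)s %(levelname)s %(message)s'
    if filename:
        logging.basicConfig(level=log_level, filename=filename, filemode=mode, format=fmt)
    else:
        logging.basicConfig(level=log_level, stream=out, format=fmt)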
from datetime import datetime

import tushare as ts
from pandas import DataFrame, concat

from asset import database_model as db
from asset import database_migration as dm
from util import logger

__module_logger = logger.set_logger("module")


def insert_daily_data():
    """Run every day after 15:00: collect the day's market data and insert it into the DB."""
    # dm.remove_index()
    update_list = db.StockInfo.select()
    today = datetime.today().strftime('%Y-%m-%d')
    for record in update_list:
        try:
            current = ts.get_h_data(code=record.ticker, start=today, end=today)
            str_date = current.index[0].strftime('%Y-%m-%d')
            row_id = db.DailyPrice.create(trading_date=str_date,
                                          ticker=record.ticker,
                                          market_id=record.market_id,
                                          high=float(current.high[0]),
                                          low=float(current.low[0]),
                                          open=float(current.open[0]),
                                          close=float(current.close[0]),
                                          volume=float(current.volume[0]))
            __module_logger.info('Inserted %s into DailyPrice', row_id)
        except ConnectionError as e:
            __module_logger.debug("%s: can't fetch daily data due to a connection error", today)
            print(e)
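# The db models live in asset/database_model.py; a minimal peewee-style sketch
# inferred from the fields used above (field types and the backing store are
# assumptions):
from peewee import (CharField, DateField, FloatField, IntegerField, Model,
                    SqliteDatabase)

database = SqliteDatabase('qss.db')  # hypothetical backing store


class StockInfo(Model):
    ticker = CharField()
    market_id = IntegerField()

    class Meta:
        database = database


class DailyPrice(Model):
    trading_date = DateField()
    ticker = CharField()
    market_id = IntegerField()
    high = FloatField()
    low = FloatField()
    open = FloatField()
    close = FloatField()
    volume = FloatField()

    class Meta:
        database = database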
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import multiprocessing

from util import logger
from data import data_helper as hp
from data import daily_data_worker as dw

__data_logger = logger.set_logger('data')
__author__ = 'Will Chen'


# the __main__ guard is required for multiprocessing on platforms that
# spawn rather than fork worker processes
if __name__ == '__main__':
    hp.loading_calendar()
    __data_logger.info('Calendar information loaded.')
    hp.loading_stock_list()
    __data_logger.info('All ticker info created.')

    pool = multiprocessing.Pool(processes=3)
    stock_list = dw.get_stock_list()
    __data_logger.info('Ready to load prices in 3 processes')
    for stock in stock_list:
        pool.apply_async(hp.loading_price, (stock,))
    pool.close()
    pool.join()
    __data_logger.info('All prices loaded into database')
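# apply_async silently swallows worker exceptions unless each AsyncResult is
# checked; an alternative version of the loading loop that surfaces failures
# (same hp.loading_price worker, the error handling is an assumption):
def load_all_prices(stock_list, processes=3):
    with multiprocessing.Pool(processes=processes) as pool:
        results = [(s, pool.apply_async(hp.loading_price, (s,))) for s in stock_list]
        pool.close()
        pool.join()
        for stock, res in results:
            try:
                res.get()  # re-raises any exception raised in the worker
            except Exception:
                __data_logger.exception('Failed to load prices for %s', stock)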
import argparse
import logging
import sys


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--from-web', action='store_true', help='get data from web')
    parser.add_argument('--symbol', required=True)
    parser.add_argument('--TTE-min', default=1, type=int, help='min time to expire')
    parser.add_argument('--TTE-max', default=10, type=int, help='max time to expire')
    parser.add_argument('--price-range', default=0.1, type=float,
                        help='strike range, (1 +/- range) * current price')
    parser.add_argument('--strategy', type=str)
    parser.add_argument('--arg-list', nargs='*', help='arg list for strategy')
    parser.add_argument('--log-file', help='log file')
    parser.add_argument('--log-mode', default='a', help='log file mode (a or w)')
    parser.add_argument('--log-level', default='debug', help='log level')
    parser.add_argument('--db-cnf', help='if db config file is given, write results to db')
    parser.add_argument('--query-time', help='which time to query')
    opts = parser.parse_args()

    if opts.log_file:
        set_logger(level=opts.log_level, filename=opts.log_file, mode=opts.log_mode)
    else:
        set_logger(level=opts.log_level, out=sys.stdout)

    logging.info('retrieving options for %s', opts.symbol)
    if opts.from_web:
        res = getOptionMW(opts.symbol)
        if res is None:  # getOptionMW may return None because of a page-open failure
            logging.error('no data retrieved')
            return
        logging.info('%d records received', len(res))
    else:
        res = get_latest_strikes(table_name='test_options',
                                 underlying=opts.symbol,
                                 k_list=[0, 10000],
                                 exps=[opts.TTE_min, opts.TTE_max],
                                 call_list=[False, True],
                                 query_time=opts.query_time)
        if not res:
            logging.error('no result (from db)')
            sys.exit(0)

    # data preprocessing: keep strikes within the requested expiry window and
    # within (1 +/- price_range) of the underlying price
    price = res[0].getKey('price')
    data = data_preprocessing(res, [opts.TTE_min, opts.TTE_max],
                              [(1 - opts.price_range) * price,
                               (1 + opts.price_range) * price])

    # collect all calls and puts, ordered by expiry key
    call_strikes = []
    put_strikes = []
    for k in sorted(data[1].keys()):
        call_strikes += data[1][k]
        put_strikes += data[0][k]

    # calculate implied vol for both sides
    call_vols(call_strikes, rate=0.035)
    call_vols(put_strikes, rate=0.035, isCall=False)
    for i in call_strikes + put_strikes:
        logging.debug('strike:\n%s', i.__json__())

    if opts.strategy == 'iron':
        iron_table_print(data)
    elif opts.strategy == 'strangle':
        strangle_table_print(data)
    elif opts.strategy == 'short-butterfly':
        short_bufferfly_print(data)
    else:
        all_strikes_print(data)

    if opts.db_cnf:
        session = create_mysql_session(opts.db_cnf)
        insert_multiple(session, call_strikes + put_strikes)
        logging.info('%s data stored to database', opts.symbol)
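# call_vols above is project code; as an illustration of the computation it
# performs, a minimal Black-Scholes implied-vol solver by bisection. All names
# here (bs_call_price, implied_vol) are hypothetical, not the project's API.
import math


def bs_call_price(S, K, r, T, sigma):
    """Black-Scholes price of a European call (S: spot, K: strike, T: years)."""
    d1 = (math.log(S / K) + (r + 0.5 * sigma ** 2) * T) / (sigma * math.sqrt(T))
    d2 = d1 - sigma * math.sqrt(T)
    N = lambda x: 0.5 * (1.0 + math.erf(x / math.sqrt(2.0)))
    return S * N(d1) - K * math.exp(-r * T) * N(d2)


def implied_vol(price, S, K, r, T, lo=1e-4, hi=5.0, tol=1e-8):
    """Bisect on sigma; call price is monotonically increasing in sigma."""
    for _ in range(100):
        mid = 0.5 * (lo + hi)
        if bs_call_price(S, K, r, T, mid) > price:
            hi = mid
        else:
            lo = mid
        if hi - lo < tol:
            break
    return 0.5 * (lo + hi)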
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from asset import database_model as db
from util import logger

_main_logger = logger.set_logger()
__author__ = 'Will Chen'

_main_logger.info("Welcome to QSS. System is preparing...")
db.drop_tables()
_main_logger.info("Cleaned all tables, ready to begin...")
db.create_tables()
_main_logger.info("Database initialized. Ready to import historical data.")
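# drop_tables/create_tables live in asset/database_model.py; a minimal sketch
# under the peewee models assumed earlier (the model list is an assumption):
def create_tables():
    database.create_tables([StockInfo, DailyPrice])


def drop_tables():
    database.drop_tables([StockInfo, DailyPrice])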
import os
from json import loads

import docker
import grpc

from auto import (get_container_stat, calculate_time, get_container_config,
                  get_all_container, calculate_cpu_percent,
                  get_container_stats_stream, update_container)
from util import logger
import auto_pb2
import auto_pb2_grpc
from util.util import get_docker0_IP

LOG = "/var/log/auto.log"
# bind to _logger so the imported util.logger module is not shadowed
_logger = logger.set_logger(os.path.basename(__file__), LOG)
DEFAULT_PORT = '8090'


def submit(cmd):
    _logger.debug(cmd)
    try:
        host = get_docker0_IP()
        channel = grpc.insecure_channel("{0}:{1}".format(host, DEFAULT_PORT))
        client = auto_pb2_grpc.AutoControlStub(channel)
        response = client.Submit(auto_pb2.Request(cmd=cmd))
        result = loads(str(response.json))
        return result
    except grpc.RpcError as e:
        # minimal handling: log the RPC failure and return None to the caller
        _logger.error('gRPC call failed: %s', e)
        return None
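# Hypothetical usage: the command vocabulary is defined by the gRPC server
# behind AutoControl.Submit, so the cmd string below is only illustrative.
if __name__ == '__main__':
    print(submit('get_all_container'))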