def upload_file_sftp(self, remote_dir, file_info, local_dir, hostname, username, password, slack_message):
    """Upload a single file to an SFTP server and report the outcome to Slack.

    The remote path is built as ``remote_dir + file_info.year + '/' + file_info.file_name``;
    the remote year-directory is created if it does not already exist.

    :param remote_dir: base directory on the remote host (trailing slash expected)
    :param file_info: object exposing ``file_name`` and ``year`` attributes
    :param local_dir: local directory containing the file (created if missing)
    :param hostname: SFTP server host
    :param username: SFTP login
    :param password: SFTP password
    :param slack_message: free-text prefix included in the status notification
    :return: None; success/failure is reported via SignalApp when PUSH_SLACK_LOGGING is set
    """
    port = 22
    client = None
    try:
        # Make sure the local source directory exists (idempotent).
        os.makedirs(local_dir, exist_ok=True)
        local_path = os.path.join(local_dir, file_info.file_name)
        remote_dir = remote_dir + file_info.year + '/'
        remote_path = remote_dir + file_info.file_name

        client = paramiko.SSHClient()
        # client.load_system_host_keys()
        # Auto-accepting unknown host keys: convenient but disables MITM protection.
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(hostname=hostname, port=port, username=username,
                       password=password)
        ftp = client.open_sftp()
        try:
            ftp.mkdir(remote_dir)
        except IOError:
            # Directory already exists (SFTP reports this as an IOError) — proceed.
            pass
        try:
            ftp.put(local_path, remote_path)
        finally:
            # Close the SFTP channel even if the transfer fails.
            ftp.close()
        if PUSH_SLACK_LOGGING:
            signalapp = SignalApp('ftptransfer', APPCLASS_DATA, RABBIT_HOST,
                                  RABBIT_USER, RABBIT_PASSW)
            signalapp.send(
                MsgStatus('FTP_TRANSFER',
                          'SUCCESS {0} {1}'.format(slack_message, file_info.file_name),
                          notify=True))
    except Exception:
        # Best-effort transfer: any failure is reported to Slack instead of raised,
        # matching the original behavior of swallowing errors after notification.
        if PUSH_SLACK_LOGGING:
            signalapp = SignalApp('ftptransfer', APPCLASS_DATA, RABBIT_HOST,
                                  RABBIT_USER, RABBIT_PASSW)
            signalapp.send(
                MsgStatus('FTP_TRANSFER',
                          'FAILED {0} {1}'.format(slack_message, file_info.file_name),
                          notify=True))
    finally:
        # Fix: the original left this commented out, leaking the SSH connection.
        if client is not None:
            client.close()
def download_file_ftp(self, remote_dir, file_info, local_dir, hostname, username, password, slack_message):
    """Download a single file from a plain-FTP server and report the outcome to Slack.

    :param remote_dir: remote directory containing the file (trailing slash expected)
    :param file_info: object exposing a ``file_name`` attribute
    :param local_dir: local destination directory (created if missing)
    :param hostname: FTP server host
    :param username: FTP login
    :param password: FTP password
    :param slack_message: free-text prefix included in the status notification
    :return: None; success/failure is reported via SignalApp when PUSH_SLACK_LOGGING is set
    """
    ftp = None
    try:
        # Make sure the local destination directory exists (idempotent).
        os.makedirs(local_dir, exist_ok=True)
        local_path = os.path.join(local_dir, file_info.file_name)
        remote_path = remote_dir + file_info.file_name

        # NOTE(review): ftplib.FTP(host, user, passwd) — the third argument here is
        # "user:password@host", which is passed as the PASS credential. This matches
        # the original code, but looks like a URL fragment leaked into the password
        # slot; confirm against the server's expected credentials.
        ftp = FTP(hostname, username, username + ":" + password + "@" + hostname)
        # ftp.login()
        # Context manager guarantees the local file is closed even if the
        # transfer raises (the original leaked the handle on failure).
        with open(local_path, "wb") as local_file:
            ftp.retrbinary('RETR ' + remote_path, local_file.write)
        ftp.quit()
        if PUSH_SLACK_LOGGING:
            signalapp = SignalApp('ftptransfer', APPCLASS_DATA, RABBIT_HOST,
                                  RABBIT_USER, RABBIT_PASSW)
            signalapp.send(
                MsgStatus('FTP_TRANSFER',
                          'SUCCESS {0} {1}'.format(slack_message, file_info.file_name),
                          notify=True))
    except Exception:
        # Best-effort transfer: failures are reported, not raised (original behavior).
        if PUSH_SLACK_LOGGING:
            signalapp = SignalApp('ftptransfer', APPCLASS_DATA, RABBIT_HOST,
                                  RABBIT_USER, RABBIT_PASSW)
            signalapp.send(
                MsgStatus('FTP_TRANSFER',
                          'FAILED {0} {1}'.format(slack_message, file_info.file_name),
                          notify=True))
    finally:
        # Fix: guard against FTP() itself failing, in which case the original
        # 'finally: ftp.close()' raised NameError on the unbound name.
        if ftp is not None:
            ftp.close()
# Ad-hoc smoke-test script: sends a test MsgStatus through SignalApp and builds a
# mocked Swarm last-state message. Running this module has network side effects
# (it publishes to the message bus).
from tradingcore.signalapp import SignalApp, APPCLASS_DATA
import time
# NOTE(review): the line below shadows the 'time' module imported above with
# datetime.time — after this point the stdlib 'time' module is unreachable.
# Harmless here (only the commented-out send_to used time(...)), but confirm.
from datetime import datetime, time
from tradingcore.messages import *
from unittest.mock import MagicMock
from backtester.swarms.swarm import Swarm

#app = SignalApp('asdasd', 'test')
app = SignalApp('asdasd', 'EXO_ENGINE')

#app.send_to("ES", APPCLASS_DATA, {'msg': 'Test message', 'date': datetime.combine(datetime.now().date(), time(12, 45, 0))})
#app.send({'etst':'testse'})
# Publish a test status message (side effect: hits the message bus).
app.send(MsgStatus("ERROR", 'Notifications test'))
#app.send(MsgEXOQuote("EXO_TEST", datetime.now()))
#app.send(MsgQuoteNotification("CL", datetime.now()))

# Build a mock Swarm carrying representative last-state attributes so a
# MsgAlphaState payload can be constructed without a real backtest.
mock_swm = MagicMock(spec=Swarm)
mock_swm.name = 'TEST_SWM'
mock_swm.exo_name = 'TEST_EXO'
mock_swm.instrument = 'TEST'
mock_swm.last_exposure = -1
mock_swm.last_prev_exposure = 2
mock_swm.last_rebalance_date = datetime.now()
mock_swm.last_date = datetime.now()
msg_alpha_state = MsgAlphaState(mock_swm)
#app.send(msg_alpha_state)
#app.send(MsgAlphaSignal(msg_alpha_state, []))
class EXOScript:
    """EXO-building engine driver.

    Runs EXO (expression/synthetic instrument) calculations either online
    (subscribed to a datafeed quote stream) or in backfill mode over a date
    range, publishing status and EXO-quote messages via SignalApp.
    """

    def __init__(self, args, loglevel):
        # args: parsed CLI namespace (expects .instrument, .exolist, .backfill, .debug)
        self.signalapp = None
        self.asset_info = None
        self.args = args
        self.loglevel = loglevel
        # Silence noisy pika (RabbitMQ client) logging.
        logging.getLogger("pika").setLevel(logging.WARNING)
        self.logger = logging.getLogger('EXOBuilder')
        self.logger.setLevel(loglevel)
        # create console handler with a higher log level
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(loglevel)
        # create formatter and add it to the handlers
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        self.logger.addHandler(ch)

    def check_quote_data(self, appname, appclass, data):
        """Validate an incoming datafeed message.

        Returns True only for a non-empty APPCLASS_DATA message that carries
        'date' and 'mtype' keys with mtype == 'quote'; logs and returns False
        otherwise.
        """
        if appclass != APPCLASS_DATA:
            self.logger.error("Unexpected APP class message: {0}".format(data))
            return False
        if data is None:
            self.logger.error("Empty message")
            return False
        else:
            if 'date' not in data:
                self.logger.error("Bad message format")
                return False
            if 'mtype' not in data:
                self.logger.error("Bad message format, no 'mtype'")
                return False
            else:
                if data['mtype'] != 'quote':
                    return False
        return True

    def get_exo_list(self, args):
        """Resolve the CLI --exolist filter against the EXO_LIST registry.

        '*' returns the full registry; otherwise a comma-separated list of EXO
        names is matched case-insensitively, skipping duplicates.

        :raises ValueError: when the filter matches nothing.
        """
        if args.exolist == "*":
            return EXO_LIST
        else:
            self.logger.debug("Processing list of EXOs: " + args.exolist)
            result = []
            list_set = {}
            for e in args.exolist.split(','):
                # Avoid duplicates
                if e.lower() not in list_set:
                    for exo_setts in EXO_LIST:
                        if exo_setts['name'].lower() == e.lower():
                            list_set[e.lower()] = exo_setts
                            result.append(exo_setts)
            if len(result) == 0:
                raise ValueError("EXO list is empty, bad filter? ({0})".format(
                    args.exolist))
            return result

    def on_new_quote(self, appclass, appname, data):
        """Datafeed callback: run the EXO calculation once a quote past the
        decision time arrives for the subscribed instrument (appname)."""
        if data.mtype != MsgQuoteNotification.mtype:
            return
        # Check data integrity
        if not self.check_quote_data(appname, appclass, data):
            return
        exec_time, decision_time = AssetIndexMongo.get_exec_time(
            datetime.now(), self.asset_info)
        start_time = time.time()
        quote_date = data['date']
        symbol = appname
        if quote_date > decision_time:
            # TODO: Check to avoid dupe launch
            # Run first EXO calculation for this day
            self.logger.info(
                "Run EXO calculation, at decision time: {0}".format(
                    decision_time))
            assetindex = AssetIndexMongo(MONGO_CONNSTR, MONGO_EXO_DB)
            exostorage = EXOStorage(MONGO_CONNSTR, MONGO_EXO_DB)
            futures_limit = 3
            options_limit = 10
            #datasource = DataSourceMongo(mongo_connstr, mongo_db_name, assetindex, futures_limit, options_limit, exostorage)
            #datasource = DataSourceSQL(SQL_HOST, SQL_USER, SQL_PASS, assetindex, futures_limit, options_limit, exostorage)
            #
            # Test DB temporary credentials
            #
            # NOTE(review): hard-coded test-DB connection string — replace before release.
            tmp_mongo_connstr = 'mongodb://*****:*****@10.0.1.2/tmldb_test?authMechanism=SCRAM-SHA-1'
            tmp_mongo_db = 'tmldb_test'
            datasource = DataSourceHybrid(SQL_HOST, SQL_USER, SQL_PASS,
                                          assetindex, tmp_mongo_connstr,
                                          tmp_mongo_db, futures_limit,
                                          options_limit, exostorage)
            # Run EXO calculation
            self.run_exo_calc(datasource, decision_time, symbol,
                              backfill_dict=None)
            end_time = time.time()
            self.signalapp.send(
                MsgStatus('OK',
                          'EXO processed for {0} at {1}'.format(
                              symbol, quote_date),
                          context={
                              'instrument': symbol,
                              'date': quote_date,
                              'exec_time': end_time - start_time
                          },
                          notify=True))
        else:
            self.logger.debug("Waiting next decision time")

    def run_exo_calc(self, datasource, decision_time, symbol, backfill_dict):
        """Run every configured EXO builder for `symbol` at `decision_time`.

        :param backfill_dict: None in online mode (EXO-quote messages are sent);
            in backfill mode, a {exo_name: start_decision_time} map used to skip
            dates before an EXO's first required calculation date.
        """
        # Running all EXOs builder algos
        # NOTE(review): uses the module-level 'args', not self.args — confirm intentional.
        exos_list = self.get_exo_list(args)
        for exo in exos_list:
            self.logger.info('Processing EXO: {0} at {1}'.format(
                exo['name'], decision_time))
            ExoClass = exo['class']
            # Processing Long/Short and bidirectional EXOs
            # direction_type() == 0 means bidirectional (run both passes).
            for direction in [1, -1]:
                if ExoClass.direction_type() == 0 or ExoClass.direction_type() == direction:
                    try:
                        with ExoClass(symbol, direction, decision_time,
                                      datasource,
                                      log_file_path=args.debug) as exo_engine:
                            try:
                                asset_list = exo_engine.ASSET_LIST
                                # Checking if current symbol is present in EXO class ASSET_LIST
                                if asset_list is not None:
                                    if symbol not in asset_list:
                                        # Skipping assets which are not in the list
                                        continue
                            except AttributeError:
                                warnings.warn(
                                    "EXO class {0} doesn't contain ASSET_LIST attribute filter, calculating all assets"
                                    .format(ExoClass))
                            if backfill_dict is not None:
                                #
                                # Check if last EXO quote is < decision_time
                                # if True - skip the calculation until actual date come
                                #
                                # Note: this is fix for situations when we added new EXO, and we need it to be calculated
                                # from the beginning of the history
                                if exo_engine.exo_name in backfill_dict:
                                    exo_start_date = backfill_dict[
                                        exo_engine.exo_name]
                                    if decision_time < exo_start_date:
                                        # NOTE(review): 'break' exits the direction
                                        # loop only (not the EXO loop) — confirm.
                                        break
                            self.logger.debug("Running EXO instance: " +
                                              exo_engine.name)
                            # Load EXO information from mongo
                            exo_engine.load()
                            exo_engine.calculate()
                            if backfill_dict is None:
                                # Sending signal to alphas that EXO price is ready
                                self.signalapp.send(
                                    MsgEXOQuote(exo_engine.exo_name,
                                                decision_time))
                    except:
                        # Per-EXO isolation: one failing EXO must not stop the rest.
                        self.logger.exception(
                            "Failed processing EXO: {0} on {1}".format(
                                ExoClass, symbol))
                        self.signalapp.send(
                            MsgStatus(
                                "ERROR",
                                "Failed processing EXO: {0} on {1}".format(
                                    ExoClass, symbol),
                                notify=True))

    def do_backfill(self):
        """Backfill EXO series day-by-day from args.backfill up to today.

        Existing series resume from the day after their last stored point;
        brand-new series start from args.backfill.
        """
        #
        self.logger.info("Run EXO backfill from {0}".format(
            self.args.backfill))
        assetindex = AssetIndexMongo(MONGO_CONNSTR, MONGO_EXO_DB)
        exostorage = EXOStorage(MONGO_CONNSTR, MONGO_EXO_DB)
        futures_limit = 3
        options_limit = 20
        # datasource = DataSourceMongo(mongo_connstr, mongo_db_name, assetindex, futures_limit, options_limit, exostorage)
        datasource = DataSourceSQL(SQL_HOST, SQL_USER, SQL_PASS, assetindex,
                                   futures_limit, options_limit, exostorage)
        exos = exostorage.exo_list(exo_filter=self.args.instrument + '_',
                                   return_names=True)
        exo_start_dates = {}
        exec_time, decision_time = AssetIndexMongo.get_exec_time(
            self.args.backfill, self.asset_info)
        current_time = decision_time
        if len(exos) > 0:
            # Resume each existing EXO series from the day after its last point.
            for exo_name in exos:
                series = exostorage.load_series(exo_name)[0]
                if series is not None:
                    last_date = series.index[-1] + timedelta(days=1)
                    exec_time, decision_time = AssetIndexMongo.get_exec_time(
                        last_date, self.asset_info)
                    self.logger.info(
                        'Updating existing {0} series from: {1}'.format(
                            exo_name, decision_time))
                    exo_start_dates[exo_name] = decision_time
        else:
            self.logger.info('Updating new EXO series from: {0}'.format(
                self.args.backfill))
            exec_time, decision_time = AssetIndexMongo.get_exec_time(
                self.args.backfill, self.asset_info)
        exec_time_end, decision_time_end = AssetIndexMongo.get_exec_time(
            datetime.now(), self.asset_info)
        while current_time <= decision_time_end:
            self.logger.info("Backfilling: {0}".format(current_time))
            # NOTE(review): passes module-level 'args.instrument' — confirm vs self.args.
            self.run_exo_calc(datasource, current_time, args.instrument,
                              backfill_dict=exo_start_dates)
            current_time += timedelta(days=1)
            exec_time += timedelta(days=1)

    def main(self):
        """Entry point: backfill mode when --backfill is given, otherwise
        subscribe to the instrument's datafeed and process quotes online."""
        self.logger.info("Initiating EXO building engine for {0}".format(
            self.args.instrument))
        # Initialize EXO engine SignalApp (report first status)
        self.signalapp = SignalApp(self.args.instrument, APPCLASS_EXO,
                                   RABBIT_HOST, RABBIT_USER, RABBIT_PASSW)
        self.signalapp.send(MsgStatus('INIT', 'Initiating EXO engine'))
        # Get information about decision and execution time
        assetindex = AssetIndexMongo(MONGO_CONNSTR, MONGO_EXO_DB)
        self.asset_info = assetindex.get_instrument_info(args.instrument)
        if self.args.backfill is not None:
            # Backfill mode enabled
            self.do_backfill()
            self.signalapp.send(
                MsgStatus("OK",
                          "EXO backfill for {0} has been finished.".format(
                              self.args.instrument),
                          notify=True))
        else:
            # Online mode
            # Subscribe to datafeed signal app
            self.logger.debug('Subscribing datafeed for: ' +
                              self.args.instrument)
            datafeed = SignalApp(self.args.instrument, APPCLASS_DATA,
                                 RABBIT_HOST, RABBIT_USER, RABBIT_PASSW)
            # Listening datafeed loop
            datafeed.listen(self.on_new_quote)
class QuotesNotifyScript:
    """Quote-watchdog daemon.

    Polls MongoDB for the latest bar of an instrument, publishes
    MsgQuoteNotification when a new trading day's bar passes the decision
    time, and raises DELAY alerts when quotes stall during business hours.
    State (last published bar) is persisted in STATUS_QUOTES_COLLECTION.
    """

    def __init__(self, args, loglevel):
        # args: parsed CLI namespace (expects .instrument and .delay minutes)
        self.signalapp = None
        self.asset_info = None
        self.args = args
        self.loglevel = loglevel
        # Date of the last bar we notified about (lazy-loaded from status DB).
        self.last_quote_date = None
        # Minute of the last poll iteration; -1 = first iteration (used to
        # throttle repeated log/alert messages to once per minute).
        self.last_minute = -1
        logging.getLogger("pika").setLevel(logging.WARNING)
        logging.basicConfig(
            stream=sys.stdout,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=loglevel)
        self.pprinter = pprint.PrettyPrinter(indent=4)

    def get_last_quote_date(self):
        """Return the persisted last bar time for this instrument, or NULL_DATE
        if no status document exists yet."""
        document = self.status_db[STATUS_QUOTES_COLLECTION].find_one(
            {'instrument': self.args.instrument})
        if document is not None and 'last_bar_time' in document:
            return document['last_bar_time']
        else:
            return NULL_DATE

    def set_last_quote_state(self, context, update):
        """Persist the quote state: full upsert when update=False, partial
        $set of the changing fields when update=True."""
        if not update:
            self.status_db[STATUS_QUOTES_COLLECTION].replace_one(
                {'instrument': context['instrument']}, context, upsert=True)
        else:
            self.status_db[STATUS_QUOTES_COLLECTION].update_one(
                {'instrument': context['instrument']}, {
                    '$set': {
                        'last_bar_time': context['last_bar_time'],
                        'now': context['now'],
                        'quote_status': context['quote_status']
                    }
                })

    def get_last_bar_time(self):
        """Return the newest non-error bar time from the quotes DB."""
        last_bar_time = self.db['futurebarcol'].find({
            'errorbar': False
        }).sort('bartime', pymongo.DESCENDING).limit(1).next()['bartime']
        return last_bar_time

    def date_now(self):
        # Thin wrapper to allow clock injection in tests.
        # NOTE(review): returns naive local time; bar times in the DB are
        # compared directly against it — confirm both are the same timezone.
        return datetime.now()

    def main(self):
        """Entry point: connect to the DBs, ensure indexes, then poll the
        latest bar every 15 seconds forever."""
        logging.info("Initiating data notification script")
        # Initialize EXO engine SignalApp (report first status)
        self.signalapp = SignalApp(self.args.instrument, APPCLASS_DATA,
                                   RABBIT_HOST, RABBIT_USER, RABBIT_PASSW)
        self.signalapp.send(
            MsgStatus('INIT', 'Initiating data notification script'))
        # Get information about decision and execution time
        assetindex = AssetIndexMongo(MONGO_CONNSTR, MONGO_EXO_DB)
        self.asset_info = assetindex.get_instrument_info(args.instrument)
        # TODO: replace DB name after release
        mongo_db_name = 'tmldb_test'
        tmp_mongo_connstr = 'mongodb://*****:*****@10.0.1.2/tmldb_test?authMechanism=SCRAM-SHA-1'
        client = MongoClient(tmp_mongo_connstr)
        self.db = client[mongo_db_name]
        # Creating index for 'bartime'
        self.db['futurebarcol'].create_index([('bartime', pymongo.DESCENDING)],
                                             background=True)
        status_client = MongoClient(MONGO_CONNSTR)
        self.status_db = status_client[MONGO_EXO_DB]
        self.status_db[STATUS_QUOTES_COLLECTION].create_index(
            [('instrument', pymongo.DESCENDING)], background=True)
        # NOTE(review): this local is never read (self.last_minute is used
        # instead) — appears vestigial.
        last_minute = 0
        while True:
            # Getting last bar time from DB
            last_bar_time = self.get_last_bar_time()
            self.process_quote(last_bar_time)
            time.sleep(15)

    def is_quote_delayed(self, last_bar_time):
        """True when, on a US business day between 09:00 and 12:59 local,
        the newest bar is older than args.delay minutes."""
        dtnow = self.date_now()
        if bdateutil.isbday(
                dtnow,
                holidays=holidays.US()) and dtnow.hour > 8 and dtnow.hour < 13:
            if int(abs((dtnow - last_bar_time).total_seconds() /
                       60.0)) > self.args.delay:
                return True
        return False

    def process_quote(self, last_bar_time):
        """One poll iteration: raise DELAY alerts, publish a quote
        notification for a new day's bar past decision time, and persist the
        current state either way."""
        dtnow = self.date_now()
        exec_time, decision_time = AssetIndexMongo.get_exec_time(
            dtnow, self.asset_info)
        if self.last_quote_date is None:
            self.last_quote_date = self.get_last_quote_date()
        quote_status = 'IDLE'
        if self.is_quote_delayed(last_bar_time):
            # Throttle the alert to once per wall-clock minute.
            if self.last_minute != dtnow.minute:
                logging.info('Quote delayed more than {0} minutes '
                             'for {1} LastBarTimeDB: {2} Now: {3}'.format(
                                 self.args.delay, self.args.instrument,
                                 last_bar_time, dtnow))
                self.signalapp.send(
                    MsgStatus('DELAY',
                              'Quote delayed more than {0} minutes '
                              'for {1} LastBarTimeDB: {2} Now: {3}'.format(
                                  self.args.delay, self.args.instrument,
                                  last_bar_time, dtnow),
                              notify=True))
            quote_status = 'DELAY'
        # Fire new quote notification if last_bar_time > decision_time
        if self.last_quote_date.date() != last_bar_time.date(
        ) and last_bar_time > decision_time:
            if quote_status != 'DELAY':
                quote_status = 'RUN'
            # Reporting current status
            self.signalapp.send(
                MsgStatus('RUN',
                          'Processing new bar {0}'.format(last_bar_time)))
            logging.info('Running new bar. Bar time: {0}'.format(last_bar_time))
            self.last_quote_date = last_bar_time
            context = {
                'last_bar_time': last_bar_time,
                'now': dtnow,
                'last_run_date': self.last_quote_date,
                'decision_time': decision_time,
                'execution_time': exec_time,
                'instrument': self.args.instrument,
                'quote_status': quote_status,
            }
            logging.debug('Current context:\n {0}'.format(
                self.pprinter.pformat(context)))
            self.signalapp.send(
                MsgQuoteNotification(self.args.instrument, last_bar_time,
                                     context))
            self.set_last_quote_state(context, update=False)
        else:
            context = {
                'last_bar_time': last_bar_time,
                'now': dtnow,
                'last_run_date': self.last_quote_date,
                'decision_time': decision_time,
                'execution_time': exec_time,
                'instrument': self.args.instrument,
                'quote_status': quote_status,
            }
            if self.last_quote_date == NULL_DATE:
                # If quote is absent is status_quotes collection, insert new
                self.set_last_quote_state(context, update=False)
            else:
                self.set_last_quote_state(context, update=True)
            # Log initial information:
            if self.last_minute == -1:
                logging.debug('Current context:\n {0}'.format(
                    self.pprinter.pformat(context)))
            elif self.last_minute != dtnow.minute:
                logging.debug('Last bar time {0}'.format(last_bar_time))
                #self.signalapp.send(MsgStatus('IDLE', 'Last bar time {0}'.format(last_bar_time), context))
        self.last_minute = dtnow.minute
class AlphaOnlineScript:
    """Online alpha engine.

    Listens for MsgEXOQuote messages from the EXO engine, loads the alpha's
    strategy context by module name, re-runs the swarm for the new EXO quote,
    and publishes the updated alpha state back to the message bus.
    """

    def __init__(self, args, loglevel):
        # args: parsed CLI namespace (expects .alphaname)
        self.args = args
        self.loglevel = loglevel
        self.alpha_name = args.alphaname
        logging.getLogger("pika").setLevel(logging.WARNING)
        logger = logging.getLogger('AlphaOnlineScript')
        logger.setLevel(loglevel)
        # create console handler with a higher log level
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(loglevel)
        # create formatter and add it to the handlers
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        logger.addHandler(ch)
        self.log = logger
        self.log.info('Init AlphaOnlineScript Alpha: {0}'.format(
            self.alpha_name))
        self.signal_app = SignalApp(self.alpha_name, APPCLASS_ALPHA,
                                    RABBIT_HOST, RABBIT_USER, RABBIT_PASSW)
        self.signal_app.send(
            MsgStatus(
                "INIT",
                'Initiating online alpha engine {0}'.format(self.alpha_name)))
        # Wildcard subscription: receives EXO quotes from every EXO app.
        self.exo_app = SignalApp('*', APPCLASS_EXO, RABBIT_HOST, RABBIT_USER,
                                 RABBIT_PASSW)

    def swarm_updated_callback(self, swm):
        """Called by SwarmOnlineManager after a swarm is recalculated;
        publishes the swarm's new state as MsgAlphaState."""
        # Logging swarm structure information
        if self.loglevel == logging.DEBUG:
            self.log.debug(
                'swarm_updated_callback: Swarm processed: {0}'.format(
                    swm.name))
            last_state = swm.laststate_to_dict()
            # Drop the bulky series payload from the debug dump.
            del last_state['swarm_series']
            pp = pprint.PrettyPrinter(indent=4)
            self.log.debug('Swarm last state: \n {0}'.format(
                pp.pformat(last_state)))
        self.signal_app.send(MsgAlphaState(swm))

    def on_exo_quote_callback(self, appclass, appname, msg):
        """EXO-feed callback: on a new EXO quote, import the alpha's strategy
        module and run the swarm update for that EXO."""
        # Make sure that is valid EXO quote message
        if msg.mtype == MsgEXOQuote.mtype:
            self.log.debug('on_exo_quote_callback: {0}.{1} Data: {2}'.format(
                appname, appclass, msg))
            self.log.info('Processing EXO quote: {0} at {1}'.format(
                msg.exo_name, msg.exo_date))
            try:
                # Load strategy_context
                # The alpha name is normalized (strip 'alpha_' prefix / '.py'
                # suffix) before building the module path.
                m = importlib.import_module('scripts.alphas.alpha_{0}'.format(
                    self.alpha_name.replace('alpha_', '').replace('.py', '')))
                # Initiate swarm from Mongo DB
                exo_name = msg.exo_name
                context = m.STRATEGY_CONTEXT
                context['strategy']['suffix'] = m.STRATEGY_SUFFIX
                swmonline = SwarmOnlineManager(MONGO_CONNSTR, MONGO_EXO_DB,
                                               context)
                # Update and save swarm with new day data (and run callback)
                swmonline.process(exo_name,
                                  swm_callback=self.swarm_updated_callback)
                self.signal_app.send(
                    MsgStatus("RUN", 'Processing generic alpha'))
            except:
                # Keep the listener alive: report the failure and continue.
                self.log.exception("Error in processing EXO quote: {0}".format(
                    msg.exo_name))
                self.signal_app.send(
                    MsgStatus(
                        "ERROR",
                        "Error while processing EXO quote: {0} for alpha {1}".
                        format(msg.exo_name, self.alpha_name),
                        notify=True,
                    ))

    def main(self):
        """
        Application main()
        :return:
        """
        # Subscribe to rabbit MQ EXO feed
        self.exo_app.listen(self.on_exo_quote_callback)
# Ad-hoc smoke test: publishes a single MsgEXOQuote for ES_CallSpread on the
# EXO application class. Running this module has a message-bus side effect.
from datetime import datetime

from tradingcore.signalapp import SignalApp, APPCLASS_EXO
from tradingcore.messages import *

exo_quote_app = SignalApp('ES_CallSpread', APPCLASS_EXO)
exo_quote_app.send(MsgEXOQuote('ES_CallSpread', datetime.now()))
class TradingOnlineScript:
    """Online trading engine.

    Listens for MsgAlphaState messages from alpha engines, recomputes account
    positions via ExecutionManager, and publishes the resulting position list
    as MsgAlphaSignal for downstream real-time consumers.
    """

    def __init__(self, args, loglevel):
        self.args = args
        self.loglevel = loglevel
        logging.getLogger("pika").setLevel(logging.WARNING)
        logger = logging.getLogger('TradingOnlineScript')
        logger.setLevel(loglevel)
        # create console handler with a higher log level
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(loglevel)
        # create formatter and add it to the handlers
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        logger.addHandler(ch)
        self.log = logger
        self.log.info('Init TradingOnlineScript')
        self.signal_app = SignalApp('TradingOnlineScript', APPCLASS_SIGNALS,
                                    RABBIT_HOST, RABBIT_USER, RABBIT_PASSW)
        self.signal_app.send(
            MsgStatus("INIT", 'Initiating trading online engine'))
        # Wildcard subscription: receives alpha-state messages from every alpha.
        self.alpha_app = SignalApp('*', APPCLASS_ALPHA, RABBIT_HOST,
                                   RABBIT_USER, RABBIT_PASSW)
        #
        # Init EXO engine datasource
        #
        assetindex = AssetIndexMongo(MONGO_CONNSTR, MONGO_EXO_DB)
        exostorage = EXOStorage(MONGO_CONNSTR, MONGO_EXO_DB)
        futures_limit = 3
        options_limit = 10
        self.datasource = DataSourceBase(assetindex, futures_limit,
                                         options_limit, exostorage)
        self.exmgr = ExecutionManager(MONGO_CONNSTR, self.datasource,
                                      MONGO_EXO_DB)

    def on_alpha_state_callback(self, appclass, appname, msg):
        """Alpha-feed callback: on each MsgAlphaState, recompute and persist
        account positions, then broadcast them as MsgAlphaSignal."""
        # Make sure that is valid EXO quote message
        if msg.mtype == MsgAlphaState.mtype:
            self.signal_app.send(
                MsgStatus("RUN", "Processing account positions"))
            self.log.debug(
                'on_alpha_signal_callback: {0}.{1} Data: {2}'.format(
                    appname, appclass, msg))
            self.log.info('Processing Alpha state of: {0} at {1}'.format(
                msg.swarm_name, msg.last_date))
            try:
                # Processing positions for each campaign/account
                pos_list = self.exmgr.account_positions_process(
                    write_to_db=True)
                pp = pprint.PrettyPrinter(indent=4)
                self.log.debug(pp.pformat(pos_list))
                # Send position information to real-time software via RabbitMQ
                self.signal_app.send(MsgAlphaSignal(msg, pos_list))
            except:
                # Keep the listener alive: report the failure and continue.
                self.log.exception("Error in processing account positions")
                self.signal_app.send(
                    MsgStatus("ERROR",
                              "Error while processing account positions",
                              notify=True))

    def main(self):
        """
        Application main()
        :return:
        """
        # Subscribe to rabbit MQ EXO feed
        self.alpha_app.listen(self.on_alpha_state_callback)
class CmeSpanImport(object): """ The object that contains methods to import span data into mongodb """ def __init__(self, args=None): if args != None: self.args = args self.optionenabled = args['optionenabled'] else: self.optionenabled = 2 self.testing = TESTING self.mongo_queries = MongoQueries() self.instrumentInfo = InstrumentInfo(optionenabled=self.optionenabled) self.signalapp = SignalApp('historicaldata', APPCLASS_DATA, RABBIT_HOST, RABBIT_USER, RABBIT_PASSW) def load_span_file(self, filepath): """ Reads and loads the span file into the mongodb """ self.filepath = filepath self.short_file_name = ntpath.basename(self.filepath) #print(self.short_file_name) #print(self.filepath) if os.path.exists(self.filepath): if PUSH_SLACK_LOGGING: self.signalapp.send( MsgStatus('HISTORICAL_LOAD', 'Initialized data load {0}'.format( self.short_file_name), notify=True)) self.filled_risk_free_rate = False file_object = open(self.filepath, 'r') try: file_lines = file_object.readlines() except: file_lines = [] while True: try: line = file_object.readline() while line: try: file_lines.append(line) line = file_object.readline() except: warnings.warn("Can't Import File Line") continue break except: continue #print(file_lines) #print('test ', self.filepath) #file_lines = self.mongo_queries.save_span_file(file_lines, file_name=self.short_file_name, # full_file_name=self.filepath) data_row_type = SPAN_FILE_ROW_TYPES.TYPE_NULL for instrument in self.instrumentInfo.instrument_list: if RUN_SPECIFIC_INSTRUMENT_ID is None\ or instrument['idinstrument'] in RUN_SPECIFIC_INSTRUMENT_ID: if self.testing: self.test_df = [] rowListTypeB = [] rowListTypeB_Future_dict = {} #used as dictionary for finding future by month and year rowListType8_F = [] rowListTypeB_Option_dict = {} # used as dictionary for finding future by month and year rowListType8_OOF = [] #rowListType8_P = []; rowListTypeMAIN = [] print('running', instrument['symbol']) for line in file_lines: #print(line) line_row_type = 
self.get_cme_line_type(line) #print(line_row_type) if line_row_type == SPAN_FILE_ROW_TYPES.TYPE_0: #print(line) if len(line) > 0: self.extract_rowtype_0( line_in=line, instrument_symbol=instrument) ''' if PUSH_SLACK_LOGGING: self.signalapp.send( MsgStatus('HISTORICAL_LOAD', 'Initialized Loading {0} {1}'.format(instrument['symbol'], self.span_file_date_time.strftime( '%b %d %Y')), notify=True)) ''' '''get the interest rate from the database after date is extracted from rowtype_0''' if not self.filled_risk_free_rate: try: self.risk_free_rate = self.mongo_queries.get_risk_free_rate( self.span_file_date_time) self.filled_risk_free_rate = True except: self.risk_free_rate = 0.01 warnings.warn( "Can't find risk free rate for: {0}". format(self.span_file_date_time)) continue '''update the instrument info specific to the current date''' self.instrumentInfo.update_instrument_list( instrument, self.span_file_date_time) data_row_type = self.decide_data_rowtype_based_on_file_prefix( ) #print("data_row_type " + str(data_row_type)) elif line_row_type == SPAN_FILE_ROW_TYPES.TYPE_B: """ Extracts the B row type data out of the line """ if len(line) > 108: dstBe = DataSpanType_B_Extract( line, instrument) dstBe.extract_identifiers() if dstBe.row_exchg == instrument['exchange']['spanexchangesymbol'] and \ dstBe.underlying == instrument['spanfuturecode']: rowListTypeB.append(dstBe) #print("extract_rowtype_B ", # instrument['exchange']['spanexchangesymbol'], # instrument['spanfuturecode'], # line) elif line_row_type == data_row_type: if len( line ) > 0: #CME_SPAN_TYPE_8.product_type_end - 1: dst8e = DataSpanType_8_Extract( line, self.span_file_date_time, instrument) dst8e.extract_identifiers() #print("@@@" + dst8e.row_exchg) if dst8e.row_exchg == instrument['exchange']['spanexchangesymbol'] and \ (dst8e.commodity_product_code == instrument['spanfuturecode'] or dst8e.commodity_product_code in \ instrument['span_cqg_codes_dict']): #dst8e.commodity_product_code == 
instrument['spanoptioncode']): if dst8e.product_type == SPAN_FILE_PRODUCT_TYPE_CODES.fut: rowListType8_F.append(dst8e) #print("@@@" + dst8e.row_exchg) elif dst8e.product_type == SPAN_FILE_PRODUCT_TYPE_CODES.oof: dst8e.extract_option_identifiers_type8( ) rowListType8_OOF.append(dst8e) #if dst8e.option_type == SPAN_FILE_CONTRACT_TYPE.call: #elif dst8e.option_type == SPAN_FILE_CONTRACT_TYPE.put: # rowListType8_P.append(dst8e) '''The row types have all been categorized. Now going through all the lists of the different line types''' ''' FILE_HEADER, FUTURE_CONTRACT_INFO_IMPORT, FUTURE_CONTRACT_DATA_IMPORT, OPTION_CONTRACT_INFO_IMPORT, OPTION_CALL_IMPORT, OPTION_PUT_IMPORT ''' '''below imports the future contract info''' for row_dstBe_option_info in rowListTypeB: row_dstBe_option_info.extract_commodity_product_code_identifiers( ) if row_dstBe_option_info.product_type == SPAN_FILE_PRODUCT_TYPE_CODES.fut: row_dstBe_option_info.extract_future_identifiers() row_dstBe_option_info.extract_future_expiration() rowListTypeB_Future_dict[ row_dstBe_option_info.future_contract_month, row_dstBe_option_info. 
future_contract_year] = row_dstBe_option_info if len(row_dstBe_option_info.future_expiration_str ) > 0: info_dict = \ { "contractname": row_dstBe_option_info.future_cqg_symbol, "expirationdate": row_dstBe_option_info.future_contract_expiration, "month": row_dstBe_option_info.future_contract_month_char, "idinstrument": row_dstBe_option_info.instrument['idinstrument'], "cqgsymbol": row_dstBe_option_info.future_cqg_symbol, "year": row_dstBe_option_info.future_contract_year, "monthint": row_dstBe_option_info.future_contract_month # "idcontract" : 1, # Not required } if self.testing: #pp = pprint.PrettyPrinter(indent=4) #pp.pprint(info_dict) row_dstBe_option_info.idcontract = 0 #df = pd.DataFrame.from_dict(info_dict, orient="index") #if self.test_df is None: # self.test_df = df #else: #x = str(info_dict) #print(self.test_df) self.test_df.append(str(info_dict)) #print(self.test_df) #df.to_csv(instrument['symbol'] + "_data.csv") else: contract_info_idcontract = self.mongo_queries.save_future_info( info_dict) row_dstBe_option_info.idcontract = contract_info_idcontract '''gets future contract settlements''' for row_dst_8_F_e_future_data in rowListType8_F: #print('****** row_dst_8_F_e_future_data ' + row_dst_8_F_e_future_data.product_type) #if row_dst_8_F_e_future_data.product_type == SPAN_FILE_PRODUCT_TYPE_CODES.fut: row_dst_8_F_e_future_data.extract_future_identifiers() #print('ticksize, display', instrument['spanticksize'], instrument['spantickdisplay']) if data_row_type == SPAN_FILE_ROW_TYPES.TYPE_81: row_dst_8_F_e_future_data.extract_settlement_filetype81( \ instrument['spanticksize'], instrument['spantickdisplay']) elif data_row_type == SPAN_FILE_ROW_TYPES.TYPE_82: row_dst_8_F_e_future_data.extract_settlement_filetype82( \ instrument['spanticksize'], instrument['spantickdisplay']) row_dstBe_future = \ rowListTypeB_Future_dict[row_dst_8_F_e_future_data.future_contract_month, row_dst_8_F_e_future_data.future_contract_year] row_dstBe_future.extracted_future_data_row = 
row_dst_8_F_e_future_data #update future contract with settlement and date to mongo #self.mongo_queries.fill_future_price(row_dst_8_F_e_future_data, row_dstBe_future) info_dict = \ {'idcontract': row_dstBe_future.idcontract, 'settlement': row_dst_8_F_e_future_data.settlement_price, 'openinterest': 0, 'volume': 0, 'date': row_dst_8_F_e_future_data.span_file_date_time} #self.mongo_queries.save_futures_settlement(info_dict) if self.testing: info_dict = \ {'idcontract': row_dstBe_future.idcontract, 'settlement': row_dst_8_F_e_future_data.settlement_price, 'settlement_price_str':row_dst_8_F_e_future_data.settlement_price_str, 'openinterest': 0, 'volume': 0, 'date': row_dst_8_F_e_future_data.span_file_date_time} self.test_df.append(str(info_dict)) else: self.mongo_queries.save_futures_settlement( info_dict) '''below imports the OPTION contract info''' for row_dstBe_option_info in rowListTypeB: row_dstBe_option_info.extract_commodity_product_code_identifiers( ) #if row_dstBe_option_info.commodity_product in \ # row_dstBe_option_info.instrument['span_cqg_codes_dict']: # print('!@#',row_dstBe_option_info.instrument['span_cqg_codes_dict'][row_dstBe_option_info.commodity_product]) if row_dstBe_option_info.product_type == SPAN_FILE_PRODUCT_TYPE_CODES.oof and \ row_dstBe_option_info.commodity_product in \ row_dstBe_option_info.instrument['span_cqg_codes_dict']: row_dstBe_option_info.extract_future_identifiers() row_dstBe_option_info.extract_option_identifiers_typeB( ) row_dstBe_option_info.extract_option_expiration_and_timetoexp( ) row_dstBe_option_info.span_underlying_future_contract_props = \ rowListTypeB_Future_dict[row_dstBe_option_info.future_contract_month, row_dstBe_option_info.future_contract_year] rowListTypeB_Option_dict[ row_dstBe_option_info.option_contract_month, \ row_dstBe_option_info.option_contract_year, \ row_dstBe_option_info.commodity_product] = row_dstBe_option_info #TimeSpan span = optionContractMonthDate - spanOptionContractProps.optionContractExpiration; 
#if (span.TotalDays < 150) # spanOptionContractPropsList.Add(spanOptionContractProps) for row_dst_8_OOF_e_option_data in rowListType8_OOF: #print('****** row_dst_8_F_e_future_data ' + row_dst_8_F_e_future_data.product_type) #if row_dst_8_F_e_future_data.product_type == SPAN_FILE_PRODUCT_TYPE_CODES.oof: row_dst_8_OOF_e_option_data.extract_option_identifiers_type8( ) if row_dst_8_OOF_e_option_data.commodity_product_code in \ row_dst_8_OOF_e_option_data.instrument['span_cqg_codes_dict']: row_dst_8_OOF_e_option_data.extract_future_identifiers( ) key = (row_dst_8_OOF_e_option_data.option_contract_month, \ row_dst_8_OOF_e_option_data.option_contract_year, \ row_dst_8_OOF_e_option_data.commodity_product_code) if key in rowListTypeB_Option_dict: row_dstBe_option_info = \ rowListTypeB_Option_dict[key] #if dst8e.option_type == SPAN_FILE_CONTRACT_TYPE.call: optionTickSize = instrument[ 'spanoptionticksize'] optionTickDisplay = instrument[ 'spanoptiontickdisplay'] if instrument[ 'secondaryoptionticksizerule'] > 0: ratio = instrument[ 'secondaryoptionticksize'] / instrument[ 'spanoptionticksize'] optionTickDisplay = ratio * instrument[ 'spanoptiontickdisplay'] optionTickSize = instrument[ 'secondaryoptionticksize'] if data_row_type == SPAN_FILE_ROW_TYPES.TYPE_81: row_dst_8_OOF_e_option_data.extract_settlement_filetype81( \ optionTickSize, \ optionTickDisplay) elif data_row_type == SPAN_FILE_ROW_TYPES.TYPE_82: row_dst_8_OOF_e_option_data.extract_settlement_filetype82( \ optionTickSize, \ optionTickDisplay) #row_dstBe_option_info.span_underlying_future_contract_props.extracted_future_data_row \ # .settlement_price ''' print('$$$$$$$$$$',row_dstBe_option_info.product_type, row_dst_8_OOF_e_option_data.option_type, \ row_dstBe_option_info.span_underlying_future_contract_props.extracted_future_data_row \ .settlement_price, \ row_dst_8_OOF_e_option_data.option_strike_price, \ row_dstBe_option_info.option_time_to_exp, \ self.risk_free_rate, \ 
row_dst_8_OOF_e_option_data.settlement_price, \ optionTickSize) ''' #calculate implied vol row_dst_8_OOF_e_option_data.implied_vol = \ round(calculateOptionVolatilityNR(row_dst_8_OOF_e_option_data.option_type, \ row_dstBe_option_info.span_underlying_future_contract_props.extracted_future_data_row \ .settlement_price, \ row_dst_8_OOF_e_option_data.option_strike_price, \ row_dstBe_option_info.option_time_to_exp, \ self.risk_free_rate, \ row_dst_8_OOF_e_option_data.settlement_price, \ optionTickSize),5) #print('^^^^^^^^^^^',row_dst_8_OOF_e_option_data.implied_vol) option_info_dict = \ { "expirationdate": row_dstBe_option_info.option_contract_expiration, "idinstrument": row_dst_8_OOF_e_option_data.instrument['idinstrument'], "strikeprice": row_dst_8_OOF_e_option_data.option_strike_price, # "idoption" : 3, # Not required "callorput": row_dst_8_OOF_e_option_data.option_type, "optionname": row_dst_8_OOF_e_option_data.option_cqg_symbol, "optionmonthint": row_dst_8_OOF_e_option_data.option_contract_month, "cqgsymbol": row_dst_8_OOF_e_option_data.option_cqg_symbol, "idcontract": row_dstBe_option_info.span_underlying_future_contract_props.idcontract, "optionmonth": row_dst_8_OOF_e_option_data.option_contract_month_char, "optionyear": row_dst_8_OOF_e_option_data.option_contract_year, "optioncode": row_dst_8_OOF_e_option_data.option_span_cqg_code['optcod'] } #id_option = self.mongo_queries.save_option_info(option_info_dict) id_option = 0 if self.testing: #pp = pprint.PrettyPrinter(indent=4) #pp.pprint(option_info_dict) #df = pd.DataFrame.from_dict(option_info_dict, orient="index") #self.test_df = self.test_df.append(df) option_info_dict_test = \ { "strike_price_str": row_dst_8_OOF_e_option_data.option_strike_price_str, "expirationdate": row_dstBe_option_info.option_contract_expiration, "idinstrument": row_dst_8_OOF_e_option_data.instrument['idinstrument'], "strikeprice": row_dst_8_OOF_e_option_data.option_strike_price, # "idoption" : 3, # Not required "callorput": 
row_dst_8_OOF_e_option_data.option_type, "optionname": row_dst_8_OOF_e_option_data.option_cqg_symbol, "optionmonthint": row_dst_8_OOF_e_option_data.option_contract_month, "cqgsymbol": row_dst_8_OOF_e_option_data.option_cqg_symbol, "idcontract": row_dstBe_option_info.span_underlying_future_contract_props.idcontract, "optionmonth": row_dst_8_OOF_e_option_data.option_contract_month_char, "optionyear": row_dst_8_OOF_e_option_data.option_contract_year, "optioncode": row_dst_8_OOF_e_option_data.option_span_cqg_code['optcod'] } self.test_df.append( str(option_info_dict_test)) else: id_option = self.mongo_queries.save_option_info( option_info_dict) #self.mongo_queries.fill_option_info_and_data(row_dst_8_OOF_e_option_data, \ # row_dstBe_option_info) option_data_dict = \ { "timetoexpinyears" : row_dstBe_option_info.option_time_to_exp, "idoption" : id_option, "price" : row_dst_8_OOF_e_option_data.settlement_price, "datetime" : row_dst_8_OOF_e_option_data.span_file_date_time, "impliedvol" : row_dst_8_OOF_e_option_data.implied_vol } #id_option = self.mongo_queries.save_options_data(option_data_dict) if self.testing: option_data_dict_test = \ { "timetoexpinyears": row_dstBe_option_info.option_time_to_exp, "idoption": id_option, "price_str": row_dst_8_OOF_e_option_data.settlement_price_str, "price": row_dst_8_OOF_e_option_data.settlement_price, "datetime": row_dst_8_OOF_e_option_data.span_file_date_time, "impliedvol": row_dst_8_OOF_e_option_data.implied_vol } self.test_df.append( str(option_data_dict_test)) else: self.mongo_queries.save_options_data( option_data_dict) #self.test_df.to_csv(instrument['symbol'] + "_data.csv") #x = pd.DataFrame(self.test_df) #x.to_csv(instrument['symbol'] + "_data.csv") if self.testing: thefile = open(instrument['symbol'] + "_data.txt", 'w') for item in self.test_df: print(item) thefile.write("%s\n" % item) print('finished', instrument['symbol']) if PUSH_SLACK_LOGGING: self.signalapp.send( MsgStatus('HISTORICAL_LOAD', 'Loaded {0} {1}'.format( 
                        instrument['symbol'],
                        self.span_file_date_time.strftime('%b %d %Y')),
                    notify=True))

    def get_cme_line_type(self, line=''):
        """
        Map a raw CME span-file line to a SPAN_FILE_ROW_TYPES value.

        Parameters:
            line - one raw line of the span file; its first two
                   characters (stripped) identify the row type.

        Returns:
            The matching SPAN_FILE_ROW_TYPES member for prefixes
            "0", "81", "82", "B", "83"; SPAN_FILE_ROW_TYPES.TYPE_NULL
            for lines of length <= 2.
            NOTE(review): unknown two-char prefixes return -1 rather
            than TYPE_NULL — two different sentinels; confirm callers
            handle both.
        """
        rowType = SPAN_FILE_ROW_TYPES.TYPE_NULL
        try:
            if len(line) > 2:
                rowTypeString = line[0:2].strip()
                # Dispatch table: 2-char line prefix -> row type enum.
                switcher = {
                    "0": SPAN_FILE_ROW_TYPES.TYPE_0,
                    "81": SPAN_FILE_ROW_TYPES.TYPE_81,
                    "82": SPAN_FILE_ROW_TYPES.TYPE_82,
                    "B": SPAN_FILE_ROW_TYPES.TYPE_B,
                    "83": SPAN_FILE_ROW_TYPES.TYPE_83,
                }
                return switcher.get(rowTypeString, -1)
        # NOTE(review): bare except downgrades any error to a print;
        # consider `except Exception` + logging.exception.
        except:
            print("rowtype error")
        return rowType

    def extract_rowtype_0(self, line_in='', instrument_symbol=''):
        """
        Parse the type-0 (file header) row of a span file.

        Extracts the fixed-width header fields using the offsets in
        CME_SPAN_TYPE_0 and stores the parsed business date (ccyyMMdd)
        on ``self.span_file_date_time``.

        Parameters:
            line_in           - the raw type-0 header line
            instrument_symbol - the instrument being imported
                                (currently only used by the
                                commented-out debug print)
        """
        try:
            exchangeComplex = line_in[CME_SPAN_TYPE_0.
                                      exchange_complex_start:CME_SPAN_TYPE_0.
                                      exchange_complex_end].strip()
            # ccyyMMdd
            businessDate = line_in[CME_SPAN_TYPE_0.business_date_start:
                                   CME_SPAN_TYPE_0.business_date_end].strip()
            settlementOrIntraday = line_in[
                CME_SPAN_TYPE_0.settlement_or_intraday_start:CME_SPAN_TYPE_0.
                settlement_or_intraday_end].strip()
            fileIdentifier = line_in[CME_SPAN_TYPE_0.
                                     file_identifier_start:CME_SPAN_TYPE_0.
                                     file_identifier_end].strip()
            fileFormat = line_in[CME_SPAN_TYPE_0.file_format_start:
                                 CME_SPAN_TYPE_0.file_format_end].strip()

            # The file's business date drives all downstream timestamps.
            self.span_file_date_time = datetime.strptime(
                businessDate, "%Y%m%d")
            #print(instrument_symbol['symbol'],
            #      self.span_file_date_time,
            #      exchangeComplex,
            #      businessDate,
            #      settlementOrIntraday,
            #      fileIdentifier,
            #      fileFormat)
        except:
            #print("extract_rowtype_0 error")
            logging.exception("extract_rowtype_0 error")

    def decide_data_rowtype_based_on_file_prefix(self):
        """
        Choose which settlement row type to parse based on the span
        file name prefix.

        "ccl" and "nyb" files use TYPE_82; "cme" files use TYPE_81 on or
        after 2010-05-05 (format change date) and TYPE_82 before that.

        NOTE(review): falls through and implicitly returns None for any
        other prefix — confirm callers handle that.
        """
        if self.short_file_name[0:3] == "ccl":
            return SPAN_FILE_ROW_TYPES.TYPE_82
        elif self.short_file_name[0:3] == "cme":
            if self.span_file_date_time >= datetime(2010, 5, 5):
                return SPAN_FILE_ROW_TYPES.TYPE_81
            else:
                return SPAN_FILE_ROW_TYPES.TYPE_82
        elif self.short_file_name[0:3] == "nyb":
            return SPAN_FILE_ROW_TYPES.TYPE_82
def main(args, loglevel):
    """
    Alpha rebalancer entry point.

    For every EXO known to the EXO storage, loads the matching alpha
    strategy modules from the ``alphas`` folder — custom per-EXO
    packages first, then generic ``alpha_*.py`` modules run in both
    directions — runs each swarm, picks members, and saves the swarm
    state to the online MongoDB.  Finally processes account positions
    and pushes status messages to the signal app.

    Parameters:
        args     - parsed CLI arguments; ``args.logfile`` selects
                   stdout vs. file logging
        loglevel - logging level passed to ``logging.basicConfig``
    """
    if args.logfile == '':
        logging.basicConfig(
            stream=sys.stdout,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=loglevel)
    else:
        logging.basicConfig(
            filename=args.logfile,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=loglevel)

    signalapp = SignalApp("AlphaRebalancer", APPCLASS_ALPHA, RABBIT_HOST,
                          RABBIT_USER, RABBIT_PASSW)
    signalapp.send(MsgStatus('INIT', 'Initiating alpha rebalancer script'))

    # Silence noisy AMQP client logging.
    logging.getLogger("pika").setLevel(logging.WARNING)
    logging.info("Starting...")

    exo_storage = EXOStorage(MONGO_CONNSTR, MONGO_EXO_DB)
    exo_names = exo_storage.exo_list()

    for exo in exo_names:
        logging.info("Processing EXO: " + exo)

        # Load alpha modules to process
        for module in os.listdir('alphas'):
            swm = None
            context = None

            if module.lower() == exo.lower() and os.path.isdir(
                    os.path.join('alphas', module)):
                # Custom EXO folder found: run every alpha_*.py inside it.
                for custom_file in os.listdir(os.path.join('alphas', module)):
                    if 'alpha_' in custom_file and '.py' in custom_file:
                        logging.debug(
                            'Processing custom module: ' +
                            os.path.join('alphas', module, custom_file))
                        try:
                            m = importlib.import_module(
                                'scripts.alphas.{0}.{1}'.format(
                                    module, custom_file.replace('.py', '')))
                            context = m.STRATEGY_CONTEXT

                            # FIX: validate the module's declared exo_name
                            # BEFORE overwriting it.  The original code
                            # assigned ``exo`` first, which made this
                            # folder-mismatch check always pass (dead code).
                            if 'exo_name' in context['strategy'] and context[
                                    'strategy']['exo_name'] != exo:
                                logging.error(
                                    "Custom strategy context exo_name != current EXO name (folder mismatch?)"
                                )
                                raise ValueError(
                                    "Custom strategy context exo_name != current EXO name (folder mismatch?)"
                                )

                            # NOTE(review): STRATEGY_CONTEXT is mutated in
                            # place, so repeated runs share the same dict —
                            # confirm Swarm copies what it needs.
                            context['strategy']['exo_name'] = exo
                            context['strategy'][
                                'suffix'] = m.STRATEGY_SUFFIX + 'custom'
                            context['strategy']['exo_storage'] = exo_storage
                            logging.info(
                                'Running CUSTOM alpha: ' + Swarm.get_name(
                                    m.STRATEGY_CONTEXT, m.STRATEGY_SUFFIX))

                            swm = Swarm(context)
                            swm.run_swarm()
                            swm.pick()

                            # Saving last EXO state to online DB
                            swmonline = SwarmOnlineManager(
                                MONGO_CONNSTR, MONGO_EXO_DB, m.STRATEGY_CONTEXT)
                            logging.debug('Saving: {0}'.format(swm.name))
                            swmonline.save(swm)
                        # FIX: was a bare ``except:`` which also swallowed
                        # SystemExit / KeyboardInterrupt.
                        except Exception:
                            logging.exception('Exception occurred:')
                            signalapp.send(
                                MsgStatus('ERROR',
                                          'Exception in {0}'.format(
                                              Swarm.get_name(
                                                  m.STRATEGY_CONTEXT,
                                                  m.STRATEGY_SUFFIX)),
                                          notify=True))
            elif 'alpha_' in module and '.py' in module:
                # Generic alpha module: run both directions (short/long).
                logging.debug('Processing generic module: ' + module)
                try:
                    m = importlib.import_module('scripts.alphas.{0}'.format(
                        module.replace('.py', '')))
                    for direction in [-1, 1]:
                        context = m.STRATEGY_CONTEXT
                        context['strategy']['exo_name'] = exo
                        context['strategy']['opt_params'][0] = OptParamArray(
                            'Direction', [direction])
                        context['strategy']['suffix'] = m.STRATEGY_SUFFIX
                        context['strategy']['exo_storage'] = exo_storage
                        logging.info('Running alpha: ' +
                                     Swarm.get_name(m.STRATEGY_CONTEXT) +
                                     ' Direction: {0}'.format(direction))

                        swm = Swarm(context)
                        swm.run_swarm()
                        swm.pick()

                        # Saving last EXO state to online DB
                        swmonline = SwarmOnlineManager(MONGO_CONNSTR,
                                                       MONGO_EXO_DB,
                                                       m.STRATEGY_CONTEXT)
                        logging.debug('Saving: {0}'.format(swm.name))
                        swmonline.save(swm)
                # FIX: was a bare ``except:``.
                except Exception:
                    logging.exception('Exception occurred:')
                    signalapp.send(
                        MsgStatus('ERROR',
                                  'Exception in {0}'.format(
                                      Swarm.get_name(m.STRATEGY_CONTEXT,
                                                     m.STRATEGY_SUFFIX)),
                                  notify=True))

    logging.info("Processing accounts positions")
    assetindex = AssetIndexMongo(MONGO_CONNSTR, MONGO_EXO_DB)
    datasource = DataSourceMongo(MONGO_CONNSTR, MONGO_EXO_DB, assetindex,
                                 futures_limit=10, options_limit=10,
                                 exostorage=exo_storage)
    exmgr = ExecutionManager(MONGO_CONNSTR, datasource, dbname=MONGO_EXO_DB)
    exmgr.account_positions_process(write_to_db=True)

    signalapp.send(MsgStatus('DONE', 'Alpha rebalancer script', notify=True))
    logging.info("Done.")
class TradingPositionsArchiveScript:
    """
    Archives the current ``accounts_positions`` MongoDB collection into
    ``accounts_positions_archive`` (one upserted snapshot per date/name)
    and reports progress and errors to the signal app.
    """

    def __init__(self, args, loglevel):
        """
        Set up logging, the signal app and the MongoDB connection.

        Parameters:
            args     - parsed CLI arguments; a non-empty ``args.logfile``
                       adds a file handler next to the stdout handler
            loglevel - logging level applied to this script's logger
                       and handlers
        """
        self.args = args
        self.loglevel = loglevel

        # Silence noisy AMQP client logging.
        logging.getLogger("pika").setLevel(logging.WARNING)

        logger = logging.getLogger('TradingPositionsArchiveScript')
        logger.setLevel(loglevel)

        fh = None
        if args.logfile != '':
            fh = logging.FileHandler(args.logfile)
            fh.setLevel(loglevel)

        # create console handler with a higher log level
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(loglevel)

        # create formatter and add it to the handlers
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        ch.setFormatter(formatter)
        logger.addHandler(ch)
        if fh is not None:
            fh.setFormatter(formatter)
            logger.addHandler(fh)

        self.log = logger
        self.log.info('Init TradingPositionsArchive')

        self.signal_app = SignalApp('TradingPositionsArchive', APPCLASS_UTILS,
                                    RABBIT_HOST, RABBIT_USER, RABBIT_PASSW)
        self.signal_app.send(
            MsgStatus("INIT", 'Initiating TradingPositionsArchive'))

        self.mongo_client = MongoClient(MONGO_CONNSTR)
        self.mongo_db = self.mongo_client[MONGO_EXO_DB]

    def run(self):
        """
        Application main(): snapshot account positions into the archive.

        Skips non-business days (US holidays).  For each document in
        ``accounts_positions`` the timestamp is truncated to midnight and
        an upsert keyed on (date_now, name) is queued, then the batch is
        written with one unordered bulk_write.
        """
        if not bdateutil.isbday(datetime.datetime.now(), holidays=holidays.US()):
            self.log.info("Run is skipped due to non business day")
            return

        # Populating account positions
        operations = []
        update_date = 'N/A'
        for acc_pos_dict in self.mongo_db['accounts_positions'].find({}):
            # Shrinking time of the timestamp
            # 'date_now' - main timestamp of collection
            update_date = datetime.datetime.combine(
                acc_pos_dict['date_now'].date(), datetime.time(0, 0, 0))
            acc_pos_dict['date_now'] = update_date
            # Drop _id so the replacement doesn't clash with the archive
            # collection's own ids.
            del acc_pos_dict['_id']

            # Add MongoDB bulk operation
            operations.append(ReplaceOne(
                {'date_now': update_date, 'name': acc_pos_dict['name']},
                acc_pos_dict,
                upsert=True,
            ))

        self.log.info("Last collection update date: {0}".format(update_date))

        # FIX: pymongo's bulk_write() raises InvalidOperation on an empty
        # operations list, which the BulkWriteError handler below would
        # NOT catch — skip the write when there is nothing to archive.
        if not operations:
            self.log.info("No account positions found, nothing to archive")
            return

        # Execute bulk upsert to Mongo
        pp = pprint.PrettyPrinter(indent=4)
        try:
            bulk_result = self.mongo_db['accounts_positions_archive'].bulk_write(
                operations, ordered=False)
            self.log.info("Bulk write result succeed: \n{0}".format(
                pp.pformat(bulk_result.bulk_api_result)))
            self.signal_app.send(MsgStatus("OK",
                                           "Positions archive created",
                                           notify=True,
                                           ))
        except BulkWriteError as exc:
            self.log.error("Bulk write error occured: {0}".format(
                pp.pformat(exc.details)))
            self.signal_app.send(MsgStatus("ERROR",
                                           "Positions archive error while writing to MongoDB",
                                           notify=True,
                                           ))