def main():
    """Write RUN_DIR/<date>/exec/volume_distr: one row per security with its
    relative volume per exchange plus total daily volume (K shs).

    Command line: --date (required, run date) and --hist (lookback days for
    the volume history, default 20).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--hist", action="store", dest="hist", default=20)
    parser.add_argument("--date", action="store", dest="date", required=True)
    args = parser.parse_args()

    util.set_log_file()

    # get relevant securities
    run_dir = "/".join((os.environ["ROOT_DIR"], "run", os.environ["STRAT"], args.date))
    tickers_file = run_dir + "/tickers.txt"
    if not os.path.isfile(tickers_file):
        util.error("Failed to locate ticker file: {}".format(tickers_file))
        exit(1)

    secid2tickers = get_secid2tickers(tickers_file)
    secid2relvol, secid2dailyvol = get_vol_hist(args.date, secid2tickers, int(args.hist))

    with open(run_dir + "/exec/volume_distr", "w") as vdfile:
        # create header. get the exchanges first — any entry will do; all
        # securities are assumed to carry the same exchange keys
        exchanges = None
        for secid, relvol in secid2relvol.iteritems():
            exchanges = sorted(relvol.keys())
            vdfile.write("#symbol,\t" + ",\t".join(exchanges) + ",\tTOTAL(K shs)\n")
            break
        for secid, relvol in secid2relvol.iteritems():
            data = []
            ticker = secid2tickers[secid]
            data.append(ticker)
            for exch in exchanges:
                # BUG FIX: coerce to str — relative volumes may be numeric and
                # str.join raises TypeError on non-string items; str() is a
                # no-op if they were already strings
                data.append(str(relvol[exch]))
            total = secid2dailyvol[secid]
            data.append(str(total))
            vdfile.write(",\t".join(data) + "\n")
def main(win):
    """Curses entry point: connect a watcher to the guillotine trade servers
    and run the asyncore loop until interrupted.

    win -- curses window handed to the watcher for display.
    """
    parser = OptionParser()
    parser.add_option("-d", "--debug", default=False, action="store_true", dest="debug")
    parser.add_option('-n', '--name', dest='name', help='name of this client to give to server')
    (opt, args) = parser.parse_args()

    if opt.debug:
        util.set_debug()
    else:
        util.set_log_file()

    util.info('launching guillotine commander')
    cfg_file = os.environ['CONFIG_DIR'] + '/exec.conf'
    util.info("loading config file: %s" % cfg_file)
    trade_cfg = config.load_trade_config(cfg_file)

    gtc = guillotine.multiplex_channel()
    # BUG FIX: honour -n/--name. The option value was parsed but never used:
    # when -n was given, the old code sent "gt_watch_" + STRAT instead of the
    # user-supplied name, contradicting the option's own help text.
    if opt.name is None:
        name = "gt-watch from " + (gethostname().split('.', 1))[0]
    else:
        name = opt.name
    gtc.connect(trade_cfg['servers'], trade_cfg['account'], trade_cfg['password'], name=name)
    w = watcher(win, gtc)
    gtc.register(w)
    # 0.2s poll timeout; use_poll avoids select() fd limits
    asyncore.loop(0.2, use_poll=True)
#!/usr/bin/env python import os import sys import util from gzip import GzipFile from data_sources import file_source import datafiles ADV_FRACTION = 1.3 / 100.0 LOT_SIZE = 100 MIN_REQUEST = 5000 MAX_DOLLARS = 1.5e7 if __name__ == "__main__": util.check_include() util.set_log_file() #get last calcres of day fs = file_source.FileSource(os.environ["RUN_DIR"] + "/calcres") calcresFiles = fs.list(r'calcres.*\.txt\.gz') if len(calcresFiles) == 0: util.error("Failed to locate calcres file") sys.exit(1) calcresFiles.sort(key=lambda x: x[0], reverse=True) lastCalcresFile = os.environ["RUN_DIR"] + "/calcres/" + calcresFiles[0][0] #get tradeable secs and advp tradeable = set() advps = {} prices = {}
# NOTE(review): `parser` and the -d/--debug option are created above this
# excerpt; this chunk finishes option setup, picks a database, and takes the
# processed-files lock.
parser.add_option("-f", "--files", dest="files")
parser.add_option("-b", "--database", dest="db", default="pri")  # pri | sec
parser.add_option("-i", "--ignore_mod_time", dest="ignore_mod_time", type=int, default=0)
parser.add_option("-l", "--process_lag", dest="lag", type=float)
(options, args) = parser.parse_args()

# only modes 0, 1 and 2 are meaningful for ignore_mod_time
assert options.ignore_mod_time in (0, 1, 2)

if options.debug:
    util.set_debug()
else:
    util.set_log_file("all", True)

# select primary or secondary database config
if options.db == "pri":
    newdb.init_db()
    database = newdb.get_db()
elif options.db == "sec":
    newdb.init_db(os.environ["SEC_DB_CONFIG_FILE"])
    database = newdb.get_db()
else:
    util.error("Valid database choices are [pri|sec]")
    sys.exit(1)

# Check for previously running instance
if not database.getProcessedFilesLock():
    util.warning("Not processing, previous instance running")
    sys.exit(1)
if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-s", "--source", dest="source")
    parser.add_option("-m", "--maxfiles", default=100, dest="maxfiles")
    parser.add_option("-a", "--maxage", default=5, dest="maxage")
    parser.add_option("-d", "--debug", default=False, action="store_true", dest="debug")
    (options, args) = parser.parse_args()

    if options.debug:
        util.set_debug()
    else:
        util.set_log_file(options.source, True)

    lock_f = util.lock(options.source)  #Create a lock
    # NOTE(review): this rebinding shadows the imported `config` module with
    # the loaded dict for the rest of the script
    config = config.load_source_config(options.source)  #Load config file for source

    # timestamp file remembering when this source was last fetched
    time_file = "%s/%s/%s.time" % (os.environ["DATA_DIR"], config["local_dir"], options.source)
    util.info("Acquiring data from %s" % options.source)
    try:
        # Read last check time
        try:
            last_time = cPickle.load(open(time_file, 'rb'))
        except IOError:
            # first run for this source: no timestamp file yet
            last_time = ""
def main(): util.check_include() util.set_log_file() #get last calcres of previous day prevDay = util.exchangeTradingOffset(os.environ['PRIMARY_EXCHANGE'], os.environ['DATE'], -1) fs = file_source.FileSource(os.environ['RUN_DIR'] + '/../' + str(prevDay) + '/calcres') calcresFiles = fs.list(r'calcres.*\.txt\.gz') if len(calcresFiles) == 0: util.error("Failed to locate calcres file") sys.exit(1) calcresFiles.sort(key=lambda x: x[0], reverse=True) lastCalcresFile = os.environ['RUN_DIR'] + '/../' + str( prevDay) + '/calcres/' + calcresFiles[0][0] secidParams = {} for line in GzipFile(lastCalcresFile, 'r'): if line.startswith('FCOV'): continue secid, name, datatype, datetime, value, currency, born = line.split( '|') if int(secid) not in secidParams: secidParams[int(secid)] = {} if name == 'F:BBETA': secidParams[int(secid)]['BBETA'] = float(value) elif name == 'F:ASE_BETA90': secidParams[int(secid)]['ASE_BETA'] = float(value) elif name == 'CAPITALIZATION': secidParams[int(secid)]['CAP'] = float(value) #get tickers tic2sec, sec2tic = datafiles.load_tickers(os.environ['RUN_DIR'] + '/tickers.txt') etfbetaFilename = os.environ['RUN_DIR'] + '/exec/etfbetafile' etfbetaFile = open(etfbetaFilename, 'w') etfbetaFile.write('#ETF BETA MKT-CAP\n') tickers = tic2sec.keys() tickers.sort() count = 0 for ticker in tickers: secid = tic2sec.get(ticker, None) if secid not in secidParams: continue bbeta = secidParams[secid].get('BBETA', None) asebeta = secidParams[secid].get('ASE_BETA', None) mcap = secidParams[secid].get('CAP', None) if bbeta is None or asebeta is None or mcap is None: util.error( 'Error while getting data for secid {}: ticker={}, bbeta={}, asebeta={}, mcap={}' .format(secid, ticker, bbeta, asebeta, mcap)) continue beta = 0.5 * (bbeta + asebeta) etfbetaFile.write('{0},SPY,{1:.3f},{2:.3f}\n'.format( ticker, beta, mcap)) count += 1 etfbetaFile.close() print 'Finished writing etfbeta file: {} for {} tickers'.format( etfbetaFilename, count)
# NOTE(review): this excerpt begins mid-way through an add_argument call
# (its flag and other keywords are above this view) and ends on a dangling
# `if` whose body is below it; comments only added here.
default=False)
parser.add_argument("--processed-files", action="store_const", const=True, dest="processed_files", default=False)
parser.add_argument("--qcc", action="store_const", const=True, dest="qcc", default=False)
args = parser.parse_args()

if not args.debug:
    util.set_log_file("na")

newdb.init_db()
database = newdb.get_db()

# run whichever reports were requested on the command line
if args.qcc:
    qcc()

if args.xref_changes:
    report = xrefChanges2()
    if args.debug:
        # debug mode prints instead of e-mailing
        print report
    elif report is not None:
        util.email("Xref changes", report)

if args.processed_files:
def main():
    # Fit a power-law exponent ("K") per security to the previous trading
    # day's trade-size histogram via log-log linear regression, blend it
    # 50/50 with yesterday's klist, and write RUN_DIR/exec/klist.
    util.check_include()
    util.set_log_file()

    # ticker <-> secid maps from RUN_DIR/tickers.txt ('ticker|secid' rows)
    tickersFile = open(os.environ['RUN_DIR'] + '/tickers.txt', 'r')
    tickerLines = [line.strip().split('|') for line in tickersFile]
    tickersFile.close()
    sec2tic = {}
    tic2sec = {}
    for line in tickerLines:
        (ticker, secid) = (line[0], int(line[1]))
        sec2tic[secid] = ticker
        tic2sec[ticker] = secid

    # previous day's trade-size histogram: 'secid|size|count' rows + header
    prevDay = util.exchangeTradingOffset(os.environ['PRIMARY_EXCHANGE'], os.environ['DATE'], -1)
    tradeSzFile = open(os.environ['DATA_DIR'] + '/bars/' + str(prevDay) + '/tradeSz.txt', 'r')
    tradeSzLines = [line.strip().split('|') for line in tradeSzFile]
    tradeSzFile.close()

    dataBySecID = {}
    allSizes = {}  # aggregate count per size across all securities
    for line in tradeSzLines[1:]:
        (secid, size, count) = (int(line[0]), int(line[1]), int(line[2]))
        if secid not in dataBySecID:
            dataBySecID[secid] = []
        dataBySecID[secid].append((size, count))
        if size not in allSizes:
            allSizes[size] = 0
        allSizes[size] += count

    # klist[secid] = (K, R2, points used, points available);
    # (-2, -2, -2, -2) = no histogram data, (-1, -1, -1, -1) = no good fit
    klist = {}
    for secid in sec2tic.keys():
        if secid not in dataBySecID:
            klist[secid] = (-2, -2, -2, -2)
            continue
        # sorted by COUNT ascending (x[1]), not by size — so the regression
        # window below grows from the rarest sizes; TODO confirm intent
        dataBySecID[secid].sort(key=lambda x: x[1])
        keys = []
        values = []
        scaledKeys = []  # NOTE(review): unused downstream — retained as-is
        logKeys = []
        logValues = []
        for (key, value) in dataBySecID[secid]:
            # only consider trade sizes between 1 and 100 round lots
            if (key < 100) or (key > 10000):
                continue
            keys.append(key)
            values.append(value)
            scaledKeys.append(key / 100.0)
            logKeys.append(log(key))
            logValues.append(log(value))
        klist[secid] = (-1, -1, -1, -1)
        # grow the log-log regression window from the last 3 points; keep the
        # largest window whose fit still has R2 > 0.9, stop at the first miss
        for ii in range(-3, -1 * (len(keys) + 1), -1):
            (a, b, R2) = linreg(logKeys[ii:], logValues[ii:])
            if R2 == 1:
                print 'R2 = 1 for ' + sec2tic[secid] + ' for ' + str(
                    -1 * ii) + ' out of ' + str(len(keys))
            if R2 > 0.9:
                klist[secid] = (a, R2, -1 * ii, len(keys))
            else:
                break

    # read previous day's klist file and compute an average
    prevKlistFile = open(
        os.environ['RUN_DIR'] + '/../' + str(prevDay) + '/exec/klist', 'r')
    prevKlistLines = [line.strip().split(',') for line in prevKlistFile]
    prevKlistFile.close()
    prevKlist = {}
    # ignore header
    for line in prevKlistLines[1:]:
        prevKlist[line[0]] = float(line[1])

    klistFilename = os.environ['RUN_DIR'] + '/exec/klist'
    outFile = open(klistFilename, 'w')
    #outFile.write('Stock,K,R2,Reg,Tot\n')
    outFile.write('Stock,K,R2\n')
    tickers = tic2sec.keys()
    tickers.sort()
    count = 0
    notInPrev = 0  # tickers fitted today but absent from yesterday's klist
    for ticker in tickers:
        secid = tic2sec.get(ticker, None)
        if secid not in klist:
            continue
        kPoints = klist[secid][2]
        if kPoints < 0:
            # no usable fit today (sentinel -1 or -2) — skip entirely
            continue
        kValue = klist[secid][0]
        # blend 50/50 with yesterday's K when available
        if ticker in prevKlist:
            kValue = 0.5 * (kValue + prevKlist[ticker])
        else:
            notInPrev += 1
        if kValue > -1:
            # a K above -1 looks implausible for a decaying power law —
            # logged via util.error; the row is still written below
            util.error(
                'For {}, kValue = {}, learned today = {}, R2 = {}, points for today = {} out of {}\n'
                .format(ticker, kValue, klist[secid][0], klist[secid][1],
                        klist[secid][2], klist[secid][3]))
        #outFile.write('{0},{1:.4f},{2:.4f}{3}{4}\n'.format(ticker, klist[secid][0], kValue, klist[secid][2], klist[secid][3])
        outFile.write('{0},{1:.4f},{2:.4f}\n'.format(ticker, kValue, klist[secid][1]))
        count += 1
    outFile.close()
    print 'Finished writing klist file: {} for {} tickers. {} tickers not present in previous klist file.'.format(
        klistFilename, count, notInPrev)