def jobqueueRefresh(self, inception=False):
    """Reload menuConfig.json if it changed on disk, then refresh the
    per-scene job queues in mongo and rebuild the index.

    Parameters
    ----------
    inception : bool
        Passed straight through to ``mongo.refreshQueue`` (semantics
        defined there -- presumably "full rebuild"; TODO confirm).
    """
    mtime = os.path.getmtime(os.path.abspath("menuConfig.json"))
    # BUGFIX: the original condition was `self.menus_mtime > mtime`, which
    # only reloaded when the cached timestamp was NEWER than the file --
    # edits to menuConfig.json were therefore never picked up.  Reload when
    # the file on disk is newer than what we last loaded.
    if mtime > self.menus_mtime:
        # `with` ensures the handle is closed (the original leaked it via
        # json.load(open(...)))
        with open("menuConfig.json") as fh:
            self.menus = json.load(fh)
        self.menus_mtime = mtime
    # base date is yesterday, formatted dd/mm/yyyy (dateoffset is a project helper)
    baseDt = dateoffset(self.sysdate, -1, "%d/%m/%Y")
    for loc, v in self.menus.items():
        farm = v["mad"]
        for scene, vv in v["scenes"].items():
            for batch, jobs in vv["monitor"].items():
                self.mongo.refreshQueue(farm, baseDt, batch, jobs, scene, inception)
    self.mongo.rebuildIndex(self.reglib)
def spotcorrection():
    """Correct contributed market data in the "OSPG TEST 6" ramp set for today.

    Two corrections are applied:
    1. SPOT FX RATES: recompute TRYJPY and BRLJPY spot rates from USD legs
       (mid adjusted by discount factors to a common spot date) and rewrite
       the curve with those two pairs replaced.
    2. HYB EURCNY VOL MKT VOLS: where the contributed EURCNY ATM vol looks
       inconsistent with EURUSD, replace it with the zero-correlation cross
       vol sqrt(EURUSD^2 + USDCNY^2).

    Side effects: writes two curves back via ramp/write_ramp; logs an error
    (and continues) when a contribution is rejected.
    """
    tday = dateoffset(datetime.today(), 0)
    rampset = "OSPG TEST 6"
    rampdate = xldate(tday)
    mkt = load_market("mkt", rampset, rampdate, rampdate)
    # pairs: rows 0-2 are the USD legs, rows 3-4 are the crosses we rebuild
    ccy1ccy2 = [['USD','BRL'], ['USD','JPY'], ['USD','TRY'], ['TRY','JPY'], ['BRL','JPY']]
    df = DataFrame(ccy1ccy2, columns=['CCY1','CCY2'])
    df['Mid'] = df[['CCY1','CCY2']].apply(lambda x: west.FXGetSpot("mkt", x[0], x[1]), axis=1)
    df['SpotDate'] = df[['CCY1','CCY2']].apply(lambda x: west.GetFXSpotDate2(rampdate, x[0], x[1], 'X', False), axis=1)
    # TRYJPY spot date forced to T+1 business day over the TYO/IST/NYK calendars.
    # NOTE(review): chained assignment (df[col][i] = ...) relies on old-pandas
    # behaviour; on modern pandas use df.loc[3, "SpotDate"].
    df["SpotDate"][3] = west.Add(rampdate,1,"bd",'',"TYO#IST#NYK")
    # discount factors for each leg to the pair's spot date
    df['DF1'] = df[['CCY1', 'SpotDate']].apply(lambda x: west.Df(x[1],"mkt",rampdate,'-5','',x[0]), axis=1)
    df['DF2'] = df[['CCY2', 'SpotDate']].apply(lambda x: west.Df(x[1],"mkt",rampdate,'-5','',x[0]), axis=1)
    df['CashRate'] = df['Mid']*df['DF2']/df['DF1']
    df['SpotRate'] = df['CashRate']*df['DF1']/df['DF2']
    #df = df.set_index(['CCY1', 'CCY2'])
    # re-read the existing curve, drop the two crosses, append our recomputed ones
    fxspot=[]
    for t in ramp.RampReadCurve("SPOT FX RATES", rampset, rampdate, 2).to_tuple()[0]:
        if not t[0]+t[1] in ["TRYJPY", "BRLJPY"]:
            fxspot.append(t)
    fxspot.extend([('TRY', 'JPY',df["SpotRate"][3],df["SpotRate"][3]), ('BRL', 'JPY',df["SpotRate"][4],df["SpotRate"][4])])
    sts = ramp.RampWriteCurve(tuple3d(fxspot), "SPOT FX RATES", rampset, rampdate, 2)
    if sts[:2].upper() != "OK":
        logger.error("SPOT FX contribution failed")
    #correct eurcny vol
    df_eurusd = read_ramp('HYB EURUSD VOL MKT VOLS', rampset, rampdate)
    # NOTE(review): .ix is deprecated/removed in modern pandas; works only on
    # the old pandas this file targets.
    df_eurusd = df_eurusd.ix[:14]
    df_usdcny = read_ramp('HYB USDCNY VOL MKT VOLS', rampset, rampdate)
    df_eurcny = read_ramp('HYB EURCNY VOL MKT VOLS', rampset, rampdate)
    # zero-correlation cross vol, used as the replacement value
    df_cross = np.sqrt(np.power(df_eurusd["ATM"],2)+np.power(df_usdcny["ATM"],2))
    # sanity check: |EURCNY - EURUSD| should be bounded by half the USDCNY vol
    df_check = np.abs(df_eurcny["ATM"]-df_eurusd["ATM"])>0.5*df_usdcny["ATM"]
    df_eurcny["ATM"] = concat([df_eurcny["ATM"], df_cross, df_check], axis=1).apply(lambda x: x.values[1] if x.values[2] else x.values[0], axis=1)
    df_eurcny = df_eurcny.reset_index()
    curvedata = [["Vols"]+df_eurcny.columns.tolist()]
    curvedata.extend([['']+t
                      for t in df_eurcny.values.tolist()])
    sts = write_ramp(curvedata, 'HYB EURCNY VOL MKT VOLS', rampset, rampdate)
    if sts[:2].upper() != "OK":
        # BUGFIX: was `console.error(...)` -- `console` is a
        # logging.StreamHandler (no .error method, would raise
        # AttributeError); log via `logger` like the SPOT FX branch above.
        logger.error("EURCNY vol contribution failed")
# NOTE(review): the fragment below duplicates the queue-refresh loop from
# jobqueueRefresh and appears to be the tail of a method whose "def" line is
# outside this chunk; indentation reconstructed as method-body level -- verify
# against the enclosing definition.
        farm = v["mad"]
        for scene, vv in v["scenes"].items():
            for batch, jobs in vv["monitor"].items():
                self.mongo.refreshQueue(farm, baseDt, batch, jobs, scene, inception)
    self.mongo.rebuildIndex(self.reglib)

    def livefeed(self):
        """Snapshot live price changes and push them into mongo."""
        logger.info("PriceChangeLive")
        df = PriceChangeLive()
        # round-trip through JSON to hand mongo a plain list of dicts
        self.mongo.updateLiveFeed(json.loads(df.to_json(orient="records")))

if __name__ == "__main__":
    # Tornado entry point: start the HTTP server, schedule the periodic
    # refreshes, then hand control to the IO loop.
    app = Application()
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(8080)
    #app.listen(443)
    # seconds until the next day boundary; rollsysdate is fired 7 hours after
    # that (presumably to land at 07:00 local time -- TODO confirm)
    delta = trunc(dateoffset(app.sysdate, 1))-app.sysdate
    tornado.ioloop.IOLoop.instance().add_timeout(timedelta(seconds=delta.seconds+7*60*60), app.rollsysdate)
    app.jobqueueRefresh(True)
    tornado.ioloop.PeriodicCallback(app.jobqueueRefresh, 5*1000*60).start()  # every 5 min
    app.livefeed()
    tornado.ioloop.PeriodicCallback(app.livefeed, 1*1000*60).start()  # every 1 min
    tornado.ioloop.PeriodicCallback(app.rrqueueRefresh, 1000*1).start()  # every 1 s
    logger.info("interstellar started")
    tornado.ioloop.IOLoop.instance().start()
def calcOfficial(df, tday):
    """Price every row of *df* against the previous day's OFFICIAL market.

    Returns a one-column DataFrame keyed "OFFICIAL" with one rate per row,
    computed via the project-level ``getRate`` helper.
    """
    # official rates are struck as of the prior business date
    official_date = xldate(dateoffset(tday, -1))
    market = load_market('mesaofficial', 'OFFICIAL', official_date, official_date)
    rates = df.apply(getRate, axis=1, args=(market, official_date))
    return DataFrame({"OFFICIAL": rates})
def buildcmd(self, groupindex):
    """Build one mad command line per selected batch.

    Parameters
    ----------
    groupindex : dict
        Maps group name -> list of selected job names, or ``["All"]`` to
        select every (non-wildcard) job in the group.

    Returns
    -------
    list of str
        Command strings, one per batch in a selected group.

    Side effects: populates ``self.mapping`` with {jobname: riskfile} for
    every selected job.
    """
    cmds = []
    for g, batches in self.riskconfig.items():
        if g not in groupindex:  # idiom: membership test, not .keys()
            continue
        for batch, v in batches.items():
            # keep non-wildcard jobs that are either explicitly selected
            # or covered by the "All" marker (comprehension replaces the
            # original filter(lambda ...) + list())
            jobs = [(t["JOBNAME"], t["RISKFILE"]) for t in v["JOBS"]
                    if t["JOBNAME"] != "*"
                    and (t["JOBNAME"] in groupindex[g] or groupindex[g][0] == "All")]
            self.mapping.update(dict(jobs))
            cmd = '"%s" %s %s' % (batch, self.db, " ".join('--job "%s"' % name for name, _ in jobs))
            # a request-level override wins over the batch's TIMINGCALC default
            timingcat = self.request.get("timing-cat", v["TIMINGCALC"])
            c = ' --base-date %s --sub-book "%s" "%s" --force-farm "%s" --force-priority %s --timing-cat %s ' % (dateoffset(self.basedate, int(v["DAYOFFSET"]), "%d/%m/%Y"), v["BOOK"], self.request["sub-book"], v["GARDEN"], v["PRIORITY"], timingcat)
            if "force-ramp" in self.request:
                c = c + ' --force-ramp "%s"' % self.request["force-ramp"]
            cmd = cmd + c + " ".join(self.overrides)
            cmd = cmd + " --sub-batch %s" % self.request["sub-batch"]
            cmd = cmd + " --no-fltr --no-tfc --ignore-day --add-co FORCE_VALID_MODEL FALSE --add-co TARGET_PKT_TM 800"
            if "eod-fixing" in self.request:
                cmd = cmd + " --eod-fixing %s" % self.request["eod-fixing"]
            cmds.append(cmd)
    return cmds
from rampdiff import read_ramp, write_ramp
from pandas import DataFrame, concat
import numpy as np
import pdb

# Root logger wired to a console StreamHandler at INFO level.
logger = logging.getLogger('')
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
console = logging.StreamHandler()
console.setFormatter(formatter)
logger.addHandler(console)

# Date stamps used to build report file names: today / yesterday as "ddmm".
dstr = datetime.today().strftime("%d%m")
yestdstr = dateoffset(datetime.today(), -1, "%d%m")

def waitforcash():
    # Block until today's cash report file appears on the network share,
    # polling every 5 minutes.  No timeout: waits indefinitely.
    cashfile = r"\\sins00120165\Reporting\Risk\rvg\EurOpen\BkLonxHybrids\ActRpt."+dstr
    logger.info("waiting cash....")
    while not os.path.exists(cashfile):
        time.sleep(60*5)
    logger.info("cash arrived")

def intradaysnapshot():
    # Submit the intraday market-snapshot job via ntmad and open a mad DB
    # handle to poll its progress from the event log.
    # NOTE(review): this definition appears to continue past the end of this
    # chunk -- the progress-polling loop is not visible here.
    logger.info("intraday snapshot")
    cmdline = "LON_MM_EVENING_INTRADAY GFTBSGL --force-priority 100 --job MKT_SNAPSHOT_LIVE_FASTINTRA --user-name SG435551 --now --no-tfc --sub-batch _FAST --ignore-day"
    ntmad_ret = ntmadsubmit(cmdline)
    jobid = ntmad_ret["job_id"]
    mad = maddb("MADBUKL", {"select_progress":"""select job_id, proportion_done, sts_cod from exo_evt_log where job_id in (%s)"""})