def mkt_data_eod(self):
    # End-of-day processing for every subscribed instrument: close out the
    # last minute bar, append today's daily bar, publish EOD events and trim
    # the in-memory daily/minute histories to their rolling windows.
    # NOTE(review): indentation reconstructed from a collapsed source line --
    # confirm the trimming steps are meant to run per instrument even when no
    # ticks arrived today.
    for inst in self.instruments:
        if (len(self.tick_data[inst]) > 0) :
            last_tick = self.tick_data[inst][-1]
            # cur_min['volume'] held the cumulative volume at bar open;
            # convert it into the bar's own traded volume.
            self.cur_min[inst]['volume'] = last_tick.volume - self.cur_min[inst]['volume']
            self.cur_min[inst]['openInterest'] = last_tick.openInterest
            self.min_switch(inst)  # finalize/roll the current minute bar
            if (self.cur_day[inst]['close']>0):
                mysqlaccess.insert_daily_data_to_df(self.day_data[inst], self.cur_day[inst])
                df = self.day_data[inst]
                # refresh daily indicator columns now the new bar is appended
                for fobj in self.day_data_func[inst]:
                    fobj.rfunc(df)
                if self.save_flag:
                    # queue the daily bar for asynchronous DB persistence
                    event = Event(type=EVENT_DB_WRITE, priority = 500)
                    event.dict['data'] = self.cur_day[inst]
                    event.dict['type'] = EVENT_MKTDATA_EOD
                    event.dict['instID'] = inst
                    self.eventEngine.put(event)
        if len(self.day_data[inst]) > 0:
            # keep only the last daily_data_days business days of daily bars
            d_start = workdays.workday(self.scur_day, -self.daily_data_days, CHN_Holidays)
            df = self.day_data[inst]
            self.day_data[inst] = df[df.index >= d_start]
        # keep only the last min_data_days business days of minute bars
        m_start = workdays.workday(self.scur_day, -self.min_data_days, CHN_Holidays)
        for m in self.min_data[inst]:
            if len(self.min_data[inst][m]) > 0:
                mdf = self.min_data[inst][m]
                self.min_data[inst][m] = mdf[mdf.index.date >= m_start]
def get_publishing_dates(brief):
    """Derive the key lifecycle dates of a brief from its publication date.

    Returns a dict with the published/closing/questions-close/answers-close
    dates, the open-period label, and a lower-cased formatted closing time.
    """
    application_open_days = {'1 week': 7, '2 weeks': 14}
    questions_open_days = {'1 week': 2, '2 weeks': 5}

    # Publication date depends on where the brief record came from.
    if _brief_is_from_api_(brief):
        published = _get_start_date_from_brief_api(brief)
    elif _brief_is_from_frontend_app_and_published_(brief):
        published = _get_start_date_from_published_frontend_brief(brief)
    else:
        published = _get_todays_date()

    length = _get_length_of_brief(brief)
    closing = published + timedelta(days=application_open_days[length])

    return {
        'published_date': published,
        'closing_date': closing,
        'questions_close': workday(published, questions_open_days[length]),
        'answers_close': workday(closing, -1),
        'application_open_weeks': length,
        'closing_time': '{d:%I:%M %p}'.format(d=closing).lower(),
    }
def mkt_data_eod(self):
    # End-of-day market-data handler: finish the last minute bar, persist the
    # daily bar, emit EOD events, then trim cached histories to their windows.
    # NOTE(review): nesting reconstructed from a collapsed source line.
    for inst in self.instruments:
        if (len(self.tick_data[inst]) > 0):
            last_tick = self.tick_data[inst][-1]
            # convert the stored open-of-bar cumulative volume into the bar's
            # incremental volume using the day's final tick
            self.cur_min[inst][
                'volume'] = last_tick.volume - self.cur_min[inst]['volume']
            self.cur_min[inst]['openInterest'] = last_tick.openInterest
            self.min_switch(inst)  # roll/finalize the current minute bar
            if (self.cur_day[inst]['close'] > 0):
                mysqlaccess.insert_daily_data_to_df(self.day_data[inst],
                                                    self.cur_day[inst])
                df = self.day_data[inst]
                # recompute daily indicators with the appended bar
                for fobj in self.day_data_func[inst]:
                    fobj.rfunc(df)
                if self.save_flag:
                    # hand the daily bar to the event engine for DB write
                    event = Event(type=EVENT_DB_WRITE, priority=500)
                    event.dict['data'] = self.cur_day[inst]
                    event.dict['type'] = EVENT_MKTDATA_EOD
                    event.dict['instID'] = inst
                    self.eventEngine.put(event)
        if len(self.day_data[inst]) > 0:
            # drop daily bars older than the rolling daily_data_days window
            d_start = workdays.workday(self.scur_day, -self.daily_data_days,
                                       CHN_Holidays)
            df = self.day_data[inst]
            self.day_data[inst] = df[df.index >= d_start]
        # drop minute bars older than the rolling min_data_days window
        m_start = workdays.workday(self.scur_day, -self.min_data_days,
                                   CHN_Holidays)
        for m in self.min_data[inst]:
            if len(self.min_data[inst][m]) > 0:
                mdf = self.min_data[inst][m]
                self.min_data[inst][m] = mdf[mdf.index.date >= m_start]
def get_opt_expiry(fut_inst, cont_mth, exch=''):
    """Return the option expiry as a datetime (expiry date at 15:00) for an
    option on future `fut_inst` with contract month `cont_mth` (YYYYMM int).

    The rule is picked from the underlying instrument code / exchange and the
    resulting date is rolled onto a CHN business day.
    NOTE(review): branch semantics inferred from the arithmetic -- confirm
    against each exchange's option contract specs.
    """
    cont_yr = int(cont_mth / 100)
    cont_mth = cont_mth % 100
    expiry_month = datetime.date(cont_yr, cont_mth, 1)
    wkday = expiry_month.weekday()  # weekday of the 1st of the contract month
    if fut_inst[:6].isdigit():
        # all-numeric underlying codes: anchor in the 3rd/4th week of the month
        nbweeks = 4
        if wkday <= 2:
            nbweeks = 3
        expiry = expiry_month + datetime.timedelta(days=nbweeks * 7 - wkday + 1)
        expiry = workdays.workday(expiry, 1, CHN_Holidays)
    elif fut_inst[:2] == 'IF' or exch == 'CFFEX':
        # index futures: anchored around the 3rd Friday of the month
        nbweeks = 2
        if wkday >= 5:
            nbweeks = 3
        expiry = expiry_month + datetime.timedelta(days=nbweeks * 7 - wkday + 3)
        expiry = workdays.workday(expiry, 1, CHN_Holidays)
    elif fut_inst[:2] == 'SR' or fut_inst[:2] == 'CF' or exch == 'CZCE':
        # CZCE: 5 business days before the 1st of the preceding month
        if cont_mth > 1:
            expiry_month = datetime.date(cont_yr, cont_mth - 1, 1)
        else:
            expiry_month = datetime.date(cont_yr - 1, 12, 1)
        expiry = workdays.workday(expiry_month, -5, CHN_Holidays)
    elif fut_inst[:1] == 'm' or exch == 'DCE':
        # DCE: 5th business day of the month before the contract month
        if cont_mth > 1:
            expiry_month = datetime.date(cont_yr, cont_mth - 1, 1) + datetime.timedelta(days=-1)
        else:
            expiry_month = datetime.date(cont_yr - 1, 11, 30)
        expiry = workdays.workday(expiry_month, 5, CHN_Holidays)
    # NOTE(review): an unmatched fut_inst/exch leaves `expiry` unbound and the
    # next line raises NameError -- confirm all callers hit a branch.
    return datetime.datetime.combine(expiry, datetime.time(15, 0))
def contract_expiry(cont, hols='db'):
    """Return the expiry date of futures contract `cont`.

    When `hols` is a holiday list the expiry is computed rule-based per
    exchange; otherwise (default 'db') it is looked up in the contract_list
    table, falling back to the rule-based path when not found.

    Fixes: the DB cursor/connection were previously leaked (never closed),
    and the query was built by string formatting (SQL-injection prone).
    """
    if type(hols) == list:
        exch = inst_to_exch(cont)
        mth = int(cont[-2:])
        # contract codes carry either a 2-digit or a 1-digit year
        if cont[-4:-2].isdigit():
            yr = 2000 + int(cont[-4:-2])
        else:
            yr = 2010 + int(cont[-3:-2])
        cont_date = datetime.date(yr, mth, 1)
        if exch == 'DCE' or exch == 'CZCE':
            # 10th business day of the contract month
            expiry = workdays.workday(cont_date - datetime.timedelta(days=1), 10, CHN_Holidays)
        elif exch == 'CFFEX':
            # day before the third Friday, then rolled one business day forward
            wkday = cont_date.weekday()
            expiry = cont_date + datetime.timedelta(days=13 + (11 - wkday) % 7)
            expiry = workdays.workday(expiry, 1, CHN_Holidays)
        elif exch == 'SHFE':
            # first business day after the 14th
            expiry = datetime.date(yr, mth, 14)
            expiry = workdays.workday(expiry, 1, CHN_Holidays)
        else:
            expiry = 0  # unknown exchange sentinel (kept for compatibility)
    else:
        cnx = mysql.connector.connect(**mysqlaccess.dbconfig)
        try:
            cursor = cnx.cursor()
            # parameterized query (was string-formatted SQL)
            cursor.execute("select expiry from contract_list where instID=%s", (cont,))
            out = [exp for exp in cursor]
            cursor.close()
        finally:
            cnx.close()  # was leaked before: connection never closed
        if len(out) > 0:
            expiry = out[0][0]
        else:
            # not in the DB: fall back to the rule-based calculation
            expiry = contract_expiry(cont, CHN_Holidays)
    return expiry
def contract_expiry(cont, hols='db'):
    """Return the expiry date of futures contract `cont`.

    A holiday list in `hols` selects the rule-based per-exchange calculation;
    any other value (default 'db') reads the stored expiry from the
    contract_list table, with a rule-based fallback.

    Fixes: the DB connection/cursor were never closed (resource leak) and the
    SQL was assembled via string formatting; now closed and parameterized.
    """
    if type(hols) == list:
        exch = inst_to_exch(cont)
        mth = int(cont[-2:])
        # contract codes carry either a 2-digit or a 1-digit year suffix
        if cont[-4:-2].isdigit():
            yr = 2000 + int(cont[-4:-2])
        else:
            yr = 2010 + int(cont[-3:-2])
        cont_date = datetime.date(yr, mth, 1)
        if exch == 'DCE' or exch == 'CZCE':
            # 10th business day of the contract month
            expiry = workdays.workday(cont_date - datetime.timedelta(days=1), 10, CHN_Holidays)
        elif exch == 'CFFEX':
            # day before the third Friday, rolled forward one business day
            wkday = cont_date.weekday()
            expiry = cont_date + datetime.timedelta(days=13 + (11 - wkday) % 7)
            expiry = workdays.workday(expiry, 1, CHN_Holidays)
        elif exch == 'SHFE':
            # first business day after the 14th
            expiry = datetime.date(yr, mth, 14)
            expiry = workdays.workday(expiry, 1, CHN_Holidays)
        else:
            expiry = 0  # unknown exchange sentinel (kept for compatibility)
    else:
        cnx = mysql.connector.connect(**mysqlaccess.dbconfig)
        try:
            cursor = cnx.cursor()
            cursor.execute("select expiry from contract_list where instID=%s", (cont,))
            out = [exp for exp in cursor]
            cursor.close()
        finally:
            cnx.close()  # previously leaked
        if len(out) > 0:
            expiry = out[0][0]
        else:
            expiry = contract_expiry(cont, CHN_Holidays)
    return expiry
def tenor_expiry(exch, product, tenor, rolldays=0, field='fwd'):
    """Roll a raw tenor date onto the exchange's actual contract expiry,
    pulled `rolldays` business days earlier when requested."""
    expiry = tenor
    if exch in ('DCE', 'CZCE'):
        # snap forward to the nearest Jan/May/Sep contract month
        while expiry.month not in [1, 5, 9]:
            expiry = expiry + relativedelta(months=1)
        expiry = workdays.workday(expiry - datetime.timedelta(days=1),
                                  10 - rolldays, misc.CHN_Holidays)
    elif exch == 'CFFEX':
        # bond futures (T/TF) trade quarterly contract months only
        while product in ['T', 'TF'] and expiry.month not in [3, 6, 9, 12]:
            expiry = expiry + relativedelta(months=1)
        offset = 13 + (11 - expiry.weekday()) % 7
        expiry = workdays.workday(expiry + datetime.timedelta(days=offset),
                                  1 - rolldays, misc.CHN_Holidays)
    elif exch == 'SHFE':
        # hc/rb use Jan/May/Oct, everything else Jan/May/Sep
        active_months = [1, 5, 10] if product in ['hc', 'rb'] else [1, 5, 9]
        while expiry.month not in active_months:
            expiry = expiry + relativedelta(months=1)
        expiry = workdays.workday(expiry.replace(day=14),
                                  1 - rolldays, misc.CHN_Holidays)
    elif exch in ('SGX', 'OTC'):
        expiry = workdays.workday(expiry + relativedelta(months=1),
                                  -1 - rolldays, misc.PLIO_Holidays)
    return expiry
def generate_scen(base_market, curve_type, curve_name, curve_tenor = 'ALL', shift_size = 0.0001, shift_type = cmq_inst.CurveShiftType.Abs):
    """Return a deep copy of base_market with one shift applied.

    curve_type 'value_date': advance the value date by shift_size business
    days (whole-day shifts only, shift_size >= 1) and extend any SGX fixing
    series from the forward curve so the skipped dates are covered.
    Otherwise: bump market[curve_type][curve_name] quotes (one tenor or all)
    by shift_size, absolute or relative per shift_type.
    NOTE(review): nesting reconstructed from a collapsed source line.
    """
    market_scen = copy.deepcopy(base_market)
    if curve_type == 'value_date':
        if shift_size >= 1:
            market_scen[curve_type] = workdays.workday(market_scen[curve_type], shift_size)
            curr_date = market_scen['market_date']
            # business days strictly after market_date up to the new value date
            prefix_dates = [workdays.workday(curr_date, shift) for shift in range(1, shift_size + 1)]
            for fwd_idx in market_scen['COMFwd']:
                crv_info = cmq_crv_defn.COM_Curve_Map[fwd_idx]
                if (crv_info['exch'] == 'SGX') and ('COMFix' in market_scen) and (crv_info['spotID'] in market_scen['COMFix']):
                    fixes = market_scen['COMFix'][crv_info['spotID']]
                    fwd_quotes = market_scen['COMFwd'][fwd_idx]
                    idy = 0
                    if len(fixes) > 0:
                        for fix_date in prefix_dates:
                            if fix_date <= fixes[-1][0]:
                                continue  # already fixed
                            # first forward quote whose end date covers fix_date
                            while fwd_quotes[idy][1] < fix_date:
                                idy += 1
                            fixes.append([fix_date, fwd_quotes[idy][2]])
    elif (curve_type in market_scen) and (curve_name in market_scen[curve_type]):
        for idx, value in enumerate(market_scen[curve_type][curve_name]):
            if curve_tenor == 'ALL' or value[0] == curve_tenor:
                curve_shift = shift_size
                if shift_type == cmq_inst.CurveShiftType.Rel:
                    curve_shift *= value[2]  # relative: scale by current quote
                market_scen[curve_type][curve_name][idx][2] += curve_shift
    return market_scen
def get_opt_expiry(fut_inst, cont_mth, exch = ''):
    """Return the option expiry *date* (cf. the datetime-returning variant)
    for an option on future `fut_inst`, contract month `cont_mth` (YYYYMM).

    Rule selection mirrors the underlying code / exchange; results are rolled
    onto CHN business days. NOTE(review): rules inferred from the arithmetic
    -- confirm against exchange option specs.
    """
    cont_yr = int(cont_mth/100)
    cont_mth = cont_mth % 100
    expiry_month = datetime.date(cont_yr, cont_mth, 1)
    wkday = expiry_month.weekday()  # weekday of the 1st of the contract month
    if fut_inst[:6].isdigit():
        # all-numeric underlying codes: 3rd/4th-week anchor in the month
        nbweeks = 4
        if wkday <= 2:
            nbweeks = 3
        expiry = expiry_month + datetime.timedelta(days = nbweeks*7 - wkday + 1)
        expiry = workdays.workday(expiry, 1, CHN_Holidays)
    elif fut_inst[:2]=='IF' or exch == 'CFFEX':
        # index futures: anchored around the 3rd Friday
        nbweeks = 2
        if wkday >= 5:
            nbweeks = 3
        expiry = expiry_month + datetime.timedelta(days = nbweeks*7 - wkday + 3)
        expiry = workdays.workday(expiry, 1, CHN_Holidays)
    elif fut_inst[:2]=='SR' or fut_inst[:2]=='CF' or exch == 'CZCE':
        # CZCE: 5 business days before the 1st of the preceding month
        if cont_mth > 1:
            expiry_month = datetime.date(cont_yr, cont_mth-1, 1)
        else:
            expiry_month = datetime.date(cont_yr-1, 12, 1)
        expiry = workdays.workday(expiry_month, -5, CHN_Holidays)
    elif fut_inst[:1] == 'm' or exch == 'DCE':
        # DCE: 5th business day of the month before the contract month
        if cont_mth > 1:
            expiry_month = datetime.date(cont_yr, cont_mth-1, 1) + datetime.timedelta(days = -1)
        else:
            expiry_month = datetime.date(cont_yr-1, 11, 30)
        expiry = workdays.workday(expiry_month, 5, CHN_Holidays)
    # NOTE(review): an unmatched fut_inst/exch leaves `expiry` unbound
    # (NameError here) -- confirm all callers hit a branch.
    return expiry
def prepare_data_env(self, inst, mid_day = True):
    """Load historical daily and minute bars for `inst` and prime the
    current-day/current-minute state from the freshest minute data.

    mid_day=True forces a (re)load even when a history window is zero.
    Options are skipped entirely.
    NOTE(review): nesting reconstructed from collapsed source lines; uses
    the deprecated pandas `.ix` indexer and `pd.datetime` (legacy pandas).
    """
    if self.instruments[inst].ptype == instrument.ProductType.Option:
        return
    if self.daily_data_days > 0 or mid_day:
        self.logger.debug('Updating historical daily data for %s' % self.scur_day.strftime('%Y-%m-%d'))
        # rolling window of daily bars ending at the current settlement day
        daily_start = workdays.workday(self.scur_day, -self.daily_data_days, CHN_Holidays)
        daily_end = self.scur_day
        self.day_data[inst] = mysqlaccess.load_daily_data_to_df('fut_daily', inst, daily_start, daily_end)
        df = self.day_data[inst]
        if len(df) > 0:
            # seed live instrument state from the last daily close
            self.instruments[inst].price = df['close'][-1]
            self.instruments[inst].last_update = 0
            self.instruments[inst].prev_close = df['close'][-1]
        # compute each registered daily indicator as a new column
        for fobj in self.day_data_func[inst]:
            ts = fobj.sfunc(df)
            df[ts.name]= pd.Series(ts, index=df.index)
    if self.min_data_days > 0 or mid_day:
        self.logger.debug('Updating historical min data for %s' % self.scur_day.strftime('%Y-%m-%d'))
        d_start = workdays.workday(self.scur_day, -self.min_data_days, CHN_Holidays)
        d_end = self.scur_day
        # restrict to the instrument's trading session (tick ids -> minute ids)
        min_start = int(self.instruments[inst].start_tick_id/1000)
        min_end = int(self.instruments[inst].last_tick_id/1000)+1
        mindata = mysqlaccess.load_min_data_to_df('fut_min', inst, d_start, d_end, minid_start=min_start, minid_end=min_end, database = 'blueshale')
        mindata = backtest.cleanup_mindata(mindata, self.instruments[inst].product)
        self.min_data[inst][1] = mindata
        if len(mindata)>0:
            min_date = mindata.index[-1].date()
            # if minute data is fresher than the daily history, rebuild the
            # current-day bar from minutes and prime the current-minute bar
            if (len(self.day_data[inst].index)==0) or (min_date > self.day_data[inst].index[-1]):
                ddf = data_handler.conv_ohlc_freq(mindata, 'd')
                self.cur_day[inst]['open'] = float(ddf.open[-1])
                self.cur_day[inst]['close'] = float(ddf.close[-1])
                self.cur_day[inst]['high'] = float(ddf.high[-1])
                self.cur_day[inst]['low'] = float(ddf.low[-1])
                self.cur_day[inst]['volume'] = int(ddf.volume[-1])
                self.cur_day[inst]['openInterest'] = int(ddf.openInterest[-1])
                self.cur_min[inst]['datetime'] = pd.datetime(*mindata.index[-1].timetuple()[0:-3])
                self.cur_min[inst]['open'] = float(mindata.ix[-1,'open'])
                self.cur_min[inst]['close'] = float(mindata.ix[-1,'close'])
                self.cur_min[inst]['high'] = float(mindata.ix[-1,'high'])
                self.cur_min[inst]['low'] = float(mindata.ix[-1,'low'])
                self.cur_min[inst]['volume'] = self.cur_day[inst]['volume']
                self.cur_min[inst]['openInterest'] = self.cur_day[inst]['openInterest']
                self.cur_min[inst]['min_id'] = int(mindata.ix[-1,'min_id'])
                self.instruments[inst].price = float(mindata.ix[-1,'close'])
                self.instruments[inst].last_update = 0
                self.logger.debug('inst=%s tick data loaded for date=%s' % (inst, min_date))
        # derive the coarser minute frequencies and their indicator columns
        for m in self.min_data_func[inst]:
            if m != 1:
                self.min_data[inst][m] = data_handler.conv_ohlc_freq(self.min_data[inst][1], str(m)+'min')
            df = self.min_data[inst][m]
            for fobj in self.min_data_func[inst][m]:
                ts = fobj.sfunc(df)
                df[ts.name]= pd.Series(ts, index=df.index)
def update_dates_with_earliest_start(self, earliest_start_date, workdays=False):
    """Stamp concrete calendar dates onto every node from its relative offsets.

    With workdays=True offsets count business days via workday(); otherwise
    they are plain calendar-day deltas. (The flag shadows the `workdays`
    module name, which is why the bare `workday` function is used.)
    """
    if workdays:
        shift = lambda offset: workday(earliest_start_date, offset)
    else:
        shift = lambda offset: earliest_start_date + timedelta(offset)
    for node in self.get_node_list():
        node.set_earliest_start_date(shift(node.get_earliest_start()))
        node.set_earliest_finish_date(shift(node.get_earliest_finish()))
        node.set_latest_start_date(shift(node.get_latest_start()))
        node.set_latest_finish_date(shift(node.get_latest_finish()))
def _set_due_date(self, cr, uid, inv, context):
    """Set the invoice due date to comply with SEPA direct-debit lead times.

    Recurring mandates need a few working days' notice, first-use mandates
    more; if the invoice date is too close, the due date is pushed out from
    today instead. Writes date_due on the invoice and date_maturity on its
    receivable move line, and returns the due-date string.
    (Python 2 code; the print statements look like debug leftovers.)
    """
    print "date-due now is", inv.date_due
    if inv.sdd_mandate_id.recurrent_sequence_type == 'recurring':
        # Recurring mandate: keep the invoice date, but only if it is far
        # enough in the future; otherwise push out from today.
        date_invoice = datetime.strptime(inv.date_invoice, DEFAULT_SERVER_DATE_FORMAT)
        difference = workdays.networkdays(date.today(), date_invoice.date())
        print "En het verschil is", difference
        # networkdays is inclusive: networkdays(today, today) == 1, so the
        # required 3 real days (4 networkdays) shows up as difference >= 5.
        if difference < 5:
            # not enough notice: due date becomes 5 workdays from now
            date_due = workdays.workday(datetime.now(), 5).date()
        else:
            date_due = date_invoice.date()
    else:
        # First mandate: needs at least six working days' notice (policy:
        # 14 calendar days); enforced here via the 7-workday threshold.
        date_invoice = datetime.strptime(inv.date_invoice, DEFAULT_SERVER_DATE_FORMAT)
        difference = workdays.networkdays(date.today(), date_invoice.date())
        print "En het verschil is", difference
        if difference < 7:
            date_due = workdays.workday(datetime.now(), 7).date()
        else:
            date_due = date_invoice.date()
    print "new date due is", date_due
    date_due = date_due.strftime(DEFAULT_SERVER_DATE_FORMAT)
    print "En als string", date_due
    # Persist the new due date on the invoice...
    inv.write({'date_due': date_due})
    # ...and on the move lines. Only the receivable line carries a due date.
    for move_line in inv.move_id.line_id:
        if move_line.account_id.type == 'receivable':
            move_line.date_maturity = date_due
            move_line.write({'date_maturity': date_due})
    return date_due
def contract_expiry(contract_date):
    """Return the expiry: two business days before `contract_date`, skipping
    UK public holidays for 2010-2019."""
    uk_holidays = sorted(
        holidays.UK(state=None,
                    years=[2010, 2011, 2012, 2013, 2014,
                           2015, 2016, 2017, 2018, 2019]).keys())
    return workday(contract_date, -2, uk_holidays)
def consolidate(dat_end):
    """Fetch BMF positioning for `dat_end` (MM/DD/YYYY) and the prior business
    day and return a table of levels plus day-over-day changes, excluding the
    'Nonresident investors - RES.2689' row.
    """
    as_of = datetime.strptime(dat_end, "%m/%d/%Y")
    prev = _fetch_bmf(wk.workday(as_of, -1).strftime('%m/%d/%Y'))
    curr = _fetch_bmf(dat_end)
    columns = ["long", "long(%)", "ch(long)", "short", "short(%)",
               "ch(short)", "Net Position", "Net Change"]
    out = pd.DataFrame(index=prev.index, columns=columns)
    out["long"] = curr['long']
    out["long(%)"] = curr['long(%)']
    out["ch(long)"] = curr['long'] - prev['long']
    out["short"] = curr['short']
    out["short(%)"] = curr['short(%)']
    out["ch(short)"] = curr['short'] - prev['short']
    out["Net Change"] = out["ch(long)"] - out["ch(short)"]
    out["Net Position"] = out["long"] - out["short"]
    return out[out.index != "Nonresident investors - RES.2689"]
def return_last_day_of_working(year, month):
    """Return the last working day of `month` in `year`.

    Counts the network days between the previous month's last day and the
    requested month's last day, then steps that many workdays forward.

    Fixes over the original:
    - `elif month is 5 or 7 or 10 or 12:` was always truthy (non-zero int),
      so February and the `else` branch were unreachable;
    - August fell into the `else` branch and used day 30 as its month end
      (August has 31 days);
    - January crashed (`month - 1 == 0`);
    - the `year % 4 == 0` leap test mishandles century years.
    All four are fixed by deriving month ends from calendar.monthrange.
    """
    import calendar  # local import keeps the module's import block untouched
    # last day of the previous month (rolling back across the year boundary)
    prev_year, prev_month = (year - 1, 12) if month == 1 else (year, month - 1)
    start_date = datetime.datetime(prev_year, prev_month,
                                   calendar.monthrange(prev_year, prev_month)[1])
    end_date = datetime.datetime(year, month,
                                 calendar.monthrange(year, month)[1])
    # number of working days in the window, then step forward that many
    day_num = workdays.networkdays(start_date, end_date)
    return workdays.workday(start_date, days=day_num)
def load_local_positions(self, tday):
    """Restore yesterday's EOD capital and per-instrument positions from the
    newest EODPos_<yymmdd>.csv file.

    Tries `tday`'s file first, then the prior CHN business day's (setting
    eod_flag, since that file already went through EOD processing). Returns
    False when neither file exists, True otherwise.
    (Python 2 style: csv over a file opened in 'rb'.)
    """
    pos_date = tday
    logfile = self.file_prefix + 'EODPos_' + pos_date.strftime('%y%m%d')+'.csv'
    if not os.path.isfile(logfile):
        # fall back to the previous business day's snapshot
        pos_date = workdays.workday(pos_date, -1, CHN_Holidays)
        logfile = self.file_prefix + 'EODPos_' + pos_date.strftime('%y%m%d')+'.csv'
        if not os.path.isfile(logfile):
            logContent = "no prior position file is found"
            self.onLog(logContent, level = logging.INFO)
            return False
        else:
            self.eod_flag = True
    with open(logfile, 'rb') as f:
        reader = csv.reader(f)
        for idx, row in enumerate(reader):
            if row[0] == 'capital':
                self.account_info['prev_capital'] = float(row[1])
            elif row[0] == 'pos':
                inst = row[1]
                if inst in self.instruments:
                    # lazily create the Position before restoring yesterday's
                    # long/short quantities
                    if inst not in self.positions:
                        self.positions[inst] = order.Position(self.agent.instruments[inst], self)
                    self.positions[inst].pos_yday.long = int(row[2])
                    self.positions[inst].pos_yday.short = int(row[3])
    return True
def load_local_positions(self, tday):
    """Restore EOD capital and per-instrument positions from the most recent
    EODPos_<yymmdd>.csv (today's, else the prior CHN business day's, in which
    case eod_flag is set). Returns True on success, False when no file exists.

    Variant that resolves the Position class/arguments per instrument via
    get_pos_class. (Python 2 style: csv over a file opened in 'rb'.)
    """
    pos_date = tday
    logfile = self.file_prefix + 'EODPos_' + pos_date.strftime(
        '%y%m%d') + '.csv'
    if not os.path.isfile(logfile):
        # fall back to the previous business day's snapshot
        pos_date = workdays.workday(pos_date, -1, CHN_Holidays)
        logfile = self.file_prefix + 'EODPos_' + pos_date.strftime(
            '%y%m%d') + '.csv'
        if not os.path.isfile(logfile):
            logContent = "no prior position file is found"
            self.onLog(logContent, level=logging.INFO)
            return False
        else:
            self.eod_flag = True
    with open(logfile, 'rb') as f:
        reader = csv.reader(f)
        for idx, row in enumerate(reader):
            if row[0] == 'capital':
                self.account_info['prev_capital'] = float(row[1])
            elif row[0] == 'pos':
                inst = row[1]
                if inst in self.instruments:
                    if inst not in self.positions:
                        # instrument-specific position class and ctor kwargs
                        (pos_cls, pos_args) = self.get_pos_class(
                            self.agent.instruments[inst])
                        self.positions[inst] = pos_cls(
                            self.agent.instruments[inst], self, **pos_args)
                    self.positions[inst].pos_yday.long = int(row[2])
                    self.positions[inst].pos_yday.short = int(row[3])
    return True
def generate_scen(base_market, curve_type, curve_name, curve_tenor = 'ALL', shift_size = 0.0001, shift_type = cmq_inst.CurveShiftType.Abs):
    """Return a deep copy of base_market with one shift applied.

    curve_type 'value_date': advance the value date by shift_size business
    days (whole-day shifts only, shift_size >= 1).
    Otherwise: bump market[curve_type][curve_name] quotes -- one tenor or all
    ('ALL') -- by shift_size, absolute or relative per shift_type.

    Change: removed a large block of commented-out dead code (an abandoned
    SGX-fixing extension); behavior is unchanged.
    """
    market_scen = copy.deepcopy(base_market)
    if curve_type == 'value_date':
        # shift_size >= 1 means "advance the value date N business days"
        if shift_size >= 1:
            market_scen[curve_type] = workdays.workday(market_scen[curve_type], shift_size)
    elif (curve_type in market_scen) and (curve_name in market_scen[curve_type]):
        for idx, value in enumerate(market_scen[curve_type][curve_name]):
            if curve_tenor == 'ALL' or value[0] == curve_tenor:
                curve_shift = shift_size
                if shift_type == cmq_inst.CurveShiftType.Rel:
                    curve_shift *= value[2]  # relative: scale by current quote
                market_scen[curve_type][curve_name][idx][2] += curve_shift
    return market_scen
def get_earliest_start_date(self, latest_finish_date, workdays=False):
    """Project the earliest start date backwards from the finish node's
    latest-finish offset; workdays=True counts business days."""
    offset = self.get_finish_node().get_latest_finish()
    if workdays:
        return workday(latest_finish_date, -offset)
    return latest_finish_date - timedelta(offset)
def get_datetime_by_days_since_start(self, class_days, add_holidays=False):
    """Return a timezone-aware datetime (end of day) for the `class_days`-th
    class day after the course start, skipping holidays/weekends.

    With add_holidays=True the date is stretched through any trailing
    holiday/weekend gap so work done in that gap is attributed correctly
    (e.g. a Friday class day absorbs the following weekend).
    """
    skip = self.excluded_days()
    day = workday(self.first_day, class_days, skip)
    if (add_holidays):
        # gap between this class day and the next one, minus the single
        # normal day step, is the number of holidays to fold in
        following = workday(self.first_day, class_days + 1, skip)
        day += following - day - timedelta(days=1)
    as_dt = datetime.combine(day, datetime.max.time())
    return timezone.make_aware(as_dt, timezone.get_default_timezone())
def load_market_data(mkt_deps, value_date = datetime.date.today(), region = 'EOD', is_eod = True): if region == 'EOD': mkt_db = dbaccess.dbconfig if is_eod: market_date = value_date else: market_date = workdays.workday(value_date, -1) market_key = market_date.strftime('%Y-%m-%d') else: mkt_db = dbaccess.mktsnap_dbconfig market_date = value_date market_key = market_date.strftime('%Y-%m-%d') + '_' + region print market_key, market_date, region, is_eod market_data = {'value_date': value_date, 'market_date': market_date, 'market_key': market_key, 'market_db': mkt_db,} for field in mkt_deps: if field == 'COMFwd': mkt_loader = comfwd_db_loader elif field == 'COMVolATM': for f in cmq_crv_defn.COMVOL_fields[1:]: market_data[f] = {} mkt_loader = comvol_db_loader elif field == 'COMFix': mkt_loader = comfix_db_loader elif field == 'FXFwd': mkt_loader = fxfwd_db_loader elif field == 'FXVolATM': for f in cmq_crv_defn.FXVOL_fields[1:]: market_data[f] = {} mkt_loader = fxvol_db_loader elif field == 'FXFix': mkt_loader = fxfix_db_loader elif field == 'IRCurve': mkt_loader = ircurve_db_loader elif field[:5] == 'COMDV': mkt_loader = comdv_db_loader else: continue market_data[field] = {} for crv_idx in mkt_deps[field]: if field == 'IRCurve': if crv_idx == 'cny_disc': flat_rate = 0.045 market_data[field][crv_idx] = flat_ir_curve(market_date, flat_rate) continue if field == 'COMVolATM': output = mkt_loader(market_data, crv_idx, mkt_deps[field][crv_idx]) for vol_field in cmq_crv_defn.COMVOL_fields: if len(output) == 0: market_data[vol_field][crv_idx] = {} else: market_data[vol_field][crv_idx] = output[vol_field] elif field[:5] == 'COMDV': market_data[field][crv_idx] = mkt_loader(market_data, crv_idx, mkt_deps[field][crv_idx], field[3:]) else: market_data[field][crv_idx] = mkt_loader(market_data, crv_idx, mkt_deps[field][crv_idx]) process_BOM(market_data, mkt_deps) return market_data
def test_saturday():
    # Saturday 2014-02-22: offsets must skip weekends in both directions.
    base = date(2014, 2, 22)
    expected = [
        (-1, date(2014, 2, 21)),
        (0, date(2014, 2, 22)),
        (1, date(2014, 2, 24)),
        (2, date(2014, 2, 25)),
        (3, date(2014, 2, 26)),
        (4, date(2014, 2, 27)),
        (5, date(2014, 2, 28)),
        (6, date(2014, 3, 3)),
    ]
    for offset, result in expected:
        assert workday(base, offset) == result
def test_monday():
    # Monday 2014-02-17: backwards lands on Friday, forwards walks the week.
    base = date(2014, 2, 17)
    expected = [
        (-1, date(2014, 2, 14)),
        (0, date(2014, 2, 17)),
        (1, date(2014, 2, 18)),
        (2, date(2014, 2, 19)),
        (3, date(2014, 2, 20)),
        (4, date(2014, 2, 21)),
        (5, date(2014, 2, 24)),
        (6, date(2014, 2, 25)),
    ]
    for offset, result in expected:
        assert workday(base, offset) == result
def limit(self, date=None):
    """Return the deadline `self.days` away from `date` (now by default):
    calendar days for Naturaldays, business days otherwise."""
    base = date if date else datetime.today()
    base = base.replace(microsecond=0)  # drop sub-second noise
    if isinstance(self.days, Naturaldays):
        return base + timedelta(self.days)
    return workday(base, self.days)
def predictToday(self, day=None):
    """Predict using open-to-open percentage changes ending at the next
    business day after `day`; returns -1 when there is not enough history.

    Fix: `day` previously defaulted to `date.today()`, which Python evaluates
    once at definition time -- in a long-running process every no-arg call
    kept using the import-day date. It now defaults to None and resolves to
    the current date per call (explicit `day` arguments behave as before).
    """
    if day is None:
        day = date.today()
    today = wd.workday(day, 1)
    data = sm.dayToDayDiffPercent(self.ticker, which='Open',
                                  numDays=self.inputSize, endDay=today)
    if len(data) < self.inputSize:
        return -1  # not enough history to fill the model input
    act = self.predict([data])
    return act
def cont_date_expiry(cont_date, exch):
    """Rule-based expiry for contract month `cont_date` on exchange `exch`;
    returns 0 for unknown exchanges."""
    hols = CHN_Holidays
    yr, mth = cont_date.year, cont_date.month
    if exch in ('DCE', 'CZCE'):
        # 10th business day of the contract month
        return workdays.workday(cont_date - datetime.timedelta(days=1), 10, hols)
    if exch == 'CFFEX':
        # day before the third Friday, rolled forward one business day
        wkday = cont_date.weekday()
        anchor = cont_date + datetime.timedelta(days=13 + (11 - wkday) % 7)
        return workdays.workday(anchor, 1, CHN_Holidays)
    if exch == 'SHFE':
        # first business day after the 14th
        return workdays.workday(datetime.date(yr, mth, 14), 1, CHN_Holidays)
    if exch in ['SGX', 'LME', 'NYMEX', 'OTC']:
        # last business day of the contract month
        return workdays.workday(cont_date + relativedelta(months=1), -1, PLIO_Holidays)
    return 0
def singleGuessManyDays():
    """Run getTodayPredictions for TSLA on every business day from
    2020-05-01 up to today, printing each date as it goes."""
    ticker = 'tsla'
    last_day = date.today()
    cursor = date(2020, 5, 1)
    while cursor <= last_day:
        print(cursor)
        getTodayPredictions(ticker, cursor)
        cursor = wd.workday(cursor, 1)  # advance one business day
def calc_day(self):
    """Populate self.d_dict with the analysis window boundaries and the
    business days just outside them, formatted as YYYY/MM/DD strings."""
    self.d_dict = {}
    self.load_holidays(HOLIDAYS_FNAME)
    fmt = "%Y/%m/%d"
    lower = workdays.workday(self.today, days=-D_LIMIT, holidays=self.holidays)
    upper = self.today
    self.d_dict[BOUNDARY1] = lower.strftime(fmt)
    self.d_dict[BOUNDARY2] = upper.strftime(fmt)
    # one business day outside each boundary
    self.d_dict[EXTERIOR1] = workdays.workday(
        lower, days=-1, holidays=self.holidays).strftime(fmt)
    self.d_dict[EXTERIOR2] = workdays.workday(
        upper, days=1, holidays=self.holidays).strftime(fmt)
def working_days(user, dt, till=False):
    """List the individual working days covered starting at `dt` (count from
    working_days_total), skipping the user's calendar holidays."""
    total = working_days_total(user, dt, till=till)
    hols = calendar_holidays(user, dt.year)
    cursor = dt_to_date(dt) - relativedelta(days=1)  # step back so day one is included
    days = []
    for _ in range(total):
        cursor = workdays.workday(cursor, 1, holidays=hols)
        days.append(cursor)
    return days
def vacation_days(user, start, end):
    """Return the list of working days consumed by a vacation spanning
    start..end, honouring the user's calendar holidays for both years."""
    start, end = dt_to_date(start), dt_to_date(end)
    total = vacation_duration(user, start, end)
    hols = calendar_holidays(user, list({start.year, end.year}))
    day = start - relativedelta(days=1)  # step back so the first hop is day one
    result = []
    for _ in range(total):
        day = workdays.workday(day, 1, holidays=hols)
        result.append(day)
    return result
def vacation_days(user, start, end):
    """Compute which working days a vacation from `start` to `end` uses,
    skipping the user's holidays across the years involved."""
    first = dt_to_date(start)
    last = dt_to_date(end)
    duration = vacation_duration(user, first, last)
    holiday_list = calendar_holidays(user, list(set([first.year, last.year])))
    days = []
    cursor = first - relativedelta(days=1)  # include the starting day itself
    for _ in range(duration):
        cursor = workdays.workday(cursor, 1, holidays=holiday_list)
        days.append(cursor)
    return days
def split_logic(user, start, end, duration, vacations):
    """Split a vacation request into per-year call payloads.

    Walks the year->days slots, carving consecutive working-day chunks off
    the requested period; stops early once the chunks reach the requested
    end date. (Python 2: dict.iteritems -- note the slot iteration order is
    arbitrary, and `start`/`end` are mutated across iterations.)
    """
    calls = []
    slots = get_slots(duration, vacations)
    years = list(set([start.year, end.year]))
    holidays = calendar_holidays(user, years)
    end_holiday = copy.deepcopy(end)  # remember the true requested end
    for year,days in slots.iteritems():
        # to include the starting day: days=1 skips to the next day,
        # days=0 keeps the starting day
        days = days-1
        end = workdays.workday(start, days, holidays=holidays)
        c = {
            'extra_params': { 'Task.VacationYear': year,},
            'start': copy.deepcopy(start),
            'end': copy.deepcopy(end),
        }
        # next chunk begins on the working day after this chunk's end
        start = workdays.workday(start, days+1, holidays=holidays)
        calls.append(c)
        # There can be more slots than the length of the vacation; short circuit
        if end>=end_holiday:
            break
    return calls
def generate_scen(base_market, curve_type, curve_name, curve_tenor='ALL',
                  shift_size=0.0001, shift_type=cmq_inst.CurveShiftType.Abs):
    """Return a deep copy of base_market with one shift applied.

    curve_type 'value_date': advance the value date by shift_size business
    days (whole-day shifts only) and extend SGX fixing series from the
    forward curve to cover the skipped dates. Otherwise bump
    market[curve_type][curve_name] quotes (one tenor or 'ALL') by
    shift_size, absolute or relative per shift_type.

    Fixes: prefix_dates was built unconditionally via `range()` over the
    float default shift_size and then indexed with `prefix_dates[0]` /
    `fixes[-1]`, raising TypeError/IndexError for sub-day shifts or empty
    fixing lists. The fixing extension now runs only for whole-day shifts
    and guards empty lists (consistent with the sibling variant that checks
    `len(fixes) > 0`).
    """
    market_scen = copy.deepcopy(base_market)
    if curve_type == 'value_date':
        if shift_size >= 1:
            market_scen[curve_type] = workdays.workday(market_scen[curve_type], shift_size)
            curr_date = market_scen['market_date']
            # candidate fixing dates: market_date plus the next shift_size-1
            # business days (the dates skipped by the value-date move)
            prefix_dates = [workdays.workday(curr_date, shift)
                            for shift in range(int(shift_size))]
            for fwd_idx in market_scen['COMFwd']:
                crv_info = cmq_crv_defn.COM_Curve_Map[fwd_idx]
                if (crv_info['exch'] == 'SGX') and ('COMFix' in market_scen) and (
                        crv_info['spotID'] in market_scen['COMFix']):
                    fixes = market_scen['COMFix'][crv_info['spotID']]
                    if len(fixes) == 0:
                        continue  # nothing to extend (previously IndexError)
                    fwd_quotes = market_scen['COMFwd'][fwd_idx]
                    idy = 0
                    for fix_date in prefix_dates:
                        if fix_date <= fixes[-1][0]:
                            continue  # already fixed
                        # first forward quote whose end date covers fix_date
                        while fwd_quotes[idy][1] < fix_date:
                            idy += 1
                        fixes.append([fix_date, fwd_quotes[idy][2]])
    elif (curve_type in market_scen) and (curve_name in market_scen[curve_type]):
        for idx, value in enumerate(market_scen[curve_type][curve_name]):
            if curve_tenor == 'ALL' or value[0] == curve_tenor:
                curve_shift = shift_size
                if shift_type == cmq_inst.CurveShiftType.Rel:
                    curve_shift *= value[2]  # relative: scale by current quote
                market_scen[curve_type][curve_name][idx][2] += curve_shift
    return market_scen
def split_logic(user, start, end, duration, vacations):
    """Split a vacation request into per-year call payloads.

    Iterates the year->days slots, carving consecutive working-day chunks
    off the requested period, and stops once the chunks reach the requested
    end date. (Python 2: dict.iteritems -- slot iteration order is
    arbitrary; `start`/`end` are deliberately mutated between iterations.)
    """
    calls = []
    slots = get_slots(duration, vacations)
    years = list(set([start.year, end.year]))
    holidays = calendar_holidays(user, years)
    end_holiday = copy.deepcopy(end)  # remember the true requested end
    for year, days in slots.iteritems():
        # to include the starting day; eg. days=1 skips to next day,
        # days=0 includes the starting day
        days = days - 1
        end = workdays.workday(start, days, holidays=holidays)
        c = {
            'extra_params': {
                'Task.VacationYear': year,
            },
            'start': copy.deepcopy(start),
            'end': copy.deepcopy(end),
        }
        # next chunk begins on the working day after this chunk's end
        start = workdays.workday(start, days + 1, holidays=holidays)
        calls.append(c)
        # There can be more slots than the length of the vacation; short circuit
        if end >= end_holiday:
            break
    return calls
def checkIfDateHasPrice(ticker, date):
    """Return True iff yfinance reports a price row for each of the `numDays`
    business days starting at `date`.

    Fix: the bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to `except Exception` (an empty/absent `Open` column simply
    means no prices, so False is the right answer there).
    """
    stock = yf.Ticker(ticker)
    numDays = 2
    his = stock.history(start=date, end=wd.workday(date, numDays),
                        period='1d', interval='1d')
    try:
        dates = his.Open.index.date
    except Exception:
        # no usable history frame for this window -> no prices
        return False
    return numDays == len(dates)
def __init__(self, items, start):
    """Build a task node from one row of fields.

    items: [id, description, parent, duration, before, after, start, end, type]
    start: fallback start used when the row carries no start of its own.

    Change: removed the commented-out dead code (debug print loop and an
    abandoned start-parsing branch); behavior is unchanged.
    """
    self.Kids = []
    self.Id = items[0].strip()
    self.Description = items[1].strip()
    self.Parent = items[2].strip()
    self.Duration = self.calcDuration(items[3].strip())
    self.Before = items[4].strip()
    self.After = items[5].strip()
    self.Start = self.getTime(items[6].strip(), start)
    self.End = self.getTime(items[7].strip(),
                            arrow.get(workdays.workday(self.Start, self.Duration - 1)))
    self.Type = items[8].strip()
    # NOTE(review): this unconditionally overwrites the End parsed from
    # items[7] above with the duration-derived date -- confirm which value
    # is meant to win before simplifying.
    self.End = arrow.get(workdays.workday(self.Start, self.Duration - 1))
    self.gItem = None
    self.Depends = []
def update_period_graph(n_intervals, value, n_clicks):
    """Dash callback: rebuild the period-by-period CLE-vs-model bar chart.

    With no dropdown `value` the window starts at the timestamp nearest
    17:00 of the previous business day; otherwise at the selected
    timestamp. The CSV is re-read on every refresh tick.
    """
    df = pd.read_csv(filename, index_col='Timestamp', parse_dates=True, infer_datetime_format=True)
    if value is None:
        with warnings.catch_warnings():
            # silence get_loc(method=...) deprecation noise
            warnings.simplefilter("ignore")
            start_day = workdays.workday(datetime.today(), -1)
            start_day_time = datetime(start_day.year, start_day.month, start_day.day, 17, 0)
            # nearest available timestamp to the nominal window start
            df_start = df.index.values[df.index.get_loc(start_day_time, method='nearest')]
    else:
        # dropdown value looks like 'YYYY-mm-dd HH:MM:SS+...'; drop the offset
        start = datetime.strptime(value.split('+')[0], '%Y-%m-%d %H:%M:%S')
        df_start = datetime(start.year, start.month, start.day, start.hour, start.minute, start.second)
    df_end = df.index.max()
    filtered_df = df.loc[df_start:df_end].copy()
    target_bar = go.Bar(x=filtered_df.index, y=filtered_df['CLEDelta'], name='CLE', marker=dict(color='#FFD700'))
    model_bar = go.Bar(x=filtered_df.index, y=filtered_df['ModelDelta'], name='Model', marker=dict(color='#9EA0A1'))
    data = [target_bar, model_bar]
    # NOTE(review): 'PT Sans Narrpw' looks like a typo for 'PT Sans Narrow'
    # (runtime string, left untouched here)
    layout = go.Layout(title='Period by Period Change',
                       titlefont={
                           'size': 16,
                           'family': 'Balto'
                       },
                       xaxis={
                           'type': 'category',
                           'dtick': 3,
                           'tickangle': 30,
                           'showline': True,
                           'ticks': 'inside',
                           'tickfont': {
                               'family': 'PT Sans Narrpw',
                               'size': 12
                           }
                       },
                       yaxis={'showline': True})
    fig = go.Figure(data=data, layout=layout)
    return fig
def updated_running_daily_total(n_intervals, value, nclicks):
    """Dash callback: plot the running daily total (model minus crude) since
    the selected start timestamp, or since 17:00 of the previous workday when
    no start is selected. Returns a plotly line Figure."""
    frame = pd.read_csv(filename, index_col='Timestamp', parse_dates=True,
                        infer_datetime_format=True)
    if value is None:
        # Default anchor: 17:00 of the previous workday, snapped to the
        # nearest index entry.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            prev_day = workdays.workday(datetime.today(), -1)
        anchor = datetime(prev_day.year, prev_day.month, prev_day.day, 17, 0)
        window_start = frame.index.values[frame.index.get_loc(anchor,
                                                              method='nearest')]
    else:
        picked = datetime.strptime(value.split('+')[0], '%Y-%m-%d %H:%M:%S')
        window_start = datetime(picked.year, picked.month, picked.day,
                                picked.hour, picked.minute, picked.second)
    window_end = frame.index.max()
    window = frame.loc[window_start:window_end].copy()
    # Cumulative move of each series relative to the window start.
    window['ModelRunning'] = round(
        window['y_pred'] - window.loc[window_start]['y_pred'], 2)
    window['CrudeRunning'] = round(
        window['CLE'] - window.loc[window_start]['CLE'], 2)
    window['RunningDailyTotal'] = (window['ModelRunning']
                                   - window['CrudeRunning'])
    latest_value = round(window.loc[window_end]['RunningDailyTotal'], 2)
    trace = go.Scatter(x=window.index,
                       y=window['RunningDailyTotal'],
                       mode='lines',
                       marker={'color': 'lightblue'})
    layout = go.Layout(title='Running Daily Total:{} '.format(latest_value),
                       titlefont={
                           'size': 16,
                           'family': 'Balto'
                       },
                       xaxis={
                           'showspikes': True,
                           'spikemode': 'toaxis'
                       })
    return go.Figure(data=[trace], layout=layout)
def day_shift(d, roll_rule):
    """Shift date `d` by a roll-rule string.

    roll_rule: '<n>b' business days (via workdays), '<n>m' months,
               '<n>d' calendar days, '<n>y' years, '<n>w' weeks.
               n may be negative.

    Raises ValueError for an unrecognized rule (the original fell through
    with `shft_day` unbound and died with a NameError).
    """
    if 'b' in roll_rule:
        return workdays.workday(d, int(roll_rule[:-1]))
    if 'm' in roll_rule:
        return d + dateutil.relativedelta.relativedelta(months=int(roll_rule[:-1]))
    if 'd' in roll_rule:
        return d + datetime.timedelta(days=int(roll_rule[:-1]))
    if 'y' in roll_rule:
        return d + dateutil.relativedelta.relativedelta(years=int(roll_rule[:-1]))
    if 'w' in roll_rule:
        return d + dateutil.relativedelta.relativedelta(weeks=int(roll_rule[:-1]))
    raise ValueError("unknown roll rule: %r" % (roll_rule,))
def get_local_positions(self, tday):
    # Load locally saved end-of-day positions for trade date `tday`
    # (Python 2: print statement, csv reader on a binary-mode file).
    # Falls back to the previous CHN working day's file when today's is
    # missing; returns False if neither file exists, True once loaded.
    pos_date = tday
    logfile = self.folder + 'EOD_Pos_' + pos_date.strftime('%y%m%d')+'.csv'
    if not os.path.isfile(logfile):
        # No file for today yet — try the prior working day (CHN holidays skipped).
        pos_date = workdays.workday(pos_date, -1, CHN_Holidays)
        logfile = self.folder + 'EOD_Pos_' + pos_date.strftime('%y%m%d')+'.csv'
        if not os.path.isfile(logfile):
            print "no prior position file is found"
            return False
        else:
            # Only the prior day's snapshot exists, so we are in EOD state.
            self.eod_flag = True
    with open(logfile, 'rb') as f:
        reader = csv.reader(f)
        for idx, row in enumerate(reader):
            if row[0] == 'capital':
                # Row layout: ['capital', amount]
                self.prev_capital = float(row[1])
            elif row[0] == 'pos':
                # Row layout: ['pos', instID, yday_long, yday_short];
                # unknown instruments are silently skipped.
                inst = row[1]
                if inst in self.positions:
                    self.positions[inst].pos_yday.long = int(row[2])
                    self.positions[inst].pos_yday.short = int(row[3])
    return True
def day_shift(d, roll_rule, hols=None):
    """Shift date `d` according to `roll_rule`.

    roll_rule: '<n>b' business days (respecting `hols`), '<n>m' months,
               '<n>d' calendar days, '<n>y' years, '<n>w' weeks,
               '<n>MEND' the last day of the month n months ahead.
               n may be negative.
    hols:      holiday dates for the business-day rule; defaults to no
               holidays (was a mutable default `hols=[]`).

    Raises ValueError on an unrecognized rule instead of the original's
    accidental NameError from an unbound `shft_day`.
    """
    holidays = [] if hols is None else hols
    if 'b' in roll_rule:
        shft_day = workdays.workday(d, int(roll_rule[:-1]), holidays)
    elif 'm' in roll_rule:
        shft_day = d + relativedelta(months=int(roll_rule[:-1]))
    elif 'd' in roll_rule:
        shft_day = d + datetime.timedelta(days=int(roll_rule[:-1]))
    elif 'y' in roll_rule:
        shft_day = d + relativedelta(years=int(roll_rule[:-1]))
    elif 'w' in roll_rule:
        shft_day = d + relativedelta(weeks=int(roll_rule[:-1]))
    elif 'MEND' in roll_rule:
        # Jump to the month (n+1) months ahead, rewind to its first day,
        # then step back one day to land on the target month's end.
        shft_day = d + relativedelta(months=int(roll_rule[:-4]) + 1)
        shft_day = shft_day.replace(day=1) - datetime.timedelta(days=1)
    else:
        raise ValueError("unknown roll rule: %r" % (roll_rule,))
    return shft_day
def test_workday():
    """Exercise workday() for Excel-compatible behavior: zero-day identity,
    weekend starts, holiday skipping, and custom weekend sets."""
    labor_day = [date(2015, 9, 7)]
    long_weekend = (0, 5, 6)  # Monday, Saturday, Sunday treated as non-working
    # A zero-day shift is an identity, matching Excel's WORKDAY().
    assert workday(date(2015, 8, 23)) == date(2015, 8, 23)
    assert workday(date(2015, 8, 23), weekends=long_weekend) == date(2015, 8, 23)
    # Starting on a weekend with a non-zero shift lands on a working day.
    assert workday(date(2015, 8, 23), days=1, weekends=long_weekend) == \
        date(2015, 8, 25)
    # A holiday in the path pushes the result past it.
    assert workday(date(2015, 9, 4), days=1, holidays=labor_day) == \
        date(2015, 9, 8)
    # Ten-day shifts: plain, custom weekends, holidays, and both combined.
    assert workday(date(2015, 8, 24), 10) == date(2015, 9, 7)
    assert workday(date(2015, 8, 25), 10, weekends=long_weekend) == \
        date(2015, 9, 10)
    assert workday(date(2015, 8, 24), 10, holidays=labor_day) == \
        date(2015, 9, 8)
    assert workday(date(2015, 8, 25), 10, holidays=labor_day,
                   weekends=long_weekend) == date(2015, 9, 10)
def get_due_date(self):
    #get statute with least number of days till maturity
    #TODO: scheduled send date should probably be required and checked against the first sent message
    # Returns the cached due date when present; otherwise derives it from the
    # government's first statute and the scheduled send date, persists it via
    # save(), and returns it. Returns None whenever it cannot be computed.
    if self.due_date:
        return self.due_date
    if self.government is None:
        return None
    # NOTE(review): statutes[0] is assumed to be the soonest-maturing statute,
    # but nothing here sorts the list — confirm get_statutes orders by
    # days-till-due as the comment above implies.
    statutes = self.government.get_statutes
    holidays = self.government.get_holiday_dates
    if statutes is not None and self.scheduled_send_date is not None:
        if len(statutes) > 0:
            soonest_statute = statutes[0]
        else:
            return None
        days_till_due = soonest_statute.get_days_till_due
        if days_till_due:
            sent = self.scheduled_send_date
            # Count working days only, skipping the government's holidays.
            due_when = workdays.workday(sent, days_till_due, holidays)
            self.due_date = due_when
        else:
            self.due_date = None
        # Persist the computed (possibly None) due date before returning it.
        self.save()
        return self.due_date
    return None
def get_date(self, fraction_complete):
    """Map a completion fraction (0.0-1.0) onto a working day within the
    schedule: the date `fraction_complete` of the way through the span,
    skipping this schedule's excluded days."""
    offset = int(self.num_days() * fraction_complete)
    return workday(self.first_day, offset, self.excluded_days())
def process(responce)->tuple:
    # Build (big_feature_list, label) for the classifier.
    # responce: 'y'/'Y' => re-parse combined_data.txt, query the price API per
    # article, and cache results to label.txt / pre_prossed_data.txt;
    # anything else => reload both cached files.
    # NOTE(review): the annotation said "-> list" but the function returns a
    # 2-tuple; corrected to tuple.
    #some variables here...
    bigram_measures = nltk.collocations.BigramAssocMeasures()
    line = ''
    date = ''
    symbol = ''
    count = 1
    big_feature_list = []
    current_tokens = []
    label = []
    features = []
    # three-or-more-letter lowercase words only
    pattern = '[a-z][a-z][a-z]*'
    if responce in ['y',"Y"]:
        #generate holiday list
        e = open('holiday_list.txt','r',encoding="utf-8")
        holidays = generate_holidays_list(e)
        e.close()
        f = open('combined_data.txt', 'r',encoding="utf-8")
        for line in f:
            #pull out the date
            if (line[0:7] == 'DATE = '):
                print("Processing article {}".format(count))
                count += 1
                date = line[7:]
                # DATE lines carry YYYYMMDD; slice into a datetime.
                date_object = datetime(int(date[0:4]), int(date[4:6]), int(date[6:8]))
            #pull out the symbol
            elif (line[0:9] == 'SYMBOL = '):
                symbol = line[9:].strip('\n')
            #use the information to populate the feature vector... (most of the work happens here)
            elif (line[0:12] == 'END OF ENTRY'):
                # Weekend-dated articles are rolled back 12 working days.
                if (calendar.weekday(date_object.year,date_object.month,date_object.day) in [5,6]):
                    date_object = workday(date_object,-12,holidays)#include NYSE holidays
                end_date = workday(date_object,11,holidays)
                sleep(.4)#calls to api limited to one per 2.5ms
                # NOTE(review): months are passed as month-1 — presumably the
                # API expects 0-based months; confirm against open_url.
                data = open_url(symbol, str(date_object.month -1), str(date_object.day), str(date_object.year),str(end_date.year), str(end_date.month-1), str(end_date.day))
                # Binary label from the price movement returned by the API.
                if(data[0] <= 0):
                    label.append(-1)
                else:
                    label.append(1)
                date = ''
                symbol = ''
                # Append the top-100 bigrams (freq >= 5) as fused tokens.
                finder = BigramCollocationFinder.from_words(current_tokens)
                finder.apply_freq_filter(5)
                scored = sorted(finder.nbest(bigram_measures.raw_freq,100))
                for pair in scored:
                    current_tokens += [str(pair[0])+str(pair[1])]
                features.append(current_tokens)
                current_tokens = []
            else:
                # Body text: tokenize and drop English stopwords.
                line = line.lower()
                tokens = regexp_tokenize(line, pattern)
                current_tokens +=[w for w in tokens if not w in stopwords.words('english')]
        # Cache labels and features to disk for later runs.
        # NOTE(review): g and h are never closed in this branch (only f is) —
        # resource leak; consider `with` blocks.
        g = open('label.txt','w', encoding="utf-8")
        for classifier in label:
            g.write(str(classifier)+'\n')
        for feat in features:
            big_feature_list.append(' '.join(word for word in feat))
        h = open('pre_prossed_data.txt','w')
        for feat in big_feature_list:
            h.write(feat+'\n')
        f.close()
    else:
        # Cached path: reload labels and pre-processed features.
        g = open('label.txt','r+', encoding="utf-8")
        for line in g:
            label.append(int(line.strip('\n')))
        h = open('pre_prossed_data.txt','r+')
        for line in h:
            big_feature_list.append(line.strip('\n'))
        #close documnets:
        h.close()
        g.close()
    return big_feature_list,label
#pull out the date # print(line) if (line[0:7] == 'DATE = '): print("Processing article {}".format(count)) count += 1 date = line[7:] # print(date) date_object = datetime(int(date[0:4]), int(date[4:6]), int(date[6:8])) #pull out the symbol elif (line[0:9] == 'SYMBOL = '): symbol = line[9:].strip('\n') # print(symbol) #use the information to populate the feature vector... (most of the work happens here) elif (line[0:12] == 'END OF ENTRY'): if (calendar.weekday(date_object.year,date_object.month,date_object.day) in [5,6]): date_object = workday(date_object,-12,holidays)#include NYSE holidays end_date = workday(date_object,11,holidays) # print(date_object) # print(end_date) if(responce in ['y',"Y"]): sleep(.45) data = open_url(symbol, str(date_object.month -1), str(date_object.day), str(date_object.year),str(end_date.year), str(end_date.month-1), str(end_date.day)) # if(data[0] <= 0): # label.append(-1) # else: # label.append(1) label.append(data[0]) opening_prices.append(data[1][1]) date = '' symbol = '' finder = BigramCollocationFinder.from_words(current_tokens)
#pull out the date #print(line) if (line[0:7] == 'DATE = '): print("Processing article {}".format(count)) count += 1 date = line[7:] # print(date) date_object = datetime(int(date[0:4]), int(date[4:6]), int(date[6:8])) #pull out the symbol elif (line[0:9] == 'SYMBOL = '): symbol = line[9:].strip('\n') # print(symbol) #use the information to populate the feature vector... (most of the work happens here) elif (line[0:12] == 'END OF ENTRY'): if (calendar.weekday(date_object.year,date_object.month,date_object.day) in [5,6]): date_object = workday(date_object,back_x_days,holidays)#include NYSE holidays end_date = workday(date_object,forward_x_days,holidays) #print(date_object) #print(end_date) if(responce in ['y',"Y"]): #sleep(.15) #data = open_url(symbol, str(date_object.month), str(date_object.day), str(date_object.year),str(end_date.year), str(end_date.month), str(end_date.day)) data = open_url(symbol, "%02d" % (date_object.month),"%02d" % (date_object.day), str(date_object.year),str(end_date.year), "%02d" % (end_date.month),"%02d" % (end_date.day)) if(data[0] <= 0): label.append(-1) else: label.append(1) opening_prices.append(data[1][1])
def prepare_data_env(self, inst, mid_day = True):
    # Pre-load historical daily and minute bars for instrument `inst` from the
    # DB, run the registered indicator seed functions, and prime the strategy
    # data arrays plus the current-day / current-minute state.
    # mid_day=True forces loading even when a lookback window is zero.
    if self.instruments[inst].ptype == instrument.ProductType.Option:
        # Options are not backed by bar history here.
        return
    self.db_conn = dbaccess.connect(**dbaccess.dbconfig)
    if self.daily_data_days > 0 or mid_day:
        #self.logger.debug('Updating historical daily data for %s' % self.scur_day.strftime('%Y-%m-%d'))
        # Daily lookback window [scur_day - N workdays, scur_day], CHN calendar.
        daily_start = workdays.workday(self.scur_day, -self.daily_data_days, CHN_Holidays)
        daily_end = self.scur_day
        ddf = dbaccess.load_daily_data_to_df(self.db_conn, 'fut_daily', inst, daily_start, daily_end, index_col = None)
        if len(ddf) > 0:
            # Seed last price / prev close from the latest daily bar.
            self.instruments[inst].price = self.instruments[inst].mid_price = ddf['close'].iloc[-1]
            self.instruments[inst].last_update = 0
            self.instruments[inst].prev_close = ddf['close'].iloc[-1]
        for fobj in self.day_data_func[inst]:
            # Each registered daily indicator seeds its columns from history.
            ts = fobj.sfunc(ddf)
            if type(ts).__name__ == 'Series':
                if ts.name in ddf.columns:
                    self.logger.warning('TimeSeries name %s is already in the columns for inst = %s' % (ts.name, inst))
                ddf[ts.name]= ts
            elif type(ts).__name__ == 'DataFrame':
                for col_name in ts.columns:
                    if col_name in ddf.columns:
                        self.logger.warning('TimeSeries name %s is already in the columns for inst = %s' % (col_name, inst))
                    ddf[col_name] = ts[col_name]
        self.day_data[inst] = data_handler.DynamicRecArray(dataframe = ddf)
    if self.min_data_days > 0 or mid_day:
        #self.logger.debug('Updating historical min data for %s' % self.scur_day.strftime('%Y-%m-%d'))
        d_start = workdays.workday(self.scur_day, -self.min_data_days, CHN_Holidays)
        d_end = self.scur_day
        # Restrict minute loading to this instrument's session (tick id / 1000
        # gives the minute id bounds).
        min_start = int(self.instruments[inst].start_tick_id/1000)
        min_end = int(self.instruments[inst].last_tick_id/1000)+1
        mdf = dbaccess.load_min_data_to_df(self.db_conn, 'fut_min', inst, d_start, d_end, minid_start=min_start, minid_end=min_end, index_col = None)
        mdf = cleanup_mindata(mdf, self.instruments[inst].product, index_col = None)
        mdf['bar_id'] = self.conv_bar_id(mdf['min_id'])
        if len(mdf)>0:
            min_date = mdf['date'].iloc[-1]
            if (len(self.day_data[inst])==0) or (min_date > self.day_data[inst].data['date'][-1]):
                # Minute data is newer than the last daily bar: rebuild the
                # current-day bar by resampling the minute bars.
                ddf = data_handler.conv_ohlc_freq(mdf, 'd', index_col = None)
                self.cur_day[inst]['open'] = float(ddf.open[-1])
                self.cur_day[inst]['close'] = float(ddf.close[-1])
                self.cur_day[inst]['high'] = float(ddf.high[-1])
                self.cur_day[inst]['low'] = float(ddf.low[-1])
                self.cur_day[inst]['volume'] = int(ddf.volume[-1])
                self.cur_day[inst]['openInterest'] = int(ddf.openInterest[-1])
                # Prime the current-minute state from the last minute bar.
                # NOTE(review): pd.datetime was removed in pandas 2.0 —
                # migrate to datetime.datetime when upgrading.
                self.cur_min[inst]['datetime'] = pd.datetime(*mdf['datetime'].iloc[-1].timetuple()[0:-3])
                self.cur_min[inst]['date'] = mdf['date'].iloc[-1]
                self.cur_min[inst]['open'] = float(mdf['open'].iloc[-1])
                self.cur_min[inst]['close'] = float(mdf['close'].iloc[-1])
                self.cur_min[inst]['high'] = float(mdf['high'].iloc[-1])
                self.cur_min[inst]['low'] = float(mdf['low'].iloc[-1])
                self.cur_min[inst]['volume'] = self.cur_day[inst]['volume']
                self.cur_min[inst]['openInterest'] = self.cur_day[inst]['openInterest']
                self.cur_min[inst]['min_id'] = int(mdf['min_id'].iloc[-1])
                self.cur_min[inst]['bar_id'] = self.conv_bar_id(self.cur_min[inst]['min_id'])
                self.instruments[inst].price = self.instruments[inst].mid_price = float(mdf['close'].iloc[-1])
                self.instruments[inst].last_update = 0
                #self.logger.debug('inst=%s tick data loaded for date=%s' % (inst, min_date))
        if 1 not in self.min_data_func[inst]:
            # Keep the raw 1-min series even when no indicator is registered on it.
            self.min_data[inst][1] = data_handler.DynamicRecArray(dataframe = mdf)
        for m in sorted(self.min_data_func[inst]):
            if m != 1:
                # Resample 1-min bars to m-minute bars, carrying bar_id along.
                mdf_m = data_handler.conv_ohlc_freq(mdf, str(m)+'min', index_col = None, bar_func = self.conv_bar_id, extra_cols = ['bar_id'])
            else:
                mdf_m = mdf
            for fobj in self.min_data_func[inst][m]:
                # Seed minute-frequency indicators, warning on column clashes.
                ts = fobj.sfunc(mdf_m)
                if type(ts).__name__ == 'Series':
                    if ts.name in mdf_m.columns:
                        self.logger.warning('TimeSeries name %s is already in the columns for inst = %s' % (ts.name, inst))
                    mdf_m[ts.name]= ts
                elif type(ts).__name__ == 'DataFrame':
                    for col_name in ts.columns:
                        if col_name in mdf_m.columns:
                            self.logger.warning('TimeSeries name %s is already in the columns for inst = %s' % (col_name, inst))
                        mdf_m[col_name] = ts[col_name]
            self.min_data[inst][m] = data_handler.DynamicRecArray(dataframe = mdf_m)
            #print inst, self.min_data[inst][m].data['date'][-1] < self.cur_min[inst]['date']
    self.db_conn.close()
def due_date(self):
    """Return the SLA due date: the working day `total_days` workdays after
    the start date, where the budget is the SLA's budget + start-attendance +
    delivery allowances extended by any interruption days."""
    # Renamed from `max`, which shadowed the builtin.
    total_days = (self.sla.days_budget + self.sla.days_start_attendance
                  + self.sla.days_delivery)
    # Interruptions push the deadline out by the same number of working days.
    total_days += self._get_interruption_days()
    return workday(self.start_date, total_days)