def on_trading_open(self, timestamp):
    """Refresh self.latest_account from the most recent persisted account
    when a trading day opens (skipped on the very first day)."""
    self.logger.info('on_trading_open:{}'.format(timestamp))
    if is_same_date(timestamp, self.start_timestamp):
        return

    # the latest account recorded at or before this date
    found = get_account(session=self.session,
                        trader_name=self.trader_name,
                        return_type='domain',
                        end_timestamp=to_time_str(timestamp),
                        limit=1,
                        order=SimAccount.timestamp.desc())
    if not found:
        return
    account = found[0]

    # FIXME:dump all directly
    dumped_positions = []
    for domain in account.positions:
        data = position_schema.dump(domain).data
        self.logger.info('current position:{}'.format(data))
        del data['sim_account']
        dumped_positions.append(data)

    self.latest_account = sim_account_schema.dump(account).data
    self.latest_account['positions'] = dumped_positions
    self.logger.info('on_trading_open:{},latest_account:{}'.format(
        timestamp, self.latest_account))
def persist(self, entity, domain_list):
    """Persist domain_list for entity, optionally dropping today's unfinished kdata.

    :param entity: entity the records belong to (must expose ``id``)
    :param domain_list: domain objects ordered by timestamp, ascending or
        descending; empty lists are a no-op
    """
    if not domain_list:
        return

    # domain_list may arrive in either order; normalize first/last
    if domain_list[0].timestamp >= domain_list[-1].timestamp:
        first_timestamp = domain_list[-1].timestamp
        last_timestamp = domain_list[0].timestamp
    else:
        first_timestamp = domain_list[0].timestamp
        last_timestamp = domain_list[-1].timestamp
    self.logger.info(
        "persist {} for entity_id:{},time interval:[{},{}]".format(
            self.data_schema, entity.id, first_timestamp, last_timestamp))

    current_timestamp = now_pd_timestamp()
    saving_datas = domain_list

    # FIXME:should remove unfinished data when recording,always set it to False now
    if is_same_date(current_timestamp, last_timestamp) and self.contain_unfinished_data:
        # After the closing time of the day (plus a 2-minute grace period) the
        # last data point is considered finished.
        # BUG FIX: the old check was `hour >= close_hour and minute >= close_minute + 2`,
        # which wrongly treated e.g. 16:01 as *before* a 15:00 close because the
        # minute comparison failed; compare (hour, minute) lexicographically.
        after_close = (current_timestamp.hour > self.close_hour
                       or (current_timestamp.hour == self.close_hour
                           and current_timestamp.minute >= self.close_minute + 2))
        if not after_close:
            # ignore unfinished kdata
            saving_datas = domain_list[:-1]
            # BUG FIX: format string had 3 placeholders for 4 args, silently
            # dropping current_timestamp from the log line
            self.logger.info(
                "ignore kdata for entity_id:{},level:{},timestamp:{},current_timestamp:{}"
                .format(entity.id, self.level, last_timestamp, current_timestamp))

    self.session.add_all(saving_datas)
    self.session.commit()
def is_in_trading(entity_type, exchange, timestamp):
    """Return True when timestamp falls on today and the current time lies
    strictly inside one of the exchange's trading intervals."""
    current = now_pd_timestamp()
    if not is_same_date(current, to_pd_timestamp(timestamp)):
        return False
    intervals = get_trading_intervals(entity_type=entity_type, exchange=exchange)
    return any(
        date_and_time(current, begin) < current < date_and_time(current, finish)
        for begin, finish in intervals
    )
def record(self, entity, start, end, size, timestamps):
    """Fetch kdata for entity from sina's per-quarter pages.

    Returns a list of dicts (``DataFrame.to_dict(orient='records')``);
    returns [] when no quarter yielded any data.
    """
    the_quarters = get_year_quarters(start)
    # the first quarter is treated as already recorded unless start is the
    # entity's own (listing) timestamp
    if not is_same_date(entity.timestamp, start) and len(the_quarters) > 1:
        the_quarters = the_quarters[1:]

    # (removed the pointless param-dict round trip; same values, direct names)
    security_item = entity
    level = self.level.value

    result_df = pd.DataFrame()
    for year, quarter in the_quarters:
        query_url = self.url.format(security_item.code, year, quarter)
        response = requests.get(query_url)
        response.encoding = 'gbk'

        try:
            dfs = pd.read_html(response.text)
        except ValueError as error:
            self.logger.error(
                f'skip ({year}-{quarter:02d}){security_item.code}{security_item.name}({error})'
            )
            time.sleep(10.0)
            continue

        # the kdata table is expected to be the 5th table on the page
        if len(dfs) < 5:
            time.sleep(10.0)
            continue

        df = dfs[4].copy()
        df = df.iloc[1:]  # drop the header row
        df.columns = [
            'timestamp', 'open', 'high', 'close', 'low', 'volume', 'turnover'
        ]
        df['name'] = security_item.name
        df['level'] = level
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['provider'] = 'sina'

        result_df = pd.concat([result_df, df])

        self.logger.info(
            f'({security_item.code}{security_item.name})({year}-{quarter:02d})'
        )
        # throttle between quarter requests to be polite to sina
        time.sleep(10.0)

    # BUG FIX: sorting an empty frame by 'timestamp' raised KeyError because
    # the column only exists after at least one quarter was fetched
    if result_df.empty:
        return []

    result_df = result_df.sort_values(by='timestamp')
    return result_df.to_dict(orient='records')
def evaluate_start_end_size_timestamps(self, entity):
    """Same as the base evaluation, but force size to 0 when the latest
    recorded trade day equals the computed start (already up to date)."""
    start, end, size, timestamps = super().evaluate_start_end_size_timestamps(entity)
    if start:
        latest_days = StockTradeDay.query_data(limit=1,
                                               order=StockTradeDay.timestamp.desc(),
                                               return_type='domain')
        if latest_days and is_same_date(latest_days[0].timestamp, start):
            size = 0
    return start, end, size, timestamps
def evaluate_start_end_size_timestamps(self, entity):
    """Decide the recording window for entity.

    Returns ``(start_timestamp, end_timestamp, size, timestamps)``;
    ``size == 0`` means the entity is already up to date. May sleep up to
    half of the level's waiting interval when the next bar is not ready yet.
    """
    # latest saved record wins; fall back to the entity's own timestamp
    latest_saved_record = self.get_latest_saved_record(entity=entity)
    if latest_saved_record:
        latest_timestamp = latest_saved_record[0].timestamp
    else:
        latest_timestamp = entity.timestamp

    if not latest_timestamp:
        return latest_timestamp, None, self.default_size, None

    current_time = pd.Timestamp.now()
    time_delta = current_time - latest_timestamp

    if self.level == IntervalLevel.LEVEL_1DAY:
        if is_same_date(current_time, latest_timestamp):
            return latest_timestamp, None, 0, None
        return latest_timestamp, None, time_delta.days + 1, None

    # to today,check closing time
    # close_hour == 0 and close_minute == 0 means never stop, e.g. coin.
    # BUG FIX: the old guard used `and` (`close_hour != 0 and close_minute != 0`),
    # so a market closing exactly on the hour (minute 0, e.g. 15:00) was never
    # considered to have a close time; `or` matches the "0,0 means never stop" intent.
    if (self.close_hour != 0 or self.close_minute != 0) and time_delta.days == 0:
        # NOTE(review): this requires the last record to land exactly on the
        # close minute — confirm upstream always aligns timestamps that way
        if latest_timestamp.hour == self.close_hour and latest_timestamp.minute == self.close_minute:
            return latest_timestamp, None, 0, None

    if self.kdata_use_begin_time:
        # records are stamped with the interval's begin time; the interval is
        # only complete ("touched") one level-length later
        touching_timestamp = latest_timestamp + pd.Timedelta(
            seconds=self.level.to_second())
    else:
        touching_timestamp = latest_timestamp

    waiting_seconds, size = self.level.count_from_timestamp(
        touching_timestamp,
        one_day_trading_minutes=self.one_day_trading_minutes)

    # sleep half the waiting interval instead of busy-polling the source
    if not self.one_shot and waiting_seconds and (waiting_seconds > 30):
        t = waiting_seconds / 2
        self.logger.info(
            'level:{},recorded_time:{},touching_timestamp:{},current_time:{},next_ok_time:{},just sleep:{} seconds'
            .format(
                self.level.value, latest_timestamp, touching_timestamp,
                current_time,
                touching_timestamp + pd.Timedelta(seconds=self.level.to_second()),
                t))
        time.sleep(t)

    return latest_timestamp, None, size, None
def record(self, entity, start, end, size, timestamps):
    """Scrape bfq and hfq kdata per quarter from sina and merge them.

    For each quarter the bfq (non-adjusted) rows are collected first, then
    the hfq (post-adjusted) rows are merged into the matching bfq record by
    kdata id. Returns a list of kdata dicts.
    """
    the_quarters = get_year_quarters(start)
    # treat has recorded the season if contains some date
    if not is_same_date(entity.timestamp, start) and len(the_quarters) > 1:
        the_quarters = the_quarters[1:]

    # BUG FIX: kdatas was re-initialized inside the quarter loop, so data
    # from all but one quarter was discarded by the single return.
    # Accumulate across all quarters instead (kdata ids are unique per
    # timestamp, so the hfq->bfq matching below is unaffected).
    kdatas = []
    for year, quarter in the_quarters:
        for fuquan in ['bfq', 'hfq']:
            the_url = self.get_kdata_url(entity.code, year, quarter, fuquan)
            resp = requests.get(the_url)
            trs = Selector(text=resp.text).xpath(
                '//*[@id="FundHoldSharesTable"]/tr[position()>1 and position()<=last()]'
            ).extract()

            for tr in trs:
                tds = Selector(text=tr).xpath('//td//text()').extract()
                tds = [x.strip() for x in tds if x.strip()]
                # renamed from `open` etc. to avoid shadowing the builtin
                open_price = tds[1]
                high_price = tds[2]
                close_price = tds[3]
                low_price = tds[4]
                volume = tds[5]
                turnover = tds[6]

                the_timestamp = to_pd_timestamp(tds[0])
                the_id = generate_kdata_id(entity_id=entity.id,
                                           timestamp=the_timestamp,
                                           level=self.level)

                if fuquan == 'hfq':
                    factor = tds[7]
                    # we got bfq at first and then update hfq data
                    existed = [item for item in kdatas if item['id'] == the_id]
                    if existed:
                        kdata = existed[0]
                    else:
                        # no bfq counterpart: derive bfq prices from the factor
                        self.logger.error("bfq not got for:{}".format(the_id))
                        kdata = {
                            'id': the_id,
                            'timestamp': the_timestamp,
                            'name': entity.name,
                            'level': self.level.value,
                            'open': to_float(open_price) / to_float(factor),
                            'close': to_float(close_price) / to_float(factor),
                            'high': to_float(high_price) / to_float(factor),
                            'low': to_float(low_price) / to_float(factor),
                            'volume': to_float(volume),
                            'turnover': to_float(turnover)
                        }
                        kdatas.append(kdata)
                    kdata['hfq_open'] = to_float(open_price)
                    kdata['hfq_high'] = to_float(high_price)
                    kdata['hfq_close'] = to_float(close_price)
                    kdata['hfq_low'] = to_float(low_price)
                    kdata['factor'] = to_float(factor)
                    self.latest_factors[entity.id] = to_float(factor)
                else:
                    kdatas.append({
                        'id': the_id,
                        'timestamp': the_timestamp,
                        'name': entity.name,
                        'level': self.level.value,
                        'open': to_float(open_price),
                        'close': to_float(close_price),
                        'high': to_float(high_price),
                        'low': to_float(low_price),
                        'volume': to_float(volume),
                        'turnover': to_float(turnover)
                    })
    return kdatas