def getParallelResult(strategyParameter, strategy_class, parasetlist, paranum, indexcols):
    strategyName = strategyParameter['strategy_name']
    exchange_id = strategyParameter['exchange_id']
    sec_id = strategyParameter['sec_id']
    K_MIN = strategyParameter['K_MIN']
    startdate = strategyParameter['startdate']
    enddate = strategyParameter['enddate']
    domain_symbol = '.'.join([exchange_id, sec_id])
    result_para_dic = strategyParameter['result_para_dic']
    # ====================== Data preparation ==============================================
    # Load contract information
    symbolInfo = DI.SymbolInfo(domain_symbol, startdate, enddate)
    # Load cross-contract (roll) data
    # contractswaplist = DC.getContractSwaplist(domain_symbol)
    # swaplist = np.array(contractswaplist.swaputc)
    # Load K-line (bar) data
    # rawdata = DC.getBarData(symbol, K_MIN, startdate + ' 00:00:00', enddate + ' 23:59:59').reset_index(drop=True)
    rawdataDic = DI.getBarBySymbolList(domain_symbol, symbolInfo.getSymbolList(), K_MIN, startdate, enddate)
    timestart = time.time()
    # Multiprocessing optimization: start a pool sized to the number of CPU cores (minus one)
    pool = multiprocessing.Pool(multiprocessing.cpu_count() - 1)
    l = []
    resultlist = pd.DataFrame(columns=['Setname'] + indexcols)
    strategy_para_name_list = strategy_class.get_para_name_list()
    for i in range(0, paranum):
        paraset = {}
        setname = parasetlist.ix[i, 'Setname']
        paraset['Setname'] = setname
        for strategy_para_name in strategy_para_name_list:
            paraset[strategy_para_name] = parasetlist.ix[i, strategy_para_name]
        # l.append(getResult(strategyName, strategy_class, symbolInfo, K_MIN, rawdataDic, paraset, result_para_dic, indexcols, timestart))
        l.append(
            pool.apply_async(
                getResult,
                (strategyName, strategy_class, symbolInfo, K_MIN, rawdataDic, paraset,
                 result_para_dic, indexcols, timestart)))
    pool.close()
    pool.join()
    timeend = time.time()
    print("total time %.2f" % (timeend - timestart))
    # Collect and save the results
    i = 0
    for res in l:
        resultlist.loc[i] = res.get()
        i += 1
    finalresults = ("%s %s %d finalresult.csv" % (strategyName, domain_symbol, K_MIN))
    resultlist.to_csv(finalresults)
    return resultlist
def __init__(self):
    super(SearchCommandProceedingBehavior, self).__init__()
    self.behavior_type = "search"
    self.__commands_dict = config['core_commands_search']
    self.setCommandRecognizer(DictBasedCommandRecognizer(CommandConfigLoader.load(self.__commands_dict), DifflibMatchFinder))
    self._output_connection = CoreOutputSingleton.getInstance()
    self._data_interface = DataInterface()
    self._data_interface.registerDataFinder(QPyDataFinder(NoModifyingQueryGenerator(), NonASCIICleanDataFinderOutputProcessor()), 1)
    self._data_interface.registerDataFinder(TaggedDataFinder(KeywordsQueryGenerator(), NoModifyingDataFinderOutputProcessor(), config['database_file']), 2)
    self._data_interface.registerDataFinder(ESearchDataFinder(KeywordsQueryGenerator(), NoModifyingDataFinderOutputProcessor()), 3)
    self._history = []
    self._prev_query = []
    self._parent = None
def __init__(self, name, manager_mac, manager_address, manager_port, device_type, slots, data_interfaces):
    self.name = name
    self.manager_address = manager_address
    self.manager_mac = manager_mac
    self.manager_port = manager_port
    self.device_type = device_type
    self.slots = slots
    self.data_interfaces = []
    for interface in data_interfaces:
        mac_addr = interface['mac']
        ip_addr = interface['address']
        port = str(interface['port'])
        self.data_interfaces.append(DataInterface(mac_addr, ip_addr, port))
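A minimal sketch of the data_interfaces argument this constructor iterates over, inferred only from the keys it reads ('mac', 'address', 'port'); the concrete values are placeholders, not project data.

# Hypothetical example input for the constructor above; values are placeholders.
example_data_interfaces = [
    {'mac': '00:00:00:00:00:00', 'address': '127.0.0.1', 'port': 12345},
    {'mac': '0a:0b:0c:0d:0e:0f', 'address': '192.168.1.1', 'port': 222},
]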
def setup_child_graph(self):
    domain_symbol = '.'.join([self.setting_dic['exchange'], self.setting_dic['symbol']])
    contract = self.setting_dic['contract']
    bar_type = self.setting_dic['period']
    start_date = self.setting_dic['start_date']
    end_date = self.setting_dic['end_date']
    self.raw_data = DI.getBarBySymbol(domain_symbol, contract, bar_type,
                                      start_date + ' 09:00:00', end_date + ' 15:00:00')
    # self.raw_data = pd.read_excel('RB1810_2018-06-19_1m.xlsx')
    self.main_child_graph.set_raw_data(self.raw_data)
    for second_child_graph in self.child_graph_list:
        second_child_graph.set_raw_data(self.raw_data)
    self.range_control_plt.plot(self.raw_data['close'], pen="w", name='close')
    self.region.sigRegionChanged.connect(self.set_child_range)
    self.region.setRegion([0, 100])
def multi_stop_loss(strategyName, symbolInfo, K_MIN, setname, stopLossTargetDictList, barxmdic,
                    result_para_dic, tofolder, indexcols):
    print 'setname:', setname
    symbol = symbolInfo.domain_symbol
    bt_folder = "%s %d backtesting\\" % (symbol, K_MIN)
    oprdf = pd.read_csv(bt_folder + strategyName + ' ' + symbol + str(K_MIN) + ' ' + setname + ' result.csv')
    symbolDomainDic = symbolInfo.amendSymbolDomainDicByOpr(oprdf)
    barxm = DI.getDomainbarByDomainSymbol(symbolInfo.getSymbolList(), barxmdic, symbolDomainDic)
    dailyK = DI.generatDailyClose(barxm)
    positionRatio = result_para_dic['positionRatio']
    initialCash = result_para_dic['initialCash']
    oprlist = []
    sltnum = len(stopLossTargetDictList)
    for i in range(sltnum):
        slt = stopLossTargetDictList[i]
        # Read each stop-loss target's result file and copy its columns into oprdf under the target's name
        sltdf = pd.read_csv("%s%s %s%d %s %s" % (slt['folder'], strategyName, symbol, K_MIN, setname, slt['fileSuffix']))
        sltName = slt['name']
        oprdf[sltName + '_closeprice'] = sltdf['new_closeprice']
        oprdf[sltName + '_closetime'] = sltdf['new_closetime']
        oprdf[sltName + '_closeindex'] = sltdf['new_closeindex']
        oprdf[sltName + '_closeutc'] = sltdf['new_closeutc']
        oprdf[sltName + '_ret'] = sltdf['new_ret']
        oprdf[sltName + '_own cash'] = sltdf['new_own cash']
        oprlist.append(sltdf)
    # dsloprname = stratetyName + ' ' + symbol + str(K_MIN) + ' ' + setname + ' resultDSL_by_tick.csv'
    # ownloprname = stratetyName + ' ' + symbol + str(K_MIN) + ' ' + setname + ' resultOWNL_by_tick.csv'
    # dsloprdf = pd.read_csv(dslFolder + dsloprname)
    # ownloprdf = pd.read_csv(ownlFolder + ownloprname)
    oprdf['new_closeprice'] = oprdf['closeprice']
    oprdf['new_closetime'] = oprdf['closetime']
    oprdf['new_closeindex'] = oprdf['closeindex']
    oprdf['new_closeutc'] = oprdf['closeutc']
    oprdf['min_closeutc'] = oprdf['closeutc']
    oprdf['max_closeutc'] = oprdf['closeutc']
    for i in range(sltnum):
        # Take the earliest close time first, then match the close type by that time
        slt = stopLossTargetDictList[i]
        utcname = slt['name'] + '_closeutc'
        oprdf['min_closeutc'] = oprdf.loc[:, ['min_closeutc', utcname]].min(axis=1)
        oprdf['max_closeutc'] = oprdf.loc[:, ['max_closeutc', utcname]].max(axis=1)
    # Match the close type using the earliest close time; identical times are not handled specially
    oprdf['closetype'] = 'Normal'
    oprdf.loc[oprdf['max_closeutc'] != oprdf['closeutc'], 'min_closeutc'] = oprdf['max_closeutc']
    for i in range(sltnum):
        slt = stopLossTargetDictList[i]
        name = slt['name']
        utcname = name + '_closeutc'
        utcnamebuf = name + '_closeutc_buf'
        oprdf[utcnamebuf] = oprdf[utcname]
        oprdf.loc[(oprdf['max_closeutc'] != oprdf['closeutc']) & (oprdf[utcname] == oprdf['closeutc']), utcnamebuf] = oprdf['max_closeutc']
    for i in range(sltnum):
        # Recompute the earliest close time using the buffered columns
        slt = stopLossTargetDictList[i]
        name = slt['name']
        utcnamebuf = name + '_closeutc_buf'
        oprdf['min_closeutc'] = oprdf.loc[:, ['min_closeutc', utcnamebuf]].min(axis=1)
    for i in range(sltnum):
        # Label rows whose time equals the minimum. In the stop-loss files, operations where the stop-loss
        # did not trigger keep their original values, so after labelling, the remaining 'Normal' rows are
        # those whose original close time is earlier than the stop-loss time (values the minimum match
        # cannot identify and which need special handling).
        slt = stopLossTargetDictList[i]
        name = slt['name']
        utcname = name + '_closeutc'
        oprdf.loc[oprdf['min_closeutc'] == oprdf[utcname], 'closetype'] = slt['name']
        oprdf.loc[oprdf['min_closeutc'] == oprdf[utcname], 'new_closeprice'] = oprdf[name + '_closeprice']
        oprdf.loc[oprdf['min_closeutc'] == oprdf[utcname], 'new_closetime'] = oprdf[name + '_closetime']
        oprdf.loc[oprdf['min_closeutc'] == oprdf[utcname], 'new_closeindex'] = oprdf[name + '_closeindex']
        oprdf.loc[oprdf['min_closeutc'] == oprdf[utcname], 'new_closeutc'] = oprdf[name + '_closeutc']
        oprdf.drop(name + '_closeutc_buf', axis=1, inplace=True)  # drop the buffer column
    # Label normal (non-stop-loss) closes
    oprdf.loc[oprdf['min_closeutc'] == oprdf['closeutc'], 'closetype'] = 'Normal'
    oprdf.drop('min_closeutc', axis=1, inplace=True)
    oprdf.drop('max_closeutc', axis=1, inplace=True)
    slip = symbolInfo.getSlip()
    # 2017-12-08: include slippage
    oprdf['new_ret'] = ((oprdf['new_closeprice'] - oprdf['openprice']) * oprdf['tradetype']) - slip
    oprdf['new_ret_r'] = oprdf['new_ret'] / oprdf['openprice']
    oprdf['new_commission_fee'], oprdf['new_per earn'], oprdf['new_own cash'], oprdf['new_hands'] = \
        RS.calcResult(oprdf, symbolInfo, initialCash, positionRatio, ret_col='new_ret')
    oprdf.to_csv(tofolder + '\\' + strategyName + ' ' + symbol + str(K_MIN) + ' ' + setname + ' result_multiSLT.csv', index=False)
    # Compute the summary statistics
    slWorkNum = oprdf.loc[oprdf['closetype'] != 'Normal'].shape[0]
    olddailydf = pd.read_csv(bt_folder + strategyName + ' ' + symbol + str(K_MIN) + ' ' + setname + ' dailyresult.csv', index_col='date')
    oldr = RS.getStatisticsResult(oprdf, False, indexcols, olddailydf)
    dR = RS.dailyReturn(symbolInfo, oprdf, dailyK, initialCash)  # compute the daily results
    dR.calDailyResult()
    dR.dailyClose.to_csv(tofolder + '\\' + strategyName + ' ' + symbol + str(K_MIN) + ' ' + setname + ' dailyresult_multiSLT.csv')
    newr = RS.getStatisticsResult(oprdf, True, indexcols, dR.dailyClose)
    print newr
    return [setname, tofolder, slWorkNum] + oldr + newr
class Demo:
    def __init__(self, *args, **kwargs):
        self.data_interface = DataInterface()
        self.twitter_interface = TwitterInterface(self.data_interface)
        self.sniffer = Sniffer(self.data_interface)

    def parse_and_execute_command(self, last_mention):
        command = last_mention['text']
        if command[0] == 'SET_MODE':
            current_config = self.data_interface.get_hound_mode()
            try:
                if command[1] == 'SCAN':
                    if current_config['Mode'] == 'SCAN':
                        return
                    else:
                        self.data_interface.set_hound_mode(command[1])
                        self.twitter_interface.post('Mode Successfully Set: SCAN')
                        return
                elif command[1] == current_config['Mode'] and command[2] == current_config['Args']:
                    print 'No Change In New Command'
                    return
                else:
                    self.data_interface.set_hound_mode(command[1], command[2])
                    self.twitter_interface.post('Mode Successfully Set: {0}, {1}'.format(command[1], command[2]))
            except Exception:
                print 'Duplicate Twitter Status'
        elif command[0] == 'REFRESH':
            if last_mention['created_at'] > datetime.utcnow() - timedelta(0, SLEEP_INTERVAL):
                current_config = self.data_interface.get_hound_mode()
                try:
                    if current_config['Mode'] == 'SCAN':
                        self.data_interface.refresh_scan()
                        self.twitter_interface.post('SCAN Refresh Complete')
                    elif current_config['Mode'] == 'AP':
                        self.data_interface.refresh_ap()
                        self.twitter_interface.post('AP Refresh Complete')
                    else:
                        self.data_interface.refresh_scan()
                        self.twitter_interface.post('MAC Refresh Complete')
                except Exception:
                    print 'Duplicate Twitter Status'

    def mention_is_new(self, last_mention):
        current_config = self.data_interface.get_hound_mode()
        if last_mention['created_at'] > current_config['Set Time']:
            return True
        else:
            return False

    def run(self):
        last_mention = self.twitter_interface.get_last_mention()
        if self.mention_is_new(last_mention):
            self.parse_and_execute_command(last_mention)
            # results = self.sniffer.execute()
            # if len(results):
            #     self.twitter_interface.post_many(results)
        else:
            print 'Last Mention Is Stale'
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 9 16:32:25 2019
for TLM

Revised on 12/12/2019:
include detector IV and CV
Move overlapped methods to parent class in DI
Detector IV data uploading

@author: dding
"""
from DataInterface import DataInterface as DI

if __name__ == "__main__":
    path = '//sjt-fs00/MaterialsTeam/ALL/Characterization Data/DetectorIV'
    folders = {'root': path,
               'uploaded': path + '/' + '0_Uploaded',
               'failed': path + '/' + '0_Upload_Failed',
               'old': path + '/' + '0_Old'}
    test = DI.getInterface('detector', folders)
    test.process_files()
class HoundDaemon():
    def __init__(self, *args, **kwargs):
        self.data_interface = DataInterface()
        self.twitter_interface = TwitterInterface(self.data_interface)
        self.sniffer = Sniffer(self.data_interface)

    def parse_and_execute_command(self, last_mention):
        command = last_mention['text']
        if command[0] == 'SET_MODE':
            current_config = self.data_interface.get_hound_mode()
            try:
                if command[1] == 'SCAN':
                    if current_config['Mode'] == 'SCAN':
                        return
                    else:
                        self.data_interface.set_hound_mode(command[1])
                        self.twitter_interface.post('Mode Successfully Set: SCAN')
                        return
                elif command[1] == current_config['Mode'] and command[2] == current_config['Args']:
                    print 'No Change In New Command'
                    return
                else:
                    self.data_interface.set_hound_mode(command[1], command[2])
                    self.twitter_interface.post('Mode Successfully Set: {0}, {1}'.format(command[1], command[2]))
            except Exception:
                print 'Duplicate Twitter Status'
                print 'Mode Successfully Set: {0}, {1}'.format(command[1], command[2])
        elif command[0] == 'REFRESH':
            if last_mention['created_at'] > datetime.utcnow() - timedelta(0, SLEEP_INTERVAL):
                current_config = self.data_interface.get_hound_mode()
                message = '{0} Refresh Complete'.format(current_config['Mode'])
                try:
                    if current_config['Mode'] == 'SCAN':
                        self.data_interface.refresh_scan()
                    elif current_config['Mode'] == 'AP':
                        self.data_interface.refresh_ap()
                    else:
                        self.data_interface.refresh_scan()
                    self.twitter_interface.post(message)
                except Exception:
                    print 'Duplicate Twitter Status'
                    print message
            else:
                print 'REFRESH Command Stale, Skipping Refresh...'

    def mention_is_new(self, last_mention):
        current_config = self.data_interface.get_hound_mode()
        if last_mention['created_at'] > current_config['Set Time']:
            return True
        else:
            return False

    def run(self):
        loop_count = 1
        while True:
            print 'Starting Sequence {0}:'.format(loop_count)
            print 'Getting Last Mentions From Twitter...'
            last_mention = self.twitter_interface.get_last_mention()
            print 'Last Mention Received: {0}'.format(' '.join(last_mention['text']))
            if self.mention_is_new(last_mention):
                if last_mention['text'][0] != 'REFRESH':
                    print 'Executing Command...'
                    self.parse_and_execute_command(last_mention)
            else:
                print 'Last Mention Stale, Not Executing...'
            print 'Running Sniffer...'
            results = self.sniffer.execute()
            if len(results):
                print 'Posting Sniffer Results...'
                self.twitter_interface.post_many(results)
            else:
                print 'No New Sniffer Results...'
            print 'Sleeping for {0} Seconds...'.format(SLEEP_INTERVAL)
            time.sleep(SLEEP_INTERVAL)
            print '\n\n'
            loop_count += 1
class HoundDaemon():
    def __init__(self, *args, **kwargs):
        self.data_interface = DataInterface()
        self.twitter_interface = TwitterInterface(self.data_interface)
        self.sniffer = Sniffer(self.data_interface)

    def parse_and_execute_command(self, last_mention):
        command = last_mention['text']
        if command[0] == 'SET_MODE':
            current_config = self.data_interface.get_hound_mode()
            try:
                if command[1] == 'SCAN':
                    if current_config['Mode'] == 'SCAN':
                        return
                    else:
                        self.data_interface.set_hound_mode(command[1])
                        self.twitter_interface.post('Mode Successfully Set: SCAN')
                        return
                elif command[1] == current_config['Mode'] and command[2] == current_config['Args']:
                    print 'No Change In New Command'
                    return
                else:
                    self.data_interface.set_hound_mode(command[1], command[2])
                    self.twitter_interface.post('Mode Successfully Set: {0}, {1}'.format(command[1], command[2]))
            except Exception:
                print 'Duplicate Twitter Status'
                print 'Mode Successfully Set: {0}, {1}'.format(command[1], command[2])
        elif command[0] == 'REFRESH':
            if last_mention['created_at'] > datetime.utcnow() - timedelta(0, SLEEP_INTERVAL):
                current_config = self.data_interface.get_hound_mode()
                message = '{0} Refresh Complete'.format(current_config['Mode'])
                try:
                    if current_config['Mode'] == 'SCAN':
                        self.data_interface.refresh_scan()
                    elif current_config['Mode'] == 'AP':
                        self.data_interface.refresh_ap()
                    else:
                        self.data_interface.refresh_scan()
                    self.twitter_interface.post(message)
                except Exception:
                    print 'Duplicate Twitter Status'
                    print message
            else:
                print 'REFRESH Command Stale, Skipping Refresh...'

    def mention_is_new(self, last_mention):
        current_config = self.data_interface.get_hound_mode()
        if last_mention['created_at'] > current_config['Set Time']:
            return True
        else:
            return False

    def run(self):
        loop_count = 1
        while True:
            print 'Starting Sequence {0}:'.format(loop_count)
            print 'Getting Last Mentions From Twitter...'
            last_mention = self.twitter_interface.get_last_mention()
            print 'Last Mention Received: {0}'.format(' '.join(last_mention['text']))
            if self.mention_is_new(last_mention):
                if last_mention['text'][0] != 'REFRESH':
                    print 'Executing Command...'
                    self.parse_and_execute_command(last_mention)
            else:
                print 'Last Mention Stale, Not Executing...'
            print 'Running Sniffer...'
            results = self.sniffer.execute()
            if len(results):
                print 'Posting Sniffer Results...'
                self.twitter_interface.post_many(results)
            else:
                print 'No New Sniffer Results...'
            print 'Sleeping for {0} Seconds...'.format(SLEEP_INTERVAL)
            time.sleep(SLEEP_INTERVAL)
            print '\n\n'
            loop_count += 1
def __init__(self, *args, **kwargs):
    super(HoundDaemon, self).__init__(*args, **kwargs)
    self.data_interface = DataInterface()
    self.twitter_interface = TwitterInterface(self.data_interface)
    self.sniffer = Sniffer(self.data_interface)
def single_sl(strategy_name, symbol_info, bar_type, setname, bar1m_dic, barxm_dic, stop_loss_class_list,
              result_para_dic, indexcols, timestart):
    print "%s %s %d %s" % (strategy_name, symbol_info.domain_symbol, bar_type, setname)
    symbol = symbol_info.domain_symbol
    bt_folder = "%s %d backtesting\\" % (symbol, bar_type)
    oprdf = pd.read_csv(bt_folder + strategy_name + ' ' + symbol + str(bar_type) + ' ' + setname + ' result.csv')
    close_type_list = []
    all_final_result_dic = {}  # holds the RS statistics of each result file; returned to the caller
    all_stop_loss_opr_result_dic = {}  # holds the stop-loss result of every operation for each parameter
    for stop_loss_class in stop_loss_class_list:
        sl_name = stop_loss_class.get_sl_name()
        close_type_list.append(sl_name)
        final_result_dic = {}
        stop_loss_opr_result_dic = {}
        for para in stop_loss_class.get_para_dic_list():
            final_result_dic[para['para_name']] = []
            stop_loss_opr_result_dic[para['para_name']] = []
        all_stop_loss_opr_result_dic[sl_name] = stop_loss_opr_result_dic
        all_final_result_dic[sl_name] = final_result_dic
    for stop_loss_class in stop_loss_class_list:
        if stop_loss_class.need_data_process_before_domain:
            bar1m_dic, barxm_dic = stop_loss_class.data_process_before_domain(bar1m_dic, barxm_dic)
    symbolDomainDic = symbol_info.amendSymbolDomainDicByOpr(oprdf)
    bar1m = DI.getDomainbarByDomainSymbol(symbol_info.getSymbolList(), bar1m_dic, symbolDomainDic)
    bar1m = bar1m_prepare(bar1m)
    barxm = DI.getDomainbarByDomainSymbol(symbol_info.getSymbolList(), barxm_dic, symbolDomainDic)
    barxm.set_index('utc_time', drop=False, inplace=True)  # align start times
    bar1m.set_index('utc_time', drop=False, inplace=True)
    for stop_loss_class in stop_loss_class_list:
        if stop_loss_class.need_data_process_after_domain:
            bar1m, barxm = stop_loss_class.data_process_after_domain(bar1m, barxm)
    positionRatio = result_para_dic['positionRatio']
    initialCash = result_para_dic['initialCash']
    oprnum = oprdf.shape[0]
    worknum = 0
    for i in range(oprnum):
        opr = oprdf.iloc[i]
        # startutc = barxm.loc[opr['openutc'], 'utc_endtime']  # start after the opening 10m bar ends
        # endutc = barxm.loc[opr['closeutc'], 'utc_endtime'] - 60  # through the end of the closing 10m bar
        startutc = opr['openutc']
        endutc = opr['closeutc']
        data_1m = bar1m.loc[startutc:endutc]
        data1m = data_1m.drop(data_1m.index[-1])  # .loc slicing is inclusive at both ends, so drop the last row
        for stop_loss_class in stop_loss_class_list:
            sl_name = stop_loss_class.get_sl_name()
            stop_loss_opr_result_dic = all_stop_loss_opr_result_dic[sl_name]
            opr_result_dic = stop_loss_class.get_opr_sl_result(opr, data1m)
            for para in stop_loss_class.get_para_dic_list():
                stop_loss_opr_result_dic[para['para_name']].append(opr_result_dic[para['para_name']])
    slip = symbol_info.getSlip()
    olddailydf = pd.read_csv(bt_folder + strategy_name + ' ' + symbol + str(bar_type) + ' ' + setname + ' dailyresult.csv',
                             index_col='date')
    oldr = RS.getStatisticsResult(oprdf, False, indexcols, olddailydf)
    dailyK = DI.generatDailyClose(barxm)
    for stop_loss_class in stop_loss_class_list:
        sl_name = stop_loss_class.get_sl_name()
        stop_loss_opr_result_dic = all_stop_loss_opr_result_dic[sl_name]
        final_result_dic = all_final_result_dic[sl_name]
        folder_prefix = stop_loss_class.get_folder_prefix()
        file_suffix = stop_loss_class.get_file_suffix()
        for para_name, opr_result_dic_list in stop_loss_opr_result_dic.items():
            result_df = pd.DataFrame(opr_result_dic_list)
            oprdf_temp = pd.concat([oprdf, result_df], axis=1)
            oprdf_temp['new_ret'] = ((oprdf_temp['new_closeprice'] - oprdf_temp['openprice']) * oprdf_temp['tradetype']) - slip
            oprdf_temp['new_ret_r'] = oprdf_temp['new_ret'] / oprdf_temp['openprice']
            oprdf_temp['new_commission_fee'], oprdf_temp['new_per earn'], oprdf_temp['new_own cash'], oprdf_temp['new_hands'] = \
                RS.calcResult(oprdf_temp, symbol_info, initialCash, positionRatio, ret_col='new_ret')
            # Save the new result file
            tofolder = "%s%s\\" % (folder_prefix, para_name)
            oprdf_temp.to_csv(tofolder + strategy_name + ' ' + symbol + str(bar_type) + ' ' + setname + ' ' + file_suffix + para_name + '.csv',
                              index=False)
            dR = RS.dailyReturn(symbol_info, oprdf_temp, dailyK, initialCash)  # compute the daily results
            dR.calDailyResult()
            dR.dailyClose.to_csv(tofolder + strategy_name + ' ' + symbol + str(bar_type) + ' ' + setname + ' daily' + file_suffix + para_name + '.csv')
            newr = RS.getStatisticsResult(oprdf_temp, True, indexcols, dR.dailyClose)
            worknum = oprdf_temp.loc[oprdf_temp['new_closeindex'] != oprdf_temp['closeindex']].shape[0]
            final_result_dic[para_name] = [setname, para_name, worknum] + oldr + newr
    return all_final_result_dic
class HoundDaemon(Daemon):
    def __init__(self, *args, **kwargs):
        super(HoundDaemon, self).__init__(*args, **kwargs)
        self.data_interface = DataInterface()
        self.twitter_interface = TwitterInterface(self.data_interface)
        self.sniffer = Sniffer(self.data_interface)

    def parse_and_execute_command(self, last_mention):
        command = last_mention['text']
        if command[0] == 'SET_MODE':
            current_config = self.data_interface.get_hound_mode()
            try:
                if command[1] == 'SCAN':
                    if current_config['Mode'] == 'SCAN':
                        return
                    else:
                        self.data_interface.set_hound_mode(command[1])
                        self.twitter_interface.post('Mode Successfully Set: SCAN')
                        return
                elif command[1] == current_config['Mode'] and command[2] == current_config['Args']:
                    print 'No Change In New Command'
                    return
                else:
                    self.data_interface.set_hound_mode(command[1], command[2])
                    self.twitter_interface.post('Mode Successfully Set: {0}, {1}'.format(command[1], command[2]))
            except Exception:
                print 'Duplicate Twitter Status'
        elif command[0] == 'REFRESH':
            if last_mention['created_at'] > datetime.utcnow() - timedelta(0, SLEEP_INTERVAL):
                current_config = self.data_interface.get_hound_mode()
                try:
                    if current_config['Mode'] == 'SCAN':
                        self.data_interface.refresh_scan()
                        self.twitter_interface.post('SCAN Refresh Complete')
                    elif current_config['Mode'] == 'AP':
                        self.data_interface.refresh_ap()
                        self.twitter_interface.post('AP Refresh Complete')
                    else:
                        self.data_interface.refresh_scan()
                        self.twitter_interface.post('MAC Refresh Complete')
                except Exception:
                    print 'Duplicate Twitter Status'

    def mention_is_new(self, last_mention):
        current_config = self.data_interface.get_hound_mode()
        if last_mention['created_at'] > current_config['Set Time']:
            return True
        else:
            return False

    def run(self):
        while True:
            last_mention = self.twitter_interface.get_last_mention()
            if self.mention_is_new(last_mention):
                self.parse_and_execute_command(last_mention)
            results = self.sniffer.execute()
            if len(results):
                self.twitter_interface.post_many(results)
            time.sleep(SLEEP_INTERVAL)
# Build the month list and take the start month
newmonth = Parameter.enddate[:7]
month_n = Parameter.month_n
monthlist = [
    datetime.strftime(x, '%Y-%m')
    for x in list(pd.date_range(start=Parameter.startdate, end=newmonth + '-01', freq='M'))
]
startmonth = monthlist[-month_n]
# ====================================== Parameter configuration ==================================================
strategy_name = Parameter.strategy_name
exchange_id = Parameter.exchange_id
sec_id = Parameter.sec_id
bar_type = Parameter.K_MIN
symbol = '.'.join([exchange_id, sec_id])
symbolinfo = DC.SymbolInfo(symbol)
price_tick = symbolinfo.getPriceTick()
symbol_bar_folder_name = Parameter.strategy_folder + "%s %s %s %d\\" % (strategy_name, exchange_id, sec_id, bar_type)
os.chdir(symbol_bar_folder_name)
paraset_name = "%s %s %s %d Parameter.csv" % (strategy_name, exchange_id, sec_id, bar_type)
# Load the existing parameter-set table
parasetlist = pd.read_csv(paraset_name)['Setname'].tolist()
paranum = len(parasetlist)
sltlist = []
calcMultiSLT = False
for sl_name, stop_loss in Parameter.forward_mode_para_dic.items():
    if sl_name == 'multi_sl':
strategyParameterSet[strategy_name] = strategy_bt_parameter
for strategy_name, strategy_bt_parameter in strategyParameterSet.items():
    strategy_folder = "%s%s\\" % (Parameter.root_path, strategy_name)
    for strategyParameter in strategy_bt_parameter:
        exchange_id = strategyParameter['exchange_id']
        sec_id = strategyParameter['sec_id']
        bar_type = strategyParameter['K_MIN']
        startdate = strategyParameter['startdate']
        enddate = strategyParameter['enddate']
        domain_symbol = '.'.join([exchange_id, sec_id])
        result_para_dic = strategyParameter['result_para_dic']
        stop_loss_dic = strategyParameter['stop_loss_dic']
        symbol_info = DI.SymbolInfo(domain_symbol, startdate, enddate)
        symbol_bar_folder_name = strategy_folder + "%s %s %s %d" % (strategy_name, exchange_id, sec_id, bar_type)
        os.chdir(symbol_bar_folder_name)
        paraset_name = "%s %s %s %d Parameter.csv" % (strategy_name, exchange_id, sec_id, bar_type)
        # Load the existing parameter-set table
        parasetlist = pd.read_csv(paraset_name)['Setname'].tolist()
        cols = ['open', 'high', 'low', 'close', 'strtime', 'utc_time', 'utc_endtime']
        # bar1m_dic = DI.getBarBySymbolList(domain_symbol, symbol_info.getSymbolList(), 60, startdate, enddate, cols)
        bar1m_dic = DI.getBarDicAfterDomain(symbol_info, 60, cols)
import theano
import numpy as np
from sklearn.linear_model import LogisticRegression
from DataInterface import DataInterface

################
# LOADING DATA #
################
print '... loading the data'

floatX = theano.config.floatX
dface = DataInterface()
x_train, x_valid, y_train, y_valid = dface.get_traindata(full=True)
x_test = dface.get_testdata()
labels = dface.classes
NUM_FEATURES = x_train.shape[1]
NUM_CLASSES = len(labels)

# Convert to theano types
x_train = np.asarray(x_train.astype(dtype=floatX))
x_valid = np.asarray(x_valid.astype(dtype=floatX))
x_test = np.asarray(x_test.astype(dtype=floatX))
y_train = y_train.astype(dtype=np.int32)
y_valid = y_valid.astype(dtype=np.int32)
import os

import skimage
import numpy as np
import scipy.misc as misc
import imageio
import sklearn.preprocessing as pp

from DataInterface import DataInterface
from ConeBeamCT import ConeBeamCT  # assumed import path; ConeBeamCT is defined elsewhere in this project

nr_projections = 15
src = "/home/andrei/low-dose/DATASET-REGULARIZED/"
dest = "/home/andrei/low-dose/DATASET-256 LOW-DOSE/"
new_dest = os.path.join(dest, "{}_projections/".format(nr_projections))

try:
    os.mkdir(new_dest)
except OSError:
    pass  # the target directory may already exist

data = DataInterface(src)
scans = data.get_tomo_list()
t = 0
for x in scans:
    vol = data.get_tomo_volume(x)
    ct = ConeBeamCT(vol)
    rec = ct.run_new_scan(nr_projections)
    maxim = rec.max()
    _, _, z = rec.shape
    for slice in range(z):
        im = rec[:, :, slice]
        # print(im[150])
        # im = (normalize.normalize(im))
        # im *= 255 / maxim
        astra.algorithm.run(alg_id)
        rec = astra.data3d.get(rec_id)
        astra.algorithm.delete(alg_id)
        astra.data3d.delete(rec_id)
        astra.data3d.delete(proj_id)
        return rec


if __name__ == "__main__":
    import pylab
    from DataInterface import DataInterface

    src = "/home/andrei/Área de Trabalho/Pesquisa/DATASET-256/"
    dataset = DataInterface(src)
    vol = dataset.get_tomo_volume(90)
    ct = ConeBeamCT(vol)
    rec = ct.run_new_scan(15)

    pylab.gray()
    pylab.figure(1)
    pylab.imshow(vol[:, :, 128])
    pylab.figure(2)
    pylab.imshow(rec[:, :, 128])
    pylab.show()
class SearchCommandProceedingBehavior(AbstractCoreCommandProceedingBehavior):
    """
    This class is implemented as a singleton because it stores the search history
    and the same instance must be used throughout the whole program run.
    """
    instance = None

    @staticmethod
    def getInstance():
        if not SearchCommandProceedingBehavior.instance:
            SearchCommandProceedingBehavior.instance = SearchCommandProceedingBehavior()
        return SearchCommandProceedingBehavior.instance

    def __init__(self):
        super(SearchCommandProceedingBehavior, self).__init__()
        self.behavior_type = "search"
        self.__commands_dict = config['core_commands_search']
        self.setCommandRecognizer(DictBasedCommandRecognizer(CommandConfigLoader.load(self.__commands_dict), DifflibMatchFinder))
        self._output_connection = CoreOutputSingleton.getInstance()
        self._data_interface = DataInterface()
        self._data_interface.registerDataFinder(QPyDataFinder(NoModifyingQueryGenerator(), NonASCIICleanDataFinderOutputProcessor()), 1)
        self._data_interface.registerDataFinder(TaggedDataFinder(KeywordsQueryGenerator(), NoModifyingDataFinderOutputProcessor(), config['database_file']), 2)
        self._data_interface.registerDataFinder(ESearchDataFinder(KeywordsQueryGenerator(), NoModifyingDataFinderOutputProcessor()), 3)
        self._history = []
        self._prev_query = []
        self._parent = None

    def proceed(self, user_input, parent):
        self._parent = parent
        self._parent.user_input = None
        self.prev_query = user_input
        recognized_command = self._command_recognizer.recognize_command(user_input)
        if recognized_command == "CANCEL":
            self._history = []
            self._output_connection.sendPOST({'type': 'OPEN_SCREEN', 'command': 'IDLE'})
            self._output_connection.sendPOST({'type': 'SPEAK', 'command': random.choice(config['voice_command_output']['CANCEL'])})
            from IdleCommandProceedingBehavior import IdleCommandProceedingBehavior
            parent.setProceedingBehavior(IdleCommandProceedingBehavior.getInstance())
            parent.user_input = None
            return None
        elif recognized_command == "MUTE":
            self._output_connection.sendPOST({'type': 'MUTE', 'command': ''})
        elif recognized_command == "UNMUTE":
            self._output_connection.sendPOST({'type': 'UNMUTE', 'command': ''})
        else:
            user_input = self._command_recognizer.remove_command(user_input, 'START')
            self._async_work(self._find_data, user_input)

    def _find_data(self, user_input, ret_result):
        from DisplayingDataCommandProceedingBehavior import DisplayingDataCommandProceedingBehavior
        from SearchFailedCommandProceedingBehavior import SearchFailedCommandProceedingBehavior
        logger.info('Searching data...')
        results = self._data_interface.getResults(user_input)
        request = ""
        if results is not None and len(results) != 0:
            _id = 0
            result = results[_id]
            logger.debug(self._history)
            while result.body in [history_entry.body for history_entry in self._history]:
                _id += 1
                if _id >= len(results):
                    ret_result.append({'type': 'OPEN_SCREEN', 'command': 'ERROR'})
                    return None
                result = results[_id]
            self._history.append(result)
            if result.type == '.pdf':
                rel_path = os.path.relpath(config['root_dir'] + config['elastic_docs_dir'] + result.body,
                                           config['root_dir'] + config['output_server_home'])
                fname = "pdf.js/web/viewer.html?file=" + rel_path
                request = {'type': 'OPEN_PDF', 'command': fname}
            elif result.type == '.html':
                request = {'type': 'OPEN_LOCAL_PAGE', 'command': result.body}
            elif result.type == '.url':
                link = open(result.body).read()
                request = {'type': 'OPEN_URL', 'command': link}
            elif result.type == '.webm':
                request = {'type': 'OPEN_VIDEO', 'command': result.body}
            elif result.type == "speech":
                request = {'type': 'SPEAK', 'command': result.body}
            self._parent.setProceedingBehavior(DisplayingDataCommandProceedingBehavior.getInstance())
        else:
            request = {'type': 'OPEN_SCREEN', 'command': 'ERROR'}
            self._parent.setProceedingBehavior(SearchFailedCommandProceedingBehavior.getInstance())
        ret_result.append(request)
        return None

    def _async_work(self, function, argument):
        result = []
        logger.info(argument)
        worker = threading.Thread(target=function, args=(argument, result))
        worker.start()
        while not (self._parent.user_input is not None or result):
            pass
        if result:
            self._output_connection.sendPOST(result[0])
            result = None
            return None
        else:
            logger.info('Cancelled')
            return None
def test_cannot_set_mac_addr(self):
    with self.assertRaises(AttributeError):
        data = DataInterface("00:00:00:00:00:00", "127.0.0.1", 12345)
        data.mac_addr = "0a:0b:0c:0d:0e:0f"
def calOprResult(strategyName, rawpath, symbolinfo, K_MIN, nextmonth, columns, barxmdic, positionRatio,
                 initialCash, indexcols, indexcolsFlag, resultfilesuffix='result.csv'):
    '''
    Using the gray-zone values, collect the operation list of each gray zone, combine them into the
    operation table of the target set group, and compute each evaluation index.
    :return:
    '''
    symbol = symbolinfo.domain_symbol
    graydf = pd.read_csv(rawpath + 'ForwardOprAnalyze\\' + strategyName + ' ' + symbol + str(K_MIN) + 'multiTargetForwardSetname.csv',
                         index_col='Group')
    cols = graydf.columns.tolist()[3:]
    cols.append(nextmonth)
    groupResult = []
    closetime_col = columns['closetime_col']
    closeindex_col = columns['closeindex_col']
    closeprice_col = columns['closeprice_col']
    closeutc_col = columns['closeutc_col']
    retr_col = columns['retr_col']
    ret_col = columns['ret_col']
    cash_col = columns['cash_col']
    hands_col = columns['hands_col']
    for i in range(graydf.shape[0]):
        gray = graydf.iloc[i]
        oprdf = pd.DataFrame(columns=[
            'opentime', 'openutc', 'openindex', 'openprice', closetime_col, closeutc_col, closeindex_col,
            closeprice_col, 'tradetype', ret_col, retr_col, 'symbol'
        ])
        print gray.name, gray.Target, gray.Windows
        for l in range(len(cols) - 1):
            startmonth = cols[l]
            endmonth = cols[l + 1]
            setname = gray[startmonth]
            oprdf = pd.concat([
                oprdf,
                getOprlistByMonth(strategyName, rawpath, symbol, K_MIN, setname, startmonth, endmonth, columns, resultfilesuffix)
            ])
        oprdf = oprdf.reset_index(drop=True)
        oprdf['commission_fee'], oprdf['per earn'], oprdf[cash_col], oprdf[hands_col] = RS.calcResult(
            oprdf, symbolinfo, initialCash, positionRatio, ret_col)
        tofilename = ('%s %s%d_%s_win%d_oprResult.csv' % (strategyName, symbol, K_MIN, gray.Target, gray.Windows))
        oprdf.to_csv(rawpath + 'ForwardOprAnalyze\\' + tofilename)
        symbolDomainDic = symbolinfo.amendSymbolDomainDicByOpr(oprdf, closeutc_col=closeutc_col)
        barxm = DC.getDomainbarByDomainSymbol(symbolinfo.getSymbolList(), barxmdic, symbolDomainDic)
        dailyK = DC.generatDailyClose(barxm)  # build the daily K-line
        dR = RS.dailyReturn(symbolinfo, oprdf, dailyK, initialCash)  # compute the daily results
        dR.calDailyResult()
        tofilename = ('%s %s%d_%s_win%d_oprdailyResult.csv' % (strategyName, symbol, K_MIN, gray.Target, gray.Windows))
        dR.dailyClose.to_csv(rawpath + 'ForwardOprAnalyze\\' + tofilename)
        r = RS.getStatisticsResult(oprdf, indexcolsFlag, indexcols, dR.dailyClose)
        groupResult.append([gray.name, gray.Target, gray.Windows] + r)
    groupResultDf = pd.DataFrame(groupResult, columns=['Group', 'Target', 'Windows'] + indexcols)
    groupResultDf.to_csv(rawpath + 'ForwardOprAnalyze\\' + strategyName + ' ' + symbol + '_' + str(K_MIN) + '_groupOprResult.csv',
                         index=False)
def test_cannot_set_port(self):
    with self.assertRaises(AttributeError):
        data = DataInterface("00:00:00:00:00:00", "127.0.0.1", 12345)
        data.port = 222
def test_get_properties(self):
    data = DataInterface("00:00:00:00:00:00", "127.0.0.1", 12345)
    self.assertEqual(data.mac_addr, "00:00:00:00:00:00")
    self.assertEqual(data.ip_addr, "127.0.0.1")
    self.assertEqual(data.port, 12345)
def __init__(self, *args, **kwargs):
    self.data_interface = DataInterface()
    self.twitter_interface = TwitterInterface(self.data_interface)
    self.sniffer = Sniffer(self.data_interface)
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 24 16:55:58 2020

@author: dding
"""
from DataInterface import DataInterface as DI

if __name__ == "__main__":
    path = '//sjt-fs00/MaterialsTeam/ALL/Characterization Data/EEL'
    folders = {'root': path,
               'uploaded': path + '/' + '0_Uploaded',
               'failed': path + '/' + '0_Upload_Failed',
               'old': path + '/' + '0_Old'}
    test = DI.getInterface('laser', folders)
    test.process_files()
def test_cannot_set_ip_addr(self):
    with self.assertRaises(AttributeError):
        data = DataInterface("00:00:00:00:00:00", "127.0.0.1", 12345)
        data.ip_addr = "192.168.1.1"
def test_reserve(self):
    data = DataInterface("xx", "yy", 1)
    self.assertTrue(data.is_available)
    self.assertTrue(data.is_available)  # isn't changed by access
    data.reserve()
    self.assertFalse(data.is_available)
    data.reserve()
    self.assertFalse(data.is_available)
    data.release()
    self.assertTrue(data.is_available)
    data.release()
    self.assertTrue(data.is_available)
    data.reserve()
    self.assertFalse(data.is_available)  # reserve again
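The read-only-property and reserve/release tests above imply a small DataInterface shape. The following is a minimal sketch consistent with those tests, assuming plain @property accessors without setters; it is illustrative only, not the project's actual class.

class DataInterface(object):
    # Illustrative implementation: properties without setters raise AttributeError on assignment,
    # and reserve()/release() toggle availability idempotently.
    def __init__(self, mac_addr, ip_addr, port):
        self._mac_addr = mac_addr
        self._ip_addr = ip_addr
        self._port = port
        self._available = True

    @property
    def mac_addr(self):
        return self._mac_addr

    @property
    def ip_addr(self):
        return self._ip_addr

    @property
    def port(self):
        return self._port

    @property
    def is_available(self):
        return self._available

    def reserve(self):
        self._available = False

    def release(self):
        self._available = True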
strategy_folder = "%s%s\\" % (Parameter.root_path, strategy_name) for strategyParameter in strategy_bt_parameter: strategy_name = strategyParameter['strategy_name'] exchange_id = strategyParameter['exchange_id'] sec_id = strategyParameter['sec_id'] bar_type = strategyParameter['K_MIN'] startdate = strategyParameter['startdate'] enddate = strategyParameter['enddate'] symbol = '.'.join([exchange_id, sec_id]) result_para_dic = strategyParameter['result_para_dic'] forward_mode_dic = strategyParameter['forward_mode_dic'] symbol_info = DI.SymbolInfo(symbol, startdate, enddate) price_tick = symbol_info.getPriceTick() symbol_bar_folder_name = strategy_folder + "%s %s %s %d\\" % ( strategy_name, exchange_id, sec_id, bar_type) os.chdir(symbol_bar_folder_name) paraset_name = "%s %s %s %d Parameter.csv" % ( strategy_name, exchange_id, sec_id, bar_type) # 读取已有参数表 parasetlist = pd.read_csv(paraset_name)['Setname'].tolist() # 混合止损模式 sltlist = [] for sl_name, stop_loss in forward_mode_dic.items(): if sl_name != 'multi_sl' and sl_name != 'common': # 混合标志和普通模式标志都是不带参数的 stop_loss['price_tick'] = price_tick stop_loss_class = StopLoss.strategy_mapping_dic[sl_name]( stop_loss)
def getResult(strategyName, strategy_class, symbolinfo, K_MIN, rawdataDic, para, result_para_dic, indexcols, timestart):
    time1 = time.time()
    setname = para['Setname']
    print("%s %s %d %s Enter %.3f" % (strategyName, symbolinfo.domain_symbol, K_MIN, setname, time1 - timestart))
    initialCash = result_para_dic['initialCash']
    positionRatio = result_para_dic['positionRatio']
    remove_polar_switch = result_para_dic['remove_polar_switch']
    remove_polar_rate = result_para_dic['remove_polar_rate']
    symbollist = symbolinfo.getSymbolList()
    symbolDomainDic = symbolinfo.getSymbolDomainDic()
    result = pd.DataFrame()
    last_domain_utc = None
    for symbol in symbollist:
        if last_domain_utc:
            # If the previous contract's last close happened after its dominant period ended,
            # move this contract's start time to just after that close
            symbol_domain_start = last_domain_utc
            symbolDomainDic[symbol][0] = last_domain_utc
        else:
            symbol_domain_start = symbolDomainDic[symbol][0]
        symbol_domain_end = symbolDomainDic[symbol][1]
        rawdata = rawdataDic[symbol]
        r = strategy_class.run_trade_logic(symbolinfo, rawdata, para)
        r['symbol'] = symbol  # add the dominant-contract column
        r = r.loc[(r['openutc'] >= symbol_domain_start) & (r['openutc'] <= symbol_domain_end)]
        last_domain_utc = None
        if r.shape[0] > 0:
            last_close_utc = r.iloc[-1]['closeutc']
            if last_close_utc > symbol_domain_end:
                # If this contract's last close happened after its dominant period ended,
                # extend this contract's dominant end time to that close
                symbolDomainDic[symbol][1] = last_close_utc
                last_domain_utc = last_close_utc
        result = pd.concat([result, r])
    result.reset_index(drop=True, inplace=True)
    # Remove extreme values (outliers) if requested
    if remove_polar_switch:
        result = RS.opr_result_remove_polar(result, remove_polar_rate)
    # After all operations are processed, rebuild the dominant-contract bar series from the amended
    # dominant periods so that dailyK can be computed
    domain_bar = pd.DataFrame()
    for symbol in symbollist[:-1]:
        symbol_domain_start = symbolDomainDic[symbol][0]
        symbol_domain_end = symbolDomainDic[symbol][1]
        rbar = rawdataDic[symbol]
        bars = rbar.loc[(rbar['utc_time'] >= symbol_domain_start) & (rbar['utc_endtime'] < symbol_domain_end)]
        domain_bar = pd.concat([domain_bar, bars])
    # The last contract is only truncated at the start, not the end
    symbol = symbollist[-1]
    symbol_domain_start = symbolDomainDic[symbol][0]
    rbar = rawdataDic[symbol]
    bars = rbar.loc[rbar['utc_time'] >= symbol_domain_start]
    domain_bar = pd.concat([domain_bar, bars])
    dailyK = DI.generatDailyClose(domain_bar)
    result['commission_fee'], result['per earn'], result['own cash'], result['hands'] = RS.calcResult(
        result, symbolinfo, initialCash, positionRatio)
    bt_folder = "%s %d backtesting\\" % (symbolinfo.domain_symbol, K_MIN)
    result.to_csv(bt_folder + strategyName + ' ' + symbolinfo.domain_symbol + str(K_MIN) + ' ' + setname + ' result.csv', index=False)
    dR = RS.dailyReturn(symbolinfo, result, dailyK, initialCash)  # compute the daily results
    dR.calDailyResult()
    dR.dailyClose.to_csv(bt_folder + strategyName + ' ' + symbolinfo.domain_symbol + str(K_MIN) + ' ' + setname + ' dailyresult.csv')
    results = RS.getStatisticsResult(result, False, indexcols, dR.dailyClose)
    del result
    print results
    return [setname] + results  # prepend setname here
def get_forward(strategyName, symbolinfo, K_MIN, parasetlist, rawdatapath, startdate, enddate, colslist,
                result_para_dic, indexcolsFlag, resultfilesuffix):
    forward_window_set = range(Parameter.forwardWinStart, Parameter.forwardWinEnd + 1)  # white-zone window values
    nextmonth = enddate[0:7]
    symbol = symbolinfo.domain_symbol
    forwordresultpath = rawdatapath + '\\ForwardResults\\'
    forwardrankpath = rawdatapath + '\\ForwardRank\\'
    monthlist = [
        datetime.strftime(x, '%Y-%m')
        for x in list(pd.date_range(start=startdate, end=enddate, freq='M'))
    ]
    monthlist.append(nextmonth)
    os.chdir(rawdatapath)
    try:
        os.mkdir('ForwardResults')
    except:
        print 'ForwardResults already exist!'
    try:
        os.mkdir('ForwardRank')
    except:
        print 'ForwardRank already exist!'
    try:
        os.mkdir('ForwardOprAnalyze')
    except:
        print 'ForwardOprAnalyze already exist!'
    starttime = datetime.now()
    print starttime
    # Multiprocessing optimization: start a pool sized to the number of CPU cores (minus one)
    initialCash = result_para_dic['initialCash']
    positionRatio = result_para_dic['positionRatio']
    pool = multiprocessing.Pool(multiprocessing.cpu_count() - 1)
    l = []
    for whiteWindows in forward_window_set:
        # l.append(mtf.runPara(strategyName, whiteWindows, symbolinfo, K_MIN, parasetlist, monthlist, rawdatapath, forwordresultpath, forwardrankpath, colslist, resultfilesuffix))
        l.append(
            pool.apply_async(
                mtf.runPara,
                (strategyName, whiteWindows, symbolinfo, K_MIN, parasetlist, monthlist, rawdatapath,
                 forwordresultpath, forwardrankpath, colslist, resultfilesuffix)))
    pool.close()
    pool.join()
    mtf.calGrayResult(strategyName, symbol, K_MIN, forward_window_set, forwardrankpath, rawdatapath)
    indexcols = Parameter.ResultIndexDic
    # rawdata = DC.getBarData(symbol, K_MIN, monthlist[12] + '-01 00:00:00', enddate + ' 23:59:59').reset_index(drop=True)
    cols = ['open', 'high', 'low', 'close', 'strtime', 'utc_time', 'utc_endtime']
    barxmdic = DI.getBarDicAfterDomain(symbolinfo, K_MIN, cols)
    mtf.calOprResult(strategyName, rawdatapath, symbolinfo, K_MIN, nextmonth, colslist, barxmdic,
                     positionRatio, initialCash, indexcols, indexcolsFlag, resultfilesuffix)
    endtime = datetime.now()
    print starttime
    print endtime
import theano
import gzip
import cPickle as pkl
# from pylab import *
import numpy as np
import matplotlib.pyplot as plt

from DataInterface import DataInterface  # needed for DataInterface() below

floatX = theano.config.floatX

#==================#
## LOADING DATA ##
#==================#
dface = DataInterface()
x_train, x_valid, y_train, y_valid = dface.get_traindata(full=True)
x_test = dface.get_testdata()
labels = dface.classes
NUM_FEATURES = x_train.shape[1]
NUM_CLASSES = len(labels)

# Convert to theano types
x_train = x_train.astype(dtype=floatX)
x_valid = x_valid.astype(dtype=floatX)
x_test = x_test.astype(dtype=floatX)
y_train = y_train.astype(dtype=np.int32)
y_valid = y_valid.astype(dtype=np.int32)
from DataInterface import DataInterface
import random
from math import floor

import numpy as np

src = "/home/andrei/low-dose/DATASET-256 LOW-DOSE/15_projections/"
prop = [.7, .1, .2]

data = DataInterface(src)
scans = data.get_tomo_list()
random.shuffle(scans)

# floor() returns a float, so cast to int before using the results as slice indices
n_train = int(floor(len(scans) * prop[0]))
n_valid = int(floor(len(scans) * prop[1]))

training = scans[0:n_train]
validation = scans[n_train:n_train + n_valid]
test = scans[n_train + n_valid:len(scans)]

np.save("training", training)
np.save("validation", validation)
np.save("test", test)