def __save_phone_trade_list(phone_trade_list):
    """Persist the given phone-trade records into the host 'history' DB,
    stamping each record's update_time just before it is added."""
    history_session = server_constant.get_server_model('host').get_db_session('history')
    for trade_record in phone_trade_list:
        # Timestamp is taken per record, at the moment it is staged.
        trade_record.update_time = date_utils.get_now()
        history_session.add(trade_record)
    history_session.commit()
def download_depositplat_log_check(job_name):
    """Verify that today's tradeplat log was fully downloaded from every
    deposit server by comparing the remote file size with the local backup.

    Sends an alert email for each candidate file that fails the size check,
    and a "Log File Missing" alert when no file passed for a server.

    :param job_name: scheduling job name, used in the alert email subject
    """
    deposit_servers_list = server_constant.get_deposit_servers()
    download_day_str = date_utils.get_today_str()
    for server_name in deposit_servers_list:
        server_model = server_constant.get_server_model(server_name)
        # The server model doubles as the FTP client for remote size lookups.
        ftp_server = server_model
        source_folder_path = server_model.ftp_download_folder
        source_date_folder_path = '%s/%s' % (source_folder_path,
                                             download_day_str)
        log_save_path = LOG_BACKUP_FOLDER_TEMPLATE % server_name
        check_flag = False
        for file_name in os.listdir(log_save_path):
            if 'tradeplat_log_%s' % download_day_str in file_name:
                source_file_path = '%s/%s' % (source_date_folder_path,
                                              file_name)
                check_file_path = '%s/%s' % (log_save_path, file_name)
                # NOTE(review): log_save_path was already listed above, so
                # this exists() check looks redundant — confirm intent.
                if os.path.exists(log_save_path):
                    if os.path.exists(check_file_path):
                        # Equal sizes -> download complete for this server.
                        if long(ftp_server.get_size(source_file_path)) == long(
                                os.stat(check_file_path).st_size):
                            check_flag = True
                            break
                # Reached when the local copy is absent or sizes differ.
                email_utils1.send_email_group_all(
                    '[ERROR]After Check_Job:%s' % job_name,
                    'Download Error.File:%s Download Fail!' % check_file_path)
        if not check_flag:
            # No matching file passed the size check for this server.
            email_utils1.send_email_group_all(
                '[ERROR]After Check_Job:%s' % job_name,
                'Download Error.Log File Missing!')
def insert_statement_info():
    """Import the ch_selection fund statement CSV (GBK-encoded fields) into
    the jobs DB's statement_info table, merging row by row.

    Fixes: the CSV handle is now closed deterministically via a context
    manager (the original used file() and never closed it), and the header
    row is skipped with next() instead of a manual counter.
    """
    statement_info_list = []
    with open(u"./衍盛中港精选.csv", 'rb') as csv_file:
        reader = csv.reader(csv_file)
        next(reader, None)  # skip the header row
        for line in reader:
            statement_info = StatementInfo()
            statement_info.fund_name = 'ch_selection'
            statement_info.fund = u'衍盛中港精选'
            # Text columns are GBK-encoded bytes in the source file.
            statement_info.account = line[0].decode("gbk")
            statement_info.date = line[1]
            statement_info.type = line[3].decode("gbk")
            # Empty confirm date means "not yet confirmed" -> NULL.
            statement_info.confirm_date = line[2] if line[2] != '' else None
            statement_info.net_asset_value = __format_number(line[4])
            statement_info.request_money = __format_number(line[5])
            statement_info.confirm_money = __format_number(line[6])
            statement_info.confirm_units = __format_number(line[7])
            # Total fee is the sum of the two fee columns.
            statement_info.fee = __format_number(line[8]) + __format_number(
                line[9])
            statement_info.performance_pay = __format_number(line[10])
            statement_info_list.append(statement_info)
    host_server_model = server_constant.get_server_model('host')
    session_jobs = host_server_model.get_db_session('jobs')
    for statement_info_db in statement_info_list:
        session_jobs.merge(statement_info_db)
    session_jobs.commit()
def restart_mktdtcenter_service():
    """Restart MktDTCenter and HFCalculator on every stock trade server."""
    for candidate_name in server_constant.get_stock_servers():
        candidate_model = server_constant.get_server_model(candidate_name)
        # Only trade servers run these two market-data services.
        if candidate_model.type != 'trade_server':
            continue
        restart_server_service(candidate_name, 'MktDTCenter')
        restart_server_service(candidate_name, 'HFCalculator')
def pkill_server_service(server_name, service_name):
    """Stop a single service on *server_name* by running pkill remotely."""
    target_model = server_constant.get_server_model(server_name)
    target_model.run_cmd_str('pkill %s' % service_name)
def stop_server_service(server_name, service_name):
    """Stop a single service by quitting its screen session (-X quit)."""
    target_model = server_constant.get_server_model(server_name)
    quit_cmd = 'screen -r %s -X quit' % service_name
    target_model.run_cmd_str(quit_cmd)
def upgrade_server_tradeplat(server_name, upgrade_file_path):
    """Upload a tradeplat release tarball to a server, unpack it under
    <tradeplat_project_folder>/bin and re-point the build64_release symlink.

    :param server_name: target server key for server_constant
    :param upgrade_file_path: local path of the .tar.gz release bundle
    :return: True when every remote step was issued without raising,
             False when any exception occurred (traceback is printed)
    """
    upgrade_flag = False
    try:
        server_model = server_constant.get_server_model(server_name)
        upgrade_file_name = os.path.basename(upgrade_file_path)
        source_file_path = upgrade_file_path
        target_folder_path = '%s/bin' % server_model.server_path_dict[
            'tradeplat_project_folder']
        server_model.upload_file(
            source_file_path, '%s/%s' % (target_folder_path,
                                         upgrade_file_name))
        cmd_list = [
            'cd %s' % target_folder_path,
            'tar -zxvf %s' % upgrade_file_name,
            'rm %s' % upgrade_file_name,
            'cd ..',
            'rm build64_release',
            # NOTE(review): this 'ln -s' has no explicit link name; it relies
            # on ln defaulting the link name to the target's basename
            # ('build64_release' in the cwd) — confirm that is the intent.
            'ln -s bin/%s/build64_release' % upgrade_file_name.split('.')[0]
        ]
        server_model.run_cmd_str(';'.join(cmd_list))
        upgrade_flag = True
    except Exception:
        error_msg = traceback.format_exc()
        print error_msg
    return upgrade_flag
def change_future_main_contract(): query_params = request.json print query_params contract_change_parameter = query_params.get('contract_change_parameter') if not contract_change_parameter: return make_response(jsonify(code=200, message=u'没有换月合约', data={}), 200) contract_change_parameter_list = contract_change_parameter.split('\n') server_host = server_constant.get_server_model('host') session_common = server_host.get_db_session('common') for item in contract_change_parameter_list: if len(item) == 0: continue temp_data = item.split(',') if len(temp_data) != 6: session_common.close() err_msg = '%s:换月合约参数不正确' % item return make_response(jsonify(code=200, message=err_msg, data={}), 200) obj = FutureMainContract() obj.ticker_type = temp_data[1] obj.pre_main_symbol = temp_data[2] obj.main_symbol = temp_data[3] obj.next_main_symbol = temp_data[4] obj.exchange_id = temp_data[5] obj.update_flag = 1 session_common.merge(obj) session_common.commit() session_common.close() return make_response(jsonify(code=200, message='换月合约参数插入成功', data={}), 200)
def __init__(self, check_server='host'):
    """Cache the tradable future symbol list and the instrument table read
    from *check_server*'s common DB; start with an empty error-ticker map."""
    self.__trading_future_list = self.__query_future_main_contract()
    check_model = server_constant.get_server_model(check_server)
    session_common = check_model.get_db_session('common')
    self.__instrument_df = self.__query_instrument_df(session_common)
    self.__error_ticker_dict = dict()
def save_intraday_parameter():
    """Merge the posted intraday strategy parameters into the jobs DB.

    Rejects any item with an empty fund_name (code=100) before committing.
    Fixes: the original leaked the DB session on the early error return;
    dead commented-out code removed.
    """
    params = request.json
    intraday_parameter_data = params.get('params')
    server_host = server_constant.get_server_model('host')
    session_jobs = server_host.get_db_session('jobs')
    for item in intraday_parameter_data:
        obj = StrategyIntradayParameter()
        obj.fund_name = item['fund_name']
        if obj.fund_name == '':
            # Release the session before bailing out; pending merges are
            # discarded, matching the original no-commit behavior.
            session_jobs.close()
            return make_response(jsonify(code=100, message=u'fund_name不能为空'),
                                 200)
        obj.parameter_value = item['parameter_value']
        obj.strategy_name = item['strategy_name']
        obj.parameter = item['parameter']
        session_jobs.merge(obj)
    session_jobs.commit()
    session_jobs.close()
    return make_response(jsonify(code=200, message=u"保存成功"), 200)
def asset_value_history_detail():
    """Return one product's asset-value history as parallel lists (dates,
    NAV, unit net, cumulative net, real capital), sorted by date.

    Security fix: the product name comes straight from the request, so it is
    now bound as a SQL parameter instead of being %-interpolated into the
    statement (SQL injection risk in the original).
    """
    query_params = request.json
    fundname = query_params.get('params')
    server_host = server_constant.get_server_model('host')
    session = server_host.get_db_session('jobs')
    sql = ("select `unit_net`,`net_asset_value`,`product_name`,`date_str`,"
           "`sum_value`,`real_capital`,`nav_change` from asset_value_info "
           "where `product_name`=:product_name")
    date_utils = DateUtils()
    data = []
    for line in session.execute(sql, {'product_name': fundname}):
        data.append(dict(
            unit_net=float(line[0]),
            net_asset_value=float('%.2f' % float(line[1])),
            product_name=line[2],
            date=date_utils.datetime_toString(line[3]),
            sum_value=float(line[4]),
            real_capital=float('%.2f' % float(line[5])),
            nav_change=float('%.4f' % float(line[6])),
        ))
    session.close()
    data.sort(key=lambda item: item['date'])
    result = {
        'date_list': [item['date'] for item in data],
        'asset_value_list': [item['net_asset_value'] for item in data],
        'unit_net_list': [item['unit_net'] for item in data],
        'sum_unit_net_list': [item['sum_value'] for item in data],
        'real_capital_list': [item['real_capital'] for item in data],
    }
    return make_response(jsonify(code=200, data=result), 200)
def query_fund_info_df(query_date_str):
    """Join per-fund unit net values for *query_date_str* with fund display
    names from the jobs DB.

    Returns a DataFrame with columns fund_name / unit_net / fund, or an
    empty list when no asset values exist for that date (callers rely on
    this list sentinel).
    """
    host_server_model = server_constant.get_server_model('host')
    session_jobs = host_server_model.get_db_session('jobs')
    assetvalue_rows = [record.to_dict() for record in
                       session_jobs.query(AssetValueInfo).filter(
                           AssetValueInfo.date_str == query_date_str)]
    assetvalue_info_df = pd.DataFrame(assetvalue_rows)
    if len(assetvalue_info_df) == 0:
        return []
    assetvalue_info_df["unit_net"] = assetvalue_info_df["unit_net"].astype(float)
    assetvalue_info_df.rename(columns={'product_name': 'fund_name'},
                              inplace=True)
    fund_rows = [record.to_dict() for record in session_jobs.query(FundInfo)]
    fund_info_df = pd.DataFrame(fund_rows)
    fund_info_df.rename(columns={'name': 'fund_name', 'name_chinese': 'fund'},
                        inplace=True)
    return pd.merge(assetvalue_info_df[['fund_name', 'unit_net']],
                    fund_info_df[['fund_name', 'fund']],
                    how='left', on=['fund_name']).fillna(0)
def save_statement_info():
    """Create (no id) or update (with id) a StatementInfo row from the
    request payload and merge it into the jobs DB."""
    params = request.json
    record_id = params.get('id')
    server_model = server_constant.get_server_model('host')
    session_job = server_model.get_db_session('jobs')
    if record_id:
        statement_info_db = session_job.query(StatementInfo).filter(
            StatementInfo.id == record_id).first()
    else:
        statement_info_db = StatementInfo()
    # All editable columns are copied 1:1 from the payload.
    for field in ('date', 'fund', 'account', 'type', 'confirm_date',
                  'net_asset_value', 'request_money', 'confirm_money',
                  'confirm_units', 'fee', 'performance_pay'):
        setattr(statement_info_db, field, params.get(field))
    session_job.merge(statement_info_db)
    session_job.commit()
    return make_response(jsonify(code=200, data=u"保存成功"), 200)
def save_account_fund():
    """Create (no id) or update (with id) a FundAccountInfo row from the
    request payload and merge it into the jobs DB.

    Robustness fix: 'inclusion_strategy_list' may be absent/None in the
    payload — the original ','.join(None) raised TypeError.
    """
    params = request.json
    server_host = server_constant.get_server_model('host')
    session_jobs = server_host.get_db_session('jobs')
    record_id = params.get('id')
    if record_id:
        fund_account_info_db = session_jobs.query(FundAccountInfo).filter(
            FundAccountInfo.id == record_id).first()
    else:
        fund_account_info_db = FundAccountInfo()
    # Plain scalar columns copied 1:1 from the payload.
    for field in ('account_name', 'product_name', 'type', 'broker', 'server',
                  'service_charge', 'hedging_limit', 'investor',
                  'matters_attention', 'margin_trading', 'copper_options',
                  'kechuang_plate', 'describe'):
        setattr(fund_account_info_db, field, params.get(field))
    # Tolerate a missing strategy list instead of crashing on join(None).
    fund_account_info_db.inclusion_strategy = ','.join(
        params.get('inclusion_strategy_list') or [])
    session_jobs.merge(fund_account_info_db)
    session_jobs.commit()
    return make_response(
        jsonify(code=200, data=u"保存基金账户:%s成功" % params.get('account_name')),
        200)
def query_fund_change():
    """Return one page of fund-change records, optionally filtered by fund id
    and sorted by a client-chosen column (default: by date)."""
    query_params = request.json
    query_fund_id = query_params.get('fund_id')
    server_host = server_constant.get_server_model('host')
    session_jobs = server_host.get_db_session('jobs')
    fund_info_dict = {fund_db.id: fund_db
                      for fund_db in session_jobs.query(FundInfo)}
    fund_change_list = []
    for change_db in session_jobs.query(FundChangeInfo):
        if query_fund_id and query_fund_id != change_db.fund_id:
            continue
        change_item = change_db.to_dict()
        change_item['date'] = change_db.date.strftime('%Y-%m-%d')
        change_item['fund_name'] = fund_info_dict[change_db.fund_id].name
        fund_change_list.append(change_item)
    sort_prop = query_params.get('sort_prop')
    sort_order = query_params.get('sort_order')
    if sort_prop:
        # NB: the UI's 'ascending' maps to reverse=True throughout this module.
        fund_change_list.sort(key=lambda item: item[sort_prop],
                              reverse=(sort_order == 'ascending'))
    else:
        fund_change_list.sort(key=lambda item: item['date'])
    page = int(query_params.get('page'))
    size = int(query_params.get('size'))
    paged_list = fund_change_list[(page - 1) * size:page * size]
    query_result = {'data': paged_list, 'total': len(fund_change_list)}
    return make_response(jsonify(code=200, data=query_result), 200)
def load_message_fils(self):
    """Read the stock-selection message files for this server's operation
    type (close/add/change) and build one order per file."""
    if self.__operation_type == operation_enums.Close:
        folder_suffix = 'close'
    elif self.__operation_type == operation_enums.Add:
        folder_suffix = 'add'
    elif self.__operation_type == operation_enums.Change:
        folder_suffix = 'change'
    else:
        raise Exception("Error operation_type:%s" % self.__operation_type)
    server_model = server_constant.get_server_model(self.__server_name)
    file_folder = '%s/%s/%s_%s' % (STOCK_SELECTION_FOLDER, self.__server_name,
                                   self.__date_str2, folder_suffix)
    connect_ip = server_model.connect_address.replace('tcp://', '')
    for file_name in os.listdir(file_folder):
        if not file_name.endswith('.txt'):
            continue
        # Name layout: strategy-group-fund@ip-subalgo-peglevel-extra.txt
        (strategy_name, group_name, fund_item, sub_algo_type, peg_level,
         _unused) = file_name.replace('.txt', '').split('-')
        fund_name, server_ip = fund_item.split('@')
        algo_strategy = '%s.%s' % (group_name, strategy_name)
        fund_group = '%s@%s' % (fund_name, connect_ip)
        with open(os.path.join(file_folder, file_name)) as fr:
            symbol_content_list = [raw_line.replace('\n', '')
                                   for raw_line in fr.readlines()]
        self.__build_order(algo_strategy, fund_group, sub_algo_type,
                           peg_level, symbol_content_list)
def update_strategy_online_check():
    """Build a checklist of which backtest/parameter files exist for every
    enabled CTA strategy.

    Returns JSON with ``th_list`` (table-header tree: one column per checked
    folder, with per-server children for 'server_parameter') and ``td_data``
    (one row per strategy, True/False per column).
    """
    server_list = server_constant.get_cta_servers()
    root_path = const.EOD_CONFIG_DICT['source_backtest_info_path']
    # (folder name, expected file suffix, per-server sub-folders)
    folder_list = [('backtest_info_str', '.csv', []),
                   ('backtest_parameter_str', '.txt', []),
                   ('server_parameter', '.txt', server_list)]
    th_list = []
    path_list = []
    for (folder_name, file_type, children_path_list) in folder_list:
        if len(children_path_list) == 0:
            tmp_dic = {'label': folder_name, 'prop': '', 'children': []}
            path = '%s/%s' % (root_path, folder_name)
            path_list.append(path)
        else:
            tmp_dic = {'label': folder_name, 'prop': '', 'children': []}
            for children_path_name in children_path_list:
                prop_str = '%s_%s' % (folder_name, children_path_name)
                children_dict = {
                    'label': children_path_name,
                    'prop': prop_str,
                    'children': []
                }
                children_path = '%s/%s/%s' % (root_path, folder_name,
                                              children_path_name)
                path_list.append(children_path)
                tmp_dic['children'].append(children_dict)
        th_list.append(tmp_dic)
    td_data = []
    server_host = server_constant.get_server_model('host')
    session_strategy = server_host.get_db_session('strategy')
    for strategy_online_db in session_strategy.query(StrategyOnline).filter(
            StrategyOnline.enable == 1,
            StrategyOnline.strategy_type == 'CTA'):
        strategy_name = strategy_online_db.name
        tmp_data_dic = {'strategy_name': strategy_name}
        for (folder_name, file_type, children_path_list) in folder_list:
            file_name = strategy_name + file_type
            if len(children_path_list) == 0:
                # NOTE(review): '%s%s' joins root_path and folder_name with
                # no '/', unlike the '%s/%s' used above for path_list — this
                # only works if root_path ends with '/'; confirm.
                file_path = '%s%s/%s' % (root_path, folder_name, file_name)
                tmp_data_dic_key = folder_name
                if os.path.exists(file_path):
                    tmp_data_dic[tmp_data_dic_key] = True
                else:
                    tmp_data_dic[tmp_data_dic_key] = False
            else:
                for children_path_name in children_path_list:
                    file_path = '%s%s/%s/%s' % (root_path, folder_name,
                                                children_path_name, file_name)
                    tmp_data_dic_key = '%s_%s' % (folder_name,
                                                  children_path_name)
                    if os.path.exists(file_path):
                        tmp_data_dic[tmp_data_dic_key] = True
                    else:
                        tmp_data_dic[tmp_data_dic_key] = False
        td_data.append(tmp_data_dic)
    result = {'th_list': th_list, 'td_data': td_data}
    return make_response(jsonify(code=200, data=result), 200)
def query_future_main_contract():
    """List future main-contract records, update-flagged rows first, with an
    optional ticker-type filter and client-chosen sorting."""
    query_params = request.json
    ticker_type = query_params.get('ticker_type')
    sort_prop = query_params.get('sort_prop')
    sort_order = query_params.get('sort_order')
    server_host = server_constant.get_server_model('host')
    session_common = server_host.get_db_session('common')
    data = []
    for record in session_common.query(FutureMainContract):
        data.append({
            'ticker_type': record.ticker_type,
            'exchange_id': record.exchange_id,
            'pre_main_symbol': record.pre_main_symbol,
            'main_symbol': record.main_symbol,
            'next_main_symbol': record.next_main_symbol,
            'night_flag': bool(int(record.night_flag)),
            'update_flag': bool(int(record.update_flag)),
        })
    session_common.close()
    # Flagged contracts float to the top; sorts are stable, so a later user
    # sort keeps that order among equal keys.
    data.sort(key=lambda item: item['update_flag'], reverse=True)
    if sort_prop:
        # NB: the UI's 'ascending' maps to reverse=True throughout this module.
        data.sort(key=lambda item: item[sort_prop],
                  reverse=(sort_order == 'ascending'))
    if ticker_type:
        data = [item for item in data if item['ticker_type'] == ticker_type]
    result = {'data': data}
    return make_response(jsonify(code=200, message=u'参数修改成功', data=result),
                         200)
def update_index_price(): try: filter_date_str = date_utils.get_today_str('%Y-%m-%d') with StockWindUtils() as stock_wind_utils: ticker_type_list = [ const.INSTRUMENT_TYPE_ENUMS.Index, ] index_ticker_list = stock_wind_utils.get_ticker_list( ticker_type_list) prev_close_dict = stock_wind_utils.get_prev_close_dict( filter_date_str, index_ticker_list) all_local_server_list = server_constant.get_all_local_servers() for server_name in all_local_server_list: server_model = server_constant.get_server_model(server_name) session_common = server_model.get_db_session('common') for index_db in session_common.query(Instrument).filter( Instrument.type_id == const.INSTRUMENT_TYPE_ENUMS.Index): if index_db.ticker not in prev_close_dict: print 'Error ticker:' % index_db.ticker continue if str(prev_close_dict[index_db.ticker]) == 'nan': continue index_db.prev_close = prev_close_dict[index_db.ticker] session_common.merge(index_db) session_common.commit() except Exception: error_msg = traceback.format_exc() custom_log.log_info_task(error_msg) email_utils.send_email_group_all('[Error]update_index_price Fail!', error_msg) return make_response(jsonify(code=100, data=u'执行失败'), 200) return make_response(jsonify(code=200, data=u'指数行情更新成功'), 200)
def rerun_stkintraday_jobs():
    """Re-run the intraday stock pipeline for one server: optional DB
    refresh (deposit servers), optional deep-learning init, multifactor
    init with warning email, and upload for deposit servers."""
    try:
        params = request.json
        server_name = params.get('server_name')
        download_sql_flag = params.get('download_sql_flag')
        deeplearning_run_flag = params.get('deeplearning_run_flag')
        server_model = server_constant.get_server_model(server_name)
        is_deposit_server = server_model.type == 'deposit_server'
        if download_sql_flag == 'Yes' and is_deposit_server:
            update_deposit_server_db_job((server_name, ),
                                         ['common', 'portfolio'])
        if deeplearning_run_flag == 'Yes':
            stock_deeplearning_init_job(server_name)
            index_deeplearning_init_job(server_name)
        email_list1, email_list2 = [], []
        strategy_multifactor_init_job(server_name, email_list1, email_list2)
        if len(email_list1) > 0:
            email_utils.send_email_group_all('[Warning]Algo File Build Report',
                                             ''.join(email_list1), 'html')
        if is_deposit_server:
            upload_deposit_server_job((server_name, ))
    except Exception:
        error_msg = traceback.format_exc()
        custom_log.log_info_task(error_msg)
        email_utils.send_email_group_all('[Error]rerun_stkintraday_jobs Fail!',
                                         error_msg)
        return make_response(jsonify(code=100, data=u'执行失败'), 200)
    return make_response(jsonify(code=200, data=u'执行成功'), 200)
def start_service_check(job_name):
    """After a start job, verify every expected service is active on each
    server in scope and email one consolidated report of inactive services.

    Robustness fix: a server with no AppInfo rows no longer raises KeyError
    (dict.get with an empty default).

    :param job_name: 'start_server_am' or 'start_server_pm'; anything else
                     triggers an "Undefined Job Name" alert and returns.
    """
    if job_name == 'start_server_am':
        server_list = server_constant.get_trade_servers()
    elif job_name == 'start_server_pm':
        server_list = server_constant.get_night_session_servers()
    else:
        email_utils2.send_email_group_all(
            '[ERROR]After Check_Job:%s' % job_name,
            'Undefined Job Name:%s' % job_name)
        return
    # Map server_name -> list of expected service names from app_info.
    server_service_dict = dict()
    server_host = server_constant.get_server_model('host')
    session_common = server_host.get_db_session('common')
    for sever_info_db in session_common.query(AppInfo):
        server_service_dict.setdefault(sever_info_db.server_name,
                                       []).append(sever_info_db.app_name)
    server_host.close()
    error_message_list = []
    for server_name in server_list:
        # .get(): a server absent from app_info simply has nothing to check.
        service_status_dict = __query_server_service_status(
            server_name, server_service_dict.get(server_name, []))
        for service_name, detached_flag in service_status_dict.items():
            if not detached_flag:
                error_message_list.append('Server:%s Service:%s Is Inactive' %
                                          (server_name, service_name))
    if error_message_list:
        email_utils2.send_email_group_all(
            '[ERROR]After Check_Job:%s' % job_name,
            '\n'.join(error_message_list))
def __build_instrument_dict():
    """Populate the module-level instrument_dict with minbar holders for all
    type-4/6 instruments read from the host common DB."""
    host_model = server_constant.get_server_model('host')
    session_common = host_model.get_db_session('common')
    for record in session_common.query(Instrument).filter(
            Instrument.type_id.in_((4, 6))):
        instrument_dict[record.ticker] = Instrument_Minbar_Info(
            record.ticker, record.prev_close)
def __get_instrument_dict():
    """Return {ticker: Instrument} for all type-4 instruments on the server
    named by ``server_name``.

    NOTE(review): ``server_name`` is not a parameter here — it is resolved
    from an enclosing/module scope; confirm it is defined wherever this
    helper is used.
    """
    instrument_dict = dict()
    server_model = server_constant.get_server_model(server_name)
    session_common = server_model.get_db_session('common')
    query = session_common.query(Instrument)
    for instrument_db in query.filter(Instrument.type_id == 4):
        instrument_dict[instrument_db.ticker] = instrument_db
    return instrument_dict
def __enter__(self):
    """Start the Wind session and cache the full instrument table keyed by
    ticker; returns self for use as a context manager."""
    self.w = w_ys()
    host_model = server_constant.get_server_model('host')
    session_common = host_model.get_db_session('common')
    for record in session_common.query(Instrument):
        self.instrument_dict[record.ticker] = record
    return self
def server_service_rum_cmd(server_name, service_name, cmd_str):
    """Send *cmd_str* into the named screen session on *server_name* through
    the remote screen_tools.py helper."""
    target_model = server_constant.get_server_model(server_name)
    python_folder = target_model.server_path_dict['server_python_folder']
    remote_cmds = [
        'cd %s' % python_folder,
        '/home/trader/anaconda2/bin/python screen_tools.py -s %s -c "%s"'
        % (service_name, cmd_str),
    ]
    target_model.run_cmd_str(';'.join(remote_cmds))
def query_strategy():
    """Return one page of online strategies filtered by name, instance name
    and enable flag, each decorated with its valid CTA target servers and
    grouping sub-name."""
    query_params = request.json
    query_name = query_params.get('name')
    query_instance_name = query_params.get('instance_name')
    query_enable = query_params.get('enable')
    server_host = server_constant.get_server_model('host')
    session_strategy = server_host.get_db_session('strategy')
    grouping_by_name = {grouping_db.strategy_name: grouping_db
                        for grouping_db in
                        session_strategy.query(StrategyGrouping)}
    strategy_online_list = []
    for strategy_db in session_strategy.query(StrategyOnline):
        if query_name and query_name.lower() not in strategy_db.name.lower():
            continue
        if query_instance_name and \
                query_instance_name not in strategy_db.instance_name:
            continue
        if query_enable != '' and query_enable != strategy_db.enable:
            continue
        # Keep only target servers that are currently valid CTA servers.
        cta_server_list = server_constant.get_cta_servers()
        target_server_list = []
        if strategy_db.target_server is not None and \
                strategy_db.target_server != '':
            target_server_list = [
                name for name in strategy_db.target_server.split('|')
                if name in cta_server_list
            ]
        strategy_item = strategy_db.to_dict()
        strategy_item['target_server_list'] = target_server_list
        strategy_item['grouping_sub_name'] = grouping_by_name[
            strategy_db.strategy_name].sub_name
        strategy_online_list.append(strategy_item)
    sort_prop = query_params.get('sort_prop')
    sort_order = query_params.get('sort_order')
    if sort_prop:
        # NB: the UI's 'ascending' maps to reverse=True throughout this module.
        strategy_online_list.sort(key=lambda item: item[sort_prop],
                                  reverse=(sort_order == 'ascending'))
    else:
        strategy_online_list.sort(key=lambda item: item['name'])
    page = int(query_params.get('page'))
    size = int(query_params.get('size'))
    paged_list = strategy_online_list[(page - 1) * size:page * size]
    query_result = {'data': paged_list, 'total': len(strategy_online_list)}
    return make_response(jsonify(code=200, data=query_result), 200)
def __query_order_df(self):
    """Load the 09:00-09:30 stock orders from the aggregation DB into
    ``self.__order_df``, adding cashflow, broker fee-rate, cost and net-cash
    columns per order.
    """
    server_model = server_constant.get_server_model('local118')
    session_aggregation = server_model.get_db_session('aggregation')
    query_sql = "select * from aggregation.order where CREATE_TIME >= '%s' and CREATE_TIME <= '%s'" % \
        ('%s 09:00:00' % self.__start_date, '%s 09:30:00' % self.__start_date)
    data_list = []
    for x in session_aggregation.execute(query_sql):
        item_list = list(x)
        # Column 5 is '<ticker> <suffix>'; keep purely numeric tickers only.
        ticker = item_list[5].split(' ')[0]
        if not ticker.isdigit():
            continue
        # Column 21 is the numeric algo type; filter then map to its name.
        if int(item_list[21]) not in include_algo_types:
            continue
        item_list[21] = algo_type_dict[int(item_list[21])]
        # Wind code convention: leading '6' -> Shanghai, otherwise Shenzhen.
        ticker_wind = '%s.SH' % ticker if ticker[
            0] == '6' else '%s.SZ' % ticker
        item_list.append(ticker_wind)
        data_list.append(item_list)
    columns = [
        'id', 'server_name', 'sys_id', 'account', 'hedge_flag', 'symbol',
        'direction', 'type', 'trade_type', 'status', 'op_status', 'property',
        'create_time', 'transaction_time', 'user_id', 'strategy_id',
        'parent_ord_id', 'qty', 'price', 'ex_qty', 'ex_price', 'algo_type',
        'ticker_wind'
    ]
    order_df = pd.DataFrame(data_list, columns=columns)
    order_df = order_df[[
        'create_time', 'symbol', 'direction', 'ex_qty', 'ex_price',
        'strategy_id', 'server_name', 'account', 'algo_type', 'ticker_wind'
    ]]
    # Signed cash movement: buys (direction=1) consume cash, sells release it.
    order_df['cashflow'] = order_df['ex_qty'] * order_df['ex_price'] * (
        -1) * order_df['direction']
    order_df['feerate'] = np.nan
    # Fee schedule masks: broker (guosen/huabao), buy/sell, and for guosen a
    # split by symbol number (> 399999 vs <= 399999).
    b1 = order_df['server_name'] == 'guosen'
    b2 = order_df['server_name'] == 'huabao'
    b3 = order_df['direction'] == 1
    b4 = order_df['direction'] == -1
    b5 = order_df['symbol'] > 399999
    b6 = order_df['symbol'] <= 399999
    # NOTE(review): Decimal(0.0001811) carries float representation error;
    # Decimal('0.0001811') would be exact — confirm before changing.
    order_df.loc[b1 & b3 & b5, 'feerate'] = Decimal(0.0001811)
    order_df.loc[b1 & b3 & b6, 'feerate'] = Decimal(0.0001556)
    order_df.loc[b1 & b4 & b5, 'feerate'] = Decimal(0.0011811)
    order_df.loc[b1 & b4 & b6, 'feerate'] = Decimal(0.0011556)
    order_df.loc[b2 & b3, 'feerate'] = Decimal(0.000252)
    order_df.loc[b2 & b4, 'feerate'] = Decimal(0.001252)
    # Replace the numeric direction with its readable form via direct().
    order_df['direction'] = order_df['direction'].apply(
        lambda x: direct(x))
    order_df['amt'] = order_df['ex_qty'] * order_df['ex_price']
    order_df['cost'] = order_df['amt'] * order_df['feerate']
    order_df['netcash'] = order_df['cashflow'] - order_df['cost']
    self.__order_df = order_df
def start_service_omaproxy(server_name):
    """Start the omaproxy service on *server_name* via its start script."""
    target_model = server_constant.get_server_model(server_name)
    project_folder = target_model.server_path_dict['omaproxy_project_folder']
    target_model.run_cmd_str(';'.join(['cd %s' % project_folder,
                                       './start.omaproxy.sh']))
def __query_future_main_contract(self):
    """Return a deduplicated list of all previous/current/next main-contract
    symbols from the host common DB."""
    host_model = server_constant.get_server_model('host')
    session_common = host_model.get_db_session('common')
    symbol_set = set()
    for record in session_common.query(FutureMainContract):
        symbol_set.update((record.pre_main_symbol, record.main_symbol,
                           record.next_main_symbol))
    return list(symbol_set)
def __query_avg_price_dict(self):
    """Cache {wind_code: avg_price} for the trade date in
    ``self.__avg_price_dict``, read from the Wind dump DB."""
    wind_model = server_constant.get_server_model('wind_db')
    session_dump_wind = wind_model.get_db_session('dump_wind')
    query_sql = ("select S_INFO_WINDCODE, S_DQ_AVGPRICE from ASHAREEODPRICES "
                 "where TRADE_DT = '%s'" % self.__start_date2)
    self.__avg_price_dict = dict(
        (row[0], row[1]) for row in session_dump_wind.execute(query_sql))