def main():
    """Entry point: assemble the xmq/redis/mysql/exchange configuration and run start_md_calc."""
    root, names, files, _extras = parse_conf_args(
        __file__, config_names=["xmq", "redis", "mysql", "exchange"])
    context, conf = Configuration.load(
        base_dir=root, config_names=names, config_files=files)
    start_md_calc(context=context, conf=conf)
def main():
    """Entry point: load exchange/xmq configuration (with add-ons) and run fifth_level."""
    root, names, files, extras = parse_conf_args(__file__, config_names=["exchange", "xmq"])
    context, conf = Configuration.load(
        base_dir=root,
        config_names=names,
        config_files=files,
        add_ons=extras,
    )
    fifth_level(context, conf)
def main():
    """Entry point: load hosts/mysql configuration and assert sync_all_prepare_data succeeds."""
    cfg_base, cfg_names, cfg_files, _unused = parse_conf_args(
        __file__, config_names=["hosts", "mysql"])
    context, conf = Configuration.load(base_dir=cfg_base,
                                       config_names=cfg_names,
                                       config_files=cfg_files)
    process_assert(sync_all_prepare_data(context, conf))
def main():
    """Entry point: load exchange/xmq configuration and run makemarket_order."""
    root_dir, conf_names, conf_files, _add_ons = parse_conf_args(
        __file__, config_names=["exchange", "xmq"])
    context, conf = Configuration.load(
        base_dir=root_dir, config_names=conf_names, config_files=conf_files)
    makemarket_order(context, conf)
def main():
    """Entry point: load exchange/xmq configuration and run start_md_service."""
    parsed = parse_conf_args(__file__, config_names=["exchange", "xmq"])
    root, names, files, _extras = parsed
    context, conf = Configuration.load(base_dir=root, config_names=names, config_files=files)
    start_md_service(context, conf)
def main():
    """Entry point: load hosts configuration and run archive.tar_archive."""
    base, names, files, _unused_add_ons = parse_conf_args(
        __file__, config_names=["hosts"])
    context, conf = Configuration.load(base_dir=base,
                                       config_names=names,
                                       config_files=files)
    archive.tar_archive(context, conf)
def main():
    """Entry point: load mysql configuration and run start_http_server."""
    root, cfg_names, cfg_files, _extras = parse_conf_args(__file__, config_names=["mysql"])
    context, conf = Configuration.load(
        base_dir=root, config_names=cfg_names, config_files=cfg_files)
    start_http_server(context, conf)
def main():
    """Entry point: load mysql configuration and assert publish_etf succeeds."""
    base, names, files, _add_ons = parse_conf_args(
        __file__, config_names=["mysql"])
    context, conf = Configuration.load(base_dir=base,
                                       config_names=names,
                                       config_files=files)
    process_assert(publish_etf(context, conf))
def main():
    """Entry point: load hosts configuration and run rsync.rsync_groups."""
    args = parse_conf_args(__file__, config_names=["hosts"])
    root, names, files, _extras = args
    context, conf = Configuration.load(base_dir=root, config_names=names, config_files=files)
    rsync.rsync_groups(context, conf)
def main():
    """Entry point: load mysql configuration and run gen_robots."""
    conf_base, conf_names, conf_files, _unused = parse_conf_args(
        __file__, config_names=["mysql"])
    context, conf = Configuration.load(
        base_dir=conf_base, config_names=conf_names, config_files=conf_files)
    gen_robots(context, conf)
def main():
    """Entry point: load xmq configuration and start the pub/sub server."""
    base, names, files, _add_ons = parse_conf_args(__file__, config_names=["xmq"])
    context, conf = Configuration.load(base_dir=base,
                                       config_names=names,
                                       config_files=files)
    pubsub.PSServer.start_server(context, conf)
def main():
    """Entry point: load mysql configuration and assert settle_stock_userpwd succeeds."""
    root, cfg_names, cfg_files, _extras = parse_conf_args(
        __file__, config_names=["mysql"])
    context, conf = Configuration.load(
        base_dir=root, config_names=cfg_names, config_files=cfg_files)
    process_assert(settle_stock_userpwd(context, conf))
def main():
    """Entry point: load mysql configuration and print the current trading day."""
    parsed_args = parse_conf_args(__file__, config_names=["mysql"])
    base, names, files, _unused = parsed_args
    context, conf = Configuration.load(base_dir=base, config_names=names, config_files=files)
    print(get_current_tradingday(context, conf))
def main():
    """Entry point: load hosts configuration (with add-ons) and execute the shell command service."""
    root, names, files, extras = parse_conf_args(
        __file__, config_names=["hosts"])
    context, conf = Configuration.load(
        base_dir=root,
        config_names=names,
        config_files=files,
        add_ons=extras,
    )
    service_shell.execute_command(context, conf)
def main():
    """Entry point: load hosts/mysql configuration and sync the dump CSVs.

    If ``sync_dump_csvs`` returns -1 (fetch failed), the sync is retried
    indefinitely, sleeping ``time_await`` minutes between attempts.
    """
    base_dir, config_names, config_files, add_ons = parse_conf_args(
        __file__, config_names=["hosts", "mysql"])
    context, conf = Configuration.load(base_dir=base_dir,
                                       config_names=config_names,
                                       config_files=config_files)
    result = sync_dump_csvs(context, conf)
    # Failure handling: wait for the configured timeout, then retry.
    while result == -1:
        logger = log.get_logger(category="SyncDumpCsvs")
        logger.error("获取文件失败,%s分钟后重新获取" % str(conf.get("time_await")))
        # Renamed from "await": that identifier is a reserved keyword since
        # Python 3.7 and would be a SyntaxError on modern interpreters.
        wait_seconds = float(conf.get("time_await")) * 60
        time.sleep(wait_seconds)
        result = sync_dump_csvs(context, conf)
        # NOTE(review): tail of a method whose opening `if` lies outside this
        # view — writes header + rows (converted to GBK) to the open csvfile.
        else:
            writer = csv.writer(csvfile)
            writer.writerow(csv_tool.covert_to_gbk(columns['columns']))
            writer.writerows(csv_tool.covert_to_gbk(csv_data))
            self.logger.info("%s%s%s" % ("生成 ", table_name, ".csv 文件完成"))

    # Generate a .txt file: one entry per line under <csv_path>/md/<file_name>.txt.
    def __produce_txt(self, file_name, txt_data):
        self.logger.info("%s%s%s" % ("开始生成", file_name, ".txt"))
        _csv_path = "%s%s%s" % (str(self.csv_path), os.path.sep, "md")
        _path = "%s%s%s%s%s%s" % (str(
            self.csv_path), os.path.sep, "md", os.path.sep, file_name, '.txt')
        # Create the target directory first if it does not exist.
        if not os.path.exists(_csv_path):
            os.makedirs(_csv_path)
        with open(_path, 'wb') as txt_file:
            # txt_data rows are indexable; only the first field is written.
            for ins in txt_data:
                ins = str(ins[0])
                # NOTE(review): writing str to a 'wb' stream is Python 2 only;
                # on Python 3 this raises TypeError — confirm target runtime.
                txt_file.write(ins + '\n')
        self.logger.info("%s%s%s" % ("生成 ", file_name, ".txt 文件完成"))


if __name__ == '__main__':
    base_dir, config_names, config_files, add_ons = parse_conf_args(
        __file__, config_names=["mysql", "log", "csv"])
    context, conf = Configuration.load(base_dir=base_dir,
                                       config_names=config_names,
                                       config_files=config_files)
    # Launch the export script.
    exchange_stock_csv(context=context, configs=conf)
def main():
    """Entry point: load hosts configuration and assert sync_stock_broker_csvs succeeds."""
    base, names, files, _extras = parse_conf_args(__file__, config_names=["hosts"])
    context, conf = Configuration.load(
        base_dir=base, config_names=names, config_files=files)
    process_assert(sync_stock_broker_csvs(context, conf))
def main():
    """Entry point: load mysql/oracle configuration and run sync_rankable_activity_investors."""
    root, cfg_names, cfg_files, _unused = parse_conf_args(
        __file__, config_names=["mysql", "oracle"])
    context, conf = Configuration.load(base_dir=root,
                                       config_names=cfg_names,
                                       config_files=cfg_files)
    sync_rankable_activity_investors(context, conf)
def main():
    """Entry point: load hosts/mysql configuration and assert prepare_settle_future succeeds."""
    parsed = parse_conf_args(__file__, config_names=["hosts:hosts", "mysql"])
    base, names, files, _add_ons = parsed
    context, conf = Configuration.load(
        base_dir=base, config_names=names, config_files=files)
    process_assert(prepare_settle_future(context, conf))
    async def reload_config(self, ctx):
        """Reload configuration from disk and confirm in the invoking channel.

        Args:
            ctx: command context; used only to send the confirmation message.
        """
        # Re-reads the configuration; the result is not captured here, so
        # Configuration.load() presumably updates shared state — TODO confirm.
        Configuration.load()
        await ctx.send("Config file reloaded")