def main():
    """CLI entry point: create working directories, then dispatch on --mode.

    Modes: train, generate, backtest, save_test_data, plot, table.
    """
    parser = build_parser()
    options = parser.parse_args()
    # exist_ok=True is atomic w.r.t. concurrent runs; the original
    # os.path.exists + os.makedirs pair had a check-then-create race.
    os.makedirs("./train_package", exist_ok=True)
    os.makedirs("./database", exist_ok=True)
    if options.mode == "train":
        import pgportfolio.autotrain.training
        pgportfolio.autotrain.training.train_all(
            int(options.processes), options.device, options.initial_asset)
    elif options.mode == "generate":
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(load_config(), int(options.repeat))
    elif options.mode == "backtest":
        config = _config_by_algo(options.algo)
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG,
                             options.algo, "backtestlog")
        execute_backtest(options.algo, config)
    elif options.mode == "save_test_data":
        # This is used to export the test data
        save_test_data(load_config(options.folder))
    elif options.mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algos = options.algos.split(",")
        if options.labels:
            # Labels arrive as a comma-separated list; "_" stands for a space.
            labels = options.labels.replace("_", " ").split(",")
        else:
            labels = algos
        plot.plot_backtest(load_config(), algos, labels)
    elif options.mode == "table":
        algos = options.algos.split(",")
        if options.labels:
            labels = options.labels.replace("_", " ").split(",")
        else:
            labels = algos
        plot.table_backtest(load_config(), algos, labels,
                            format=options.format)
def main():
    """CLI entry point: create working directories, then dispatch on --mode.

    Modes: train, generate, download_data, backtest, save_test_data,
    plot, table.
    """
    parser = build_parser()
    options = parser.parse_args()
    # exist_ok=True is atomic w.r.t. concurrent runs; the original
    # os.path.exists + os.makedirs pair had a check-then-create race.
    os.makedirs("./train_package", exist_ok=True)
    os.makedirs("./database", exist_ok=True)
    if options.mode == "train":
        import pgportfolio.autotrain.training
        if not options.algo:
            pgportfolio.autotrain.training.train_all(
                int(options.processes), options.device)
        else:
            # BUG FIX: original iterated options.train_floder (typo) — the
            # parallel dispatcher in this file iterates options.folder.
            # Per-folder training is still unimplemented.
            for folder in options.folder:
                raise NotImplementedError()
    elif options.mode == "generate":
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(load_config(), int(options.repeat))
    elif options.mode == "download_data":
        from pgportfolio.marketdata.datamatrices import DataMatrices
        with open("./pgportfolio/net_config.json") as file:
            config = json.load(file)
        config = preprocess_config(config)
        # Convert the configured date range to epoch seconds.
        start = time.mktime(datetime.strptime(
            config["input"]["start_date"], "%Y/%m/%d").timetuple())
        end = time.mktime(datetime.strptime(
            config["input"]["end_date"], "%Y/%m/%d").timetuple())
        # Constructing DataMatrices with online=True triggers the download.
        DataMatrices(start=start,
                     end=end,
                     feature_number=config["input"]["feature_number"],
                     window_size=config["input"]["window_size"],
                     online=True,
                     period=config["input"]["global_period"],
                     volume_average_days=config["input"]["volume_average_days"],
                     coin_filter=config["input"]["coin_number"],
                     is_permed=config["input"]["is_permed"],
                     test_portion=config["input"]["test_portion"],
                     portion_reversed=config["input"]["portion_reversed"])
    elif options.mode == "backtest":
        config = _config_by_algo(options.algo)
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG,
                             options.algo, "backtestlog")
        execute_backtest(options.algo, config)
    elif options.mode == "save_test_data":
        # This is used to export the test data
        save_test_data(load_config(options.folder))
    elif options.mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algos = options.algos.split(",")
        if options.labels:
            # Labels arrive as a comma-separated list; "_" stands for a space.
            labels = options.labels.replace("_", " ").split(",")
        else:
            labels = algos
        plot.plot_backtest(load_config(), algos, labels)
    elif options.mode == "table":
        algos = options.algos.split(",")
        if options.labels:
            labels = options.labels.replace("_", " ").split(",")
        else:
            labels = algos
        plot.table_backtest(load_config(), algos, labels,
                            format=options.format)
def main():
    """Parse command-line arguments and execute the selected mode.

    Supported modes: train, generate, download_data, backtest,
    save_test_data, plot, table.
    """
    options = build_parser().parse_args()

    # Ensure the two working directories exist before doing anything else.
    for workdir in ("./train_package", "./database"):
        if not os.path.exists(workdir):
            os.makedirs(workdir)

    mode = options.mode
    if mode == "train":
        import pgportfolio.autotrain.training
        if options.algo:
            # Per-folder training has not been implemented yet.
            for folder in options.folder:
                raise NotImplementedError()
        else:
            pgportfolio.autotrain.training.train_all(
                int(options.processes), options.device)
    elif mode == "generate":
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(load_config(), int(options.repeat))
    elif mode == "download_data":
        from pgportfolio.marketdata.datamatrices import DataMatrices

        def to_epoch(date_string):
            # "YYYY/MM/DD" -> seconds since the epoch (local time).
            return time.mktime(
                datetime.strptime(date_string, "%Y/%m/%d").timetuple())

        with open("./pgportfolio/net_config.json") as cfg_file:
            raw_config = json.load(cfg_file)
        cfg = preprocess_config(raw_config)
        inp = cfg["input"]
        # Instantiating DataMatrices with online=True performs the download.
        DataMatrices(start=to_epoch(inp["start_date"]),
                     end=to_epoch(inp["end_date"]),
                     feature_number=inp["feature_number"],
                     window_size=inp["window_size"],
                     online=True,
                     period=inp["global_period"],
                     volume_average_days=inp["volume_average_days"],
                     coin_filter=inp["coin_number"],
                     is_permed=inp["is_permed"],
                     test_portion=inp["test_portion"],
                     portion_reversed=inp["portion_reversed"])
    elif mode == "backtest":
        algo_config = _config_by_algo(options.algo)
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG,
                             options.algo, "backtestlog")
        execute_backtest(options.algo, algo_config)
    elif mode == "save_test_data":
        # Export the test-set data.
        save_test_data(load_config(options.folder))
    elif mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algo_list = options.algos.split(",")
        # Underscores in --labels stand for spaces.
        label_list = (options.labels.replace("_", " ").split(",")
                      if options.labels else algo_list)
        plot.plot_backtest(load_config(), algo_list, label_list)
    elif mode == "table":
        algo_list = options.algos.split(",")
        label_list = (options.labels.replace("_", " ").split(",")
                      if options.labels else algo_list)
        plot.table_backtest(load_config(), algo_list, label_list,
                            format=options.format)
def main(logPath, device):
    """Entry point for a run rooted at logPath, dispatching on --mode.

    Args:
        logPath: base directory holding per-run folders (config, logs, nets).
            NOTE(review): paths are built with string concatenation, so
            logPath is expected to end with a separator — confirm at callers.
        device: compute device identifier forwarded to TraderTrainer.
    """
    parser = build_parser()
    options = parser.parse_args()
    # exist_ok=True is atomic w.r.t. concurrent runs; the original
    # os.path.exists + os.makedirs pair had a check-then-create race.
    os.makedirs("./database", exist_ok=True)
    if options.mode == "train":
        # Train the network described by this folder's net_config.json.
        if not options.algo:
            run_dir = logPath + str(options.folder)
            save_path = run_dir + "/netfile"
            # BUG FIX: original opened run_dir + "\\net_config.json"; the
            # backslash breaks on POSIX and every other path in this block
            # uses "/".
            with open(run_dir + "/net_config.json") as file:
                config_json = json.load(file)
            config = preprocess_config(config_json)
            log_file_dir = run_dir + "/tensorboard"
            # Everything goes to the program log file; INFO and above is
            # mirrored to the console.
            logging.basicConfig(
                filename=log_file_dir.replace("tensorboard", "programlog"),
                level=logging.DEBUG)
            console = logging.StreamHandler()
            console.setLevel(logging.INFO)
            logging.getLogger().addHandler(console)
            # Build the trainer and run training.
            trainer = TraderTrainer(config, options.stockList,
                                    options.featureList, options.start_date,
                                    options.end_date, save_path=save_path,
                                    device=device)
            trainer.train_net(log_file_dir=log_file_dir,
                              index=str(options.folder))
        else:
            # Per-folder training has not been implemented yet.
            for folder in options.folder:
                raise NotImplementedError()
    elif options.mode == "generate":
        # Generate config packages under logPath; to change the template,
        # edit pgportfolio/net_config.json.
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(logPath, load_config(), int(options.repeat))
    elif options.mode == "download_data":
        from pgportfolio.marketdata.datamatrices import DataMatrices
        with open("./pgportfolio/net_config.json") as file:
            config = json.load(file)
        config = preprocess_config(config)
        # Convert the requested date range to epoch seconds.
        start = time.mktime(
            datetime.strptime(options.start_date, "%Y/%m/%d").timetuple())
        end = time.mktime(
            datetime.strptime(options.end_date, "%Y/%m/%d").timetuple())
        # Constructing DataMatrices with online=True triggers the download.
        DataMatrices(
            start=start,
            end=end,
            feature_number=len(options.featureList),
            window_size=config["input"]["window_size"],
            online=True,
            period=config["input"]["global_period"],
            volume_average_days=config["input"]["volume_average_days"],
            # BUG FIX: options is an argparse Namespace, so the original
            # options["stockList"] raised TypeError; use attribute access.
            coin_filter=len(options.stockList),
            is_permed=config["input"]["is_permed"],
            test_portion=config["input"]["test_portion"],
            portion_reversed=config["input"]["portion_reversed"])
    elif options.mode == "backtest":
        # Load the algorithm's config, route logs to the backtest log file,
        # then run the backtest.
        config = _config_by_algo(options.algo)
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG,
                             options.algo, "backtestlog")
        execute_backtest(options.algo, config)
    elif options.mode == "save_test_data":
        # Export the test-set data.
        save_test_data(load_config(options.folder))
    elif options.mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algos = options.algos.split(",")
        if options.labels:
            # Labels arrive as a comma-separated list; "_" stands for a space.
            labels = options.labels.replace("_", " ").split(",")
        else:
            labels = algos
        plot.plot_backtest(load_config(), algos, labels)
    elif options.mode == "table":
        algos = options.algos.split(",")
        if options.labels:
            labels = options.labels.replace("_", " ").split(",")
        else:
            labels = algos
        plot.table_backtest(load_config(), algos, labels,
                            format=options.format)