# Example 1
def main():
    """Command-line entry point: dispatch on the selected run mode.

    NOTE(review): this excerpt appears truncated — the ``download_data``
    branch stops right after computing the date range.
    """
    parser = build_parser()
    options = parser.parse_args()
    # Ensure the working directories exist before any mode runs.
    if not os.path.exists('./train_package'):
        os.makedirs('./train_package')
    if not os.path.exists('./database'):
        os.makedirs('./database')

    if options.mode == 'train':
        import pgportfolio.autotrain.training
        if not options.algo:
            pgportfolio.autotrain.training.train_all(int(options.processes), options.device)
        else:
            # Training a single named algorithm is not supported yet.
            raise NotImplementedError()
    elif options.mode == 'generate':
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(load_config(), int(options.repeat))
    elif options.mode == 'download_data':
        from pgportfolio.marketdata.datamatrices import DataMatrices
        with open('./pgportfolio/net_config.json') as file:
            config = json.load(file)
        # BUG FIX: the loaded dict, not the file handle, must be preprocessed
        # (was ``preprocess_config(file)``).
        config = preprocess_config(config)
        # Read the start and end dates from the config's input section.
        start = time.mktime(datetime.strptime(config["input"]["start_date"], "%Y/%m/%d").timetuple())
        end = time.mktime(datetime.strptime(config["input"]["end_date"], "%Y/%m/%d").timetuple())
# Example 2
    def decide_by_history(self, x, last_b):
        """Return the cached weight vector, computing it on the first call.

        :param x: unused; kept to satisfy the common algorithm interface.
        :param last_b: unused; kept to satisfy the common algorithm interface.
        :return: the weight vector produced by ``self.get_weight``.
        """
        if self.last_b is None:
            # BUG FIX (dead code removed): the original loaded and
            # preprocessed net_config.json here but never used the result —
            # the data actually comes from ``self.test_relative``.
            # ``get_weight`` expects assets on the first axis, hence the
            # transpose.
            data = self.test_relative
            self.last_b = self.get_weight(data.T)

        return self.last_b
# Example 3
    def decide_by_history(self, x, last_b):
        """Return the cached weight vector, computing it on the first call.

        On the first invocation the test data set described by
        net_config.json is loaded and handed to ``self.get_weight``; the
        resulting vector is cached and returned on every later call.
        """
        if self.last_b is not None:
            return self.last_b

        # Lazy imports: the config/data machinery is only needed once.
        from pgportfolio.tools.trade import get_test_data
        from pgportfolio.tools.configprocess import preprocess_config
        import json
        with open("pgportfolio/net_config.json") as file:
            config = preprocess_config(json.load(file))
        # get_weight expects assets on the first axis, hence the transpose.
        self.last_b = self.get_weight(get_test_data(config).T)
        return self.last_b
# Example 4
 def decide_by_history(self, data, last_b=None):
     """Best-stock strategy: put all weight on the single asset with the
     highest cumulative return over the test set, computed once and cached.

     NOTE(review): the ``data`` argument is ignored — it is immediately
     overwritten with the test set loaded from net_config.json; confirm
     this shadowing is intended before relying on the parameter.
     """
     if self.last_b is None:
         # Lazy imports: config/data loading happens on the first call only.
         from pgportfolio.tools.trade import get_test_data
         from pgportfolio.tools.configprocess import preprocess_config
         import json
         with open("pgportfolio/net_config.json") as file:
             config = json.load(file)
         config = preprocess_config(config)
         data = get_test_data(config)
         data = data.T
         n, m = data.shape
         # Cumulative product of price relatives along time; the last row
         # holds each asset's total return over the whole period.
         tmp_cumprod_ret = np.cumprod(data, axis=0)
         best_ind = np.argmax(tmp_cumprod_ret[-1,:])
         # One-hot portfolio: everything on the best-performing asset.
         self.last_b = np.zeros(m)
         self.last_b[best_ind] = 1
     return self.last_b.ravel()
# Example 5
def main():
    """Command-line entry point: run the sub-command selected by --mode."""
    parser = build_parser()
    options = parser.parse_args()
    # Ensure the working directories exist before any mode runs.
    if not os.path.exists("./train_package"):
        os.makedirs("./train_package")
    if not os.path.exists("./database"):
        os.makedirs("./database")

    if options.mode == "train":
        import pgportfolio.autotrain.training
        if not options.algo:
            pgportfolio.autotrain.training.train_all(int(options.processes), options.device)
        else:
            # BUG FIX: was ``options.train_floder`` — a misspelled attribute
            # that does not exist on the parsed namespace (the sibling entry
            # points iterate ``options.folder``), so it raised AttributeError
            # before the intended NotImplementedError could fire.
            for folder in options.folder:
                raise NotImplementedError()
    elif options.mode == "generate":
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(load_config(), int(options.repeat))
    elif options.mode == "download_data":
        from pgportfolio.marketdata.datamatrices import DataMatrices
        with open("./pgportfolio/net_config.json") as file:
            config = json.load(file)
        config = preprocess_config(config)
        # Convert the configured date range to Unix timestamps.
        start = time.mktime(datetime.strptime(config["input"]["start_date"], "%Y/%m/%d").timetuple())
        end = time.mktime(datetime.strptime(config["input"]["end_date"], "%Y/%m/%d").timetuple())
        # Constructing DataMatrices downloads/caches the data as a side effect.
        DataMatrices(start=start,
                     end=end,
                     feature_number=config["input"]["feature_number"],
                     window_size=config["input"]["window_size"],
                     online=True,
                     period=config["input"]["global_period"],
                     volume_average_days=config["input"]["volume_average_days"],
                     coin_filter=config["input"]["coin_number"],
                     is_permed=config["input"]["is_permed"],
                     test_portion=config["input"]["test_portion"],
                     portion_reversed=config["input"]["portion_reversed"])
    elif options.mode == "backtest":
        config = _config_by_algo(options.algo)
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG, options.algo, "backtestlog")
        execute_backtest(options.algo, config)
    elif options.mode == "save_test_data":
        # This is used to export the test data.
        save_test_data(load_config(options.folder))
    elif options.mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algos = options.algos.split(",")
        if options.labels:
            # "_" in --labels stands for a space.
            labels = options.labels.replace("_", " ")
            labels = labels.split(",")
        else:
            labels = algos
        plot.plot_backtest(load_config(), algos, labels)
    elif options.mode == "table":
        algos = options.algos.split(",")
        if options.labels:
            labels = options.labels.replace("_", " ")
            labels = labels.split(",")
        else:
            labels = algos
        plot.table_backtest(load_config(), algos, labels, format=options.format)
# Example 6
    # NOTE(review): truncated fragment — the enclosing `if execute_mode ==
    # "train":` header and the tail of the DataMatrices(...) call fall outside
    # this excerpt, so the first branch's indentation does not line up with
    # the elif clauses below.
    import pgportfolio.autotrain.training
    if not options_algo:
        # Train every generated package using the requested worker count.
        pgportfolio.autotrain.training.train_all(int(options_processes),
                                                 options_device)
    else:
        # Per-folder training of a named algorithm is not implemented.
        for folder in options_folder:
            raise NotImplementedError()
elif execute_mode == "generate":
    # Create train packages from the master net_config.json.
    import pgportfolio.autotrain.generate as generate
    logging.basicConfig(level=logging.INFO)
    generate.add_packages(load_config(), int(options_repeat))
elif execute_mode == "download_data":
    from pgportfolio.marketdata.datamatrices import DataMatrices
    with open("./pgportfolio/net_config.json") as file:
        config = json.load(file)
    config = preprocess_config(config)
    # Convert the configured date range to Unix timestamps.
    start = time.mktime(
        datetime.strptime(config["input"]["start_date"],
                          "%Y/%m/%d").timetuple())
    end = time.mktime(
        datetime.strptime(config["input"]["end_date"], "%Y/%m/%d").timetuple())
    # Constructing DataMatrices downloads/caches the data as a side effect.
    DataMatrices(start=start,
                 end=end,
                 feature_number=config["input"]["feature_number"],
                 window_size=config["input"]["window_size"],
                 online=True,
                 period=config["input"]["global_period"],
                 volume_average_days=config["input"]["volume_average_days"],
                 coin_filter=config["input"]["coin_number"],
                 is_permed=config["input"]["is_permed"],
                 test_portion=config["input"]["test_portion"],
# Example 7
def main():
    """Command-line entry point: run the sub-command selected by --mode."""
    options = build_parser().parse_args()

    # The working directories must exist before any mode runs.
    for directory in ("./train_package", "./database"):
        if not os.path.exists(directory):
            os.makedirs(directory)

    def _labels(algo_names):
        # Labels come from --labels ("_" stands for a space), defaulting to
        # the algorithm names themselves.
        if options.labels:
            return options.labels.replace("_", " ").split(",")
        return algo_names

    if options.mode == "train":
        import pgportfolio.autotrain.training
        if not options.algo:
            pgportfolio.autotrain.training.train_all(int(options.processes),
                                                     options.device)
        else:
            # Per-folder training of a named algorithm is not implemented.
            for folder in options.folder:
                raise NotImplementedError()
    elif options.mode == "generate":
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        generate.add_packages(load_config(), int(options.repeat))
    elif options.mode == "download_data":
        from pgportfolio.marketdata.datamatrices import DataMatrices
        with open("./pgportfolio/net_config.json") as file:
            config = preprocess_config(json.load(file))
        # Convert the configured date range to Unix timestamps.
        date_format = "%Y/%m/%d"
        start = time.mktime(datetime.strptime(config["input"]["start_date"],
                                              date_format).timetuple())
        end = time.mktime(datetime.strptime(config["input"]["end_date"],
                                            date_format).timetuple())
        net_input = config["input"]
        # Constructing DataMatrices downloads/caches the data as a side effect.
        DataMatrices(start=start,
                     end=end,
                     feature_number=net_input["feature_number"],
                     window_size=net_input["window_size"],
                     online=True,
                     period=net_input["global_period"],
                     volume_average_days=net_input["volume_average_days"],
                     coin_filter=net_input["coin_number"],
                     is_permed=net_input["is_permed"],
                     test_portion=net_input["test_portion"],
                     portion_reversed=net_input["portion_reversed"])
    elif options.mode == "backtest":
        config = _config_by_algo(options.algo)
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG, options.algo,
                             "backtestlog")
        execute_backtest(options.algo, config)
    elif options.mode == "save_test_data":
        # This is used to export the test data.
        save_test_data(load_config(options.folder))
    elif options.mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algo_names = options.algos.split(",")
        plot.plot_backtest(load_config(), algo_names, _labels(algo_names))
    elif options.mode == "table":
        algo_names = options.algos.split(",")
        plot.table_backtest(load_config(), algo_names, _labels(algo_names),
                            format=options.format)
# Example 8
    def __init__(self,
                 start,
                 end,
                 period,
                 market,
                 feature_number,
                 features_list,
                 batch_size=50,
                 volume_average_days=30,
                 buffer_bias_ratio=0,
                 coin_filter=1,
                 window_size=50,
                 test_portion=0.15,
                 portion_reversed=False,
                 online=False,
                 is_permed=False):
        """Build the global price matrix, the PVM and the replay buffer.

        :param start: Unix time of the first observation.
        :param end: Unix time of the last observation.
        :param period: sampling period of the global price matrix.
        :param market: data source name. NOTE(review): this argument is
            ignored — it is overwritten below with
            ``config["input"]["market_type"]``; confirm that is intended.
        :param feature_number: number of input features per asset.
        :param features_list: names of the features to retrieve.
        :param batch_size: mini-batch size used by the replay buffer.
        :param volume_average_days: days over which trading volume is
            averaged when selecting assets.
        :param buffer_bias_ratio: sampling bias of the replay buffer.
        :param coin_filter: number of coins/securities to select.
        :param window_size: number of periods in each input window.
        :param test_portion: portion of the data reserved for the test set.
        :param portion_reversed: if False the data order is [train, test],
            otherwise [test, train].
        :param online: if True, fetch fresh data instead of relying on the
            local cache.
        :param is_permed: if False, the samples inside a mini-batch stay in
            chronological order.
        """

        # Read the run configuration; some of its values override arguments.
        with open("./pgportfolio/net_config.json") as file:
            config = json.load(file)
        config = preprocess_config(config)
        market = config["input"]["market_type"]
        start = int(start)
        self.__end = int(end)
        # assert window_size >= MIN_NUM_PERIOD
        self.__coin_no = coin_filter
        self.__features = features_list
        self.feature_number = feature_number
        volume_forward = get_volume_forward(self.__end - start, test_portion,
                                            portion_reversed)
        # Pick the history manager for the configured market; every branch
        # must produce self.__global_data (a coin/feature-columned frame).
        if market == "poloniex":
            self.__history_manager = gdm.HistoryManager(
                coin_number=coin_filter,
                end=self.__end,
                volume_average_days=volume_average_days,
                volume_forward=volume_forward,
                online=online)
            self.__global_data = self.__history_manager.get_global_dataframe(
                start, self.__end, period=period, features=features_list)
        elif market == "yahoo":
            # NOTE(review): the config is re-read here although it was already
            # loaded above; presumably harmless duplication.
            with open("./pgportfolio/net_config.json") as file:
                config = json.load(file)
            config = preprocess_config(config)
            stock_data = config["input"]["stocks"]
            self.__history_manager = sgdm.StockHistoryManager(
                coin_number=coin_filter,
                end=self.__end,
                stocks=stock_data,
                volume_average_days=volume_average_days,
                volume_forward=volume_forward,
                online=online)

            self.__global_data = self.__history_manager.get_global_dataframe(
                start, self.__end, features_list, stock_data)
        elif market == "alphaVantage":
            with open("./pgportfolio/net_config.json") as file:
                config = json.load(file)
            config = preprocess_config(config)
            stock_data = config["input"][
                "stocks"]  # contains user defined securities
            api_key = config["input"]["api_key"]  # user Alpha Vantage API Key
            api_call_limit = config["input"][
                "api_call_limit"]  # user Alpha Vantage call limit
            api_interval = config["input"][
                "api_interval"]  # time interval for the data
            # initialize stock history manager class
            self.__history_manager = avgdm.AlphaVantageHistoryManager(
                coin_number=coin_filter,
                end=self.__end,
                online=online,
                stocks=stock_data,
                api_key=api_key,
                api_call_limit=api_call_limit,
                api_interval=api_interval,
                volume_average_days=volume_average_days,
                volume_forward=volume_forward)
            # return a dataframe of all securities data and corresponding tech. ind.
            self.__global_data = self.__history_manager.get_global_dataframe(
                start, self.__end, online, features_list, stock_data, api_key,
                api_call_limit, api_interval)
            # fill_dates_alphaVantage()
        else:
            raise ValueError("market {} is not valid".format(market))

        # Go from [coins*features, index] to [features, coins, index].
        self.raw = self.__global_data.values.reshape(
            len(self.__global_data.index),
            len(self.__global_data.columns.levels[0]),
            len(self.__global_data.columns.levels[1]),
        )
        self.raw = self.raw.transpose(2, 1, 0)
        self.__period_length = period
        # portfolio vector memory, [time, assets]
        self.__PVM = pd.DataFrame(
            index=self.__global_data.index,  #time index
            columns=self.__global_data.columns.levels[0]
        )  #first level is coin names
        # Start from the uniform portfolio at every time step.
        self.__PVM = self.__PVM.fillna(1.0 / self.__coin_no)

        # NOTE(review): leftover debug print — consider removing or logging.
        print(self.__PVM)
        self._window_size = window_size
        self._num_periods = len(self.__global_data.index)
        self.__divide_data(test_portion, portion_reversed)

        self._portion_reversed = portion_reversed
        self.__is_permed = is_permed

        self.__batch_size = batch_size
        self.__delta = 0  # the count of global increased
        end_index = self._train_ind[-1]
        self.__replay_buffer = rb.ReplayBuffer(start_index=self._train_ind[0],
                                               end_index=end_index,
                                               sample_bias=buffer_bias_ratio,
                                               batch_size=self.__batch_size,
                                               coin_number=self.__coin_no,
                                               is_permed=self.__is_permed)

        logging.info("the number of training examples is %s"
                     ", of test examples is %s" %
                     (self._num_train_samples, self._num_test_samples))
        logging.debug("the training set is from %s to %s" %
                      (min(self._train_ind), max(self._train_ind)))
        logging.debug("the test set is from %s to %s" %
                      (min(self._test_ind), max(self._test_ind)))
# Example 9
def main(logPath, device):
    """Command-line entry point dispatching on ``options.mode``.

    :param logPath: base directory that holds the per-run train packages.
    :param device: compute device identifier handed to the trainer.
    """
    parser = build_parser()
    options = parser.parse_args()
    # Expected option attributes (normally injected by the caller/parser):
    #   options.folder, options.stockList, options.featureList,
    #   options.start_date, options.end_date
    if not os.path.exists("./database"):
        os.makedirs("./database")
    if options.mode == "train":  # train a network for one package
        if not options.algo:
            package_dir = logPath + str(options.folder)
            save_path = package_dir + "/netfile"
            # Load the per-package configuration.
            # BUG FIX: was "\\net_config.json" — a Windows-only separator mixed
            # with the "/" used everywhere else in this function; "/" works on
            # Windows as well.
            with open(package_dir + "/net_config.json") as file:
                config_json = json.load(file)
            config = preprocess_config(config_json)
            log_file_dir = package_dir + "/tensorboard"
            # Logging: DEBUG to the program log file, INFO to the console.
            logfile_level = logging.DEBUG
            console_level = logging.INFO
            logging.basicConfig(filename=log_file_dir.replace(
                "tensorboard", "programlog"),
                                level=logfile_level)
            console = logging.StreamHandler()
            console.setLevel(console_level)
            logging.getLogger().addHandler(console)
            # Build the trainer and run the training loop.
            trainer = TraderTrainer(config,
                                    options.stockList,
                                    options.featureList,
                                    options.start_date,
                                    options.end_date,
                                    save_path=save_path,
                                    device=device)
            trainer.train_net(log_file_dir=log_file_dir,
                              index=str(options.folder))
        else:
            # Training a named algorithm per folder is not supported yet.
            for folder in options.folder:
                raise NotImplementedError()

    # Generate train packages into logPath; edit
    # PGPortfolio/pgportfolio/net_config.json to change the settings.
    elif options.mode == "generate":
        import pgportfolio.autotrain.generate as generate
        logging.basicConfig(level=logging.INFO)
        # Reuse the loaded config instead of calling load_config() twice.
        config_ = load_config()
        generate.add_packages(logPath, config_, int(options.repeat))
    elif options.mode == "download_data":
        from pgportfolio.marketdata.datamatrices import DataMatrices
        with open("./pgportfolio/net_config.json") as file:
            config = json.load(file)
        config = preprocess_config(config)
        # Convert the requested date range to Unix timestamps.
        start = time.mktime(
            datetime.strptime(options.start_date, "%Y/%m/%d").timetuple())
        end = time.mktime(
            datetime.strptime(options.end_date, "%Y/%m/%d").timetuple())
        # Constructing DataMatrices downloads/caches the data as a side effect.
        DataMatrices(
            start=start,
            end=end,
            feature_number=len(options.featureList),
            window_size=config["input"]["window_size"],
            online=True,
            period=config["input"]["global_period"],
            volume_average_days=config["input"]["volume_average_days"],
            # BUG FIX: ``options`` is an argparse Namespace, not a mapping —
            # ``options["stockList"]`` raised TypeError; use attribute access.
            coin_filter=len(options.stockList),
            is_permed=config["input"]["is_permed"],
            test_portion=config["input"]["test_portion"],
            portion_reversed=config["input"]["portion_reversed"])
    elif options.mode == "backtest":
        config = _config_by_algo(options.algo)  # load the algorithm's config
        _set_logging_by_algo(logging.DEBUG, logging.DEBUG, options.algo,
                             "backtestlog")  # route logs to the backtest log
        # Backtest runs for as many steps as the test set is long.
        execute_backtest(options.algo, config)
    elif options.mode == "save_test_data":
        # This is used to export the test data.
        save_test_data(load_config(options.folder))
    elif options.mode == "plot":
        logging.basicConfig(level=logging.INFO)
        algos = options.algos.split(",")
        if options.labels:
            # "_" in --labels stands for a space.
            labels = options.labels.replace("_", " ")
            labels = labels.split(",")
        else:
            labels = algos
        plot.plot_backtest(load_config(), algos, labels)
    elif options.mode == "table":
        algos = options.algos.split(",")
        if options.labels:
            labels = options.labels.replace("_", " ")
            labels = labels.split(",")
        else:
            labels = algos
        plot.table_backtest(load_config(),
                            algos,
                            labels,
                            format=options.format)
# Example 10
            # NOTE(review): truncated fragment — the enclosing method header
            # lies outside this excerpt; the weight vector is computed once
            # from the configured test data and then cached in self.last_b.
            import json
            with open("pgportfolio/net_config.json") as file:
                config = json.load(file)
            config = preprocess_config(config)
            # get_weight expects assets on the first axis, hence the transpose.
            data = get_test_data(config)
            self.last_b = self.get_weight(data.T)

        return self.last_b


def opt_weights(X, max_leverage=1):
    """Find the constant-rebalanced portfolio maximizing terminal wealth.

    :param X: 2-D array of per-period price relatives,
        shape (periods, assets).
    :param max_leverage: total weight budget; the weights are non-negative
        and constrained to sum to this value.
    :return: optimal weight vector of length ``X.shape[1]``.
    """
    n_assets = X.shape[1]
    # Start from the uniform portfolio scaled to the leverage budget.
    x_0 = max_leverage * np.ones(n_assets) / float(n_assets)

    # PEP 8: a named def instead of a lambda bound to a name.
    def objective(b):
        # Negative cumulative wealth; minimizing it maximizes final wealth.
        return -np.prod(X.dot(b))

    cons = ({'type': 'eq', 'fun': lambda b: max_leverage - np.sum(b)},)
    bnds = [(0., max_leverage)] * n_assets
    res = minimize(objective, x_0, bounds=bnds, constraints=cons,
                   method='slsqp', options={'ftol': 1e-07})
    return res.x


if __name__ == '__main__':
    # Smoke test: run BCRP once over the configured test data set.
    from pgportfolio.tools.backtest import get_test_data
    from pgportfolio.tools.configprocess import preprocess_config
    import json

    with open("pgportfolio/net_config.json") as file:
        raw_config = json.load(file)
    processed = preprocess_config(raw_config)
    test_data = get_test_data(processed)
    # get_weight expects assets on the first axis, hence the transpose.
    result = BCRP().get_weight(test_data.T)