Example #1
0
def load_data(path_data="data/cache",
              augmento_coin=None,
              augmento_source=None,
              binance_symbol=None,
              dt_bin_size=None,
              datetime_start=None,
              datetime_end=None,
              augmento_api_key=None):
    """Load time-aligned augmento sentiment data and exchange price data.

    Missing days are fetched in batches and cached under *path_data*; the
    cached days are then loaded and both series are stripped to their common
    time range.

    Parameters
    ----------
    path_data : str
        Root directory of the on-disk cache.
    augmento_coin, augmento_source : str
        Coin and source selectors for the augmento sentiment API (required).
    binance_symbol : str
        Exchange symbol for the price data (required).
    dt_bin_size : int
        Bin size in seconds for both data sets (required).
    datetime_start, datetime_end : datetime.datetime
        Inclusive time range to load (required); *datetime_end* is clamped
        to "now" so we never request future data.
    augmento_api_key : str, optional
        Accepted for API compatibility but currently unused in this function.

    Returns
    -------
    tuple
        (t_aug_data, t_bin_data, aug_data, bin_data, aug_keys, bin_keys)

    Raises
    ------
    Exception
        If any required parameter is missing (None).
    """

    # Validate the required arguments BEFORE touching them: the previous
    # version clamped datetime_end with min() first, which raised a
    # TypeError on None instead of this intended, explicit error.
    if None in [
            binance_symbol, augmento_coin, augmento_source, dt_bin_size,
            datetime_start, datetime_end
    ]:
        raise Exception("missing required param(s) in load_data()")

    # never request data from the future
    datetime_end = min(datetime.datetime.now(), datetime_end)

    # specify the path for the augmento data cache
    path_augmento_data = "{:s}/augmento/{:s}/{:s}/{:d}".format(
        path_data, augmento_source, augmento_coin, dt_bin_size)
    path_augmento_topics = "{:s}/augmento/".format(path_data)

    # specify the path for the price data cache
    # NOTE(review): the variable is named "binance" but the cache path points
    # at "kraken" (the binance path is kept below for reference) — confirm
    # which exchange lbdh actually fetches from.
    #path_binance_data = "{:s}/binance/{:s}/{:d}".format(path_data, binance_symbol, dt_bin_size)
    path_binance_data = "{:s}/kraken/{:s}/{:d}".format(path_data,
                                                       binance_symbol,
                                                       dt_bin_size)

    # make sure all the paths exist
    ioh.check_path(path_augmento_data, create_if_not_exist=True)
    ioh.check_path(path_binance_data, create_if_not_exist=True)

    # check which days of data exist for the augmento data and binance data
    augmento_dates = dh.list_file_dates_for_path(path_augmento_data,
                                                 ".msgpack.zlib", "%Y%m%d")
    binance_dates = dh.list_file_dates_for_path(path_binance_data,
                                                ".msgpack.zlib", "%Y%m%d")

    # drop any cached days from the last 3 days, so recent (possibly still
    # changing) data is re-downloaded
    datetime_now = datetime.datetime.now()
    recency_cutoff = dh.add_days_to_datetime(datetime_now, -3)
    augmento_dates = [el for el in augmento_dates if el < recency_cutoff]
    binance_dates = [el for el in binance_dates if el < recency_cutoff]

    # get a list of the days we need
    required_dates = dh.get_datetimes_between_datetimes(
        datetime_start, datetime_end)

    # get a list of the days we're missing for augmento and binance data
    augmento_missing_dates = sorted(set(required_dates) - set(augmento_dates))
    binance_missing_dates = sorted(set(required_dates) - set(binance_dates))

    # group the missing days into contiguous batches so each batch can be
    # fetched with a single ranged request
    augmento_missing_batches = find_missing_date_batches(
        augmento_missing_dates, required_dates)
    binance_missing_batches = find_missing_date_batches(
        binance_missing_dates, required_dates)

    # load the augmento keys (topic names)
    aug_keys = ladh.load_keys(path_augmento_topics)

    # load the binance keys
    bin_keys = lbdh.load_keys()

    # for each missing batch of augmento data, fetch it and cache it
    # (end date is exclusive, hence the +1 day on the last batch date)
    for abds in augmento_missing_batches:
        ladh.load_and_cache_data(path_augmento_data, augmento_source,
                                 augmento_coin, dt_bin_size, abds[0],
                                 dh.add_days_to_datetime(abds[-1], 1))

    # for each missing batch of price data, fetch it and cache it
    for bbds in binance_missing_batches:
        lbdh.load_and_cache_data(path_binance_data, binance_symbol,
                                 dt_bin_size, bbds[0],
                                 dh.add_days_to_datetime(bbds[-1], 1))

    # load the (now complete) cached data
    t_aug_data, aug_data = ladh.load_cached_data(path_augmento_data,
                                                 datetime_start, datetime_end)
    t_bin_data, bin_data = lbdh.load_cached_data(path_binance_data,
                                                 datetime_start, datetime_end)

    # strip both series to the overlapping time range
    t_min = max(
        [t_aug_data[0], t_bin_data[0],
         dh.datetime_to_epoch(datetime_start)])
    t_max = min(
        [t_aug_data[-1], t_bin_data[-1],
         dh.datetime_to_epoch(datetime_end)])
    t_aug_data, aug_data = strip_data_by_time(t_aug_data, aug_data, t_min,
                                              t_max)
    t_bin_data, bin_data = strip_data_by_time(t_bin_data, bin_data, t_min,
                                              t_max)

    return t_aug_data, t_bin_data, aug_data, bin_data, aug_keys, bin_keys
		'wallet':pnl[i],
	}
	funding_results['data'].append(day)

# print(json.dumps(final_data, indent=4))



# Saving the final json

# output location for the funding results file
path_save_data = "results"
filename_funding_results = path_save_data + "/funding_results.json"

# make sure the output directory exists before we try to write into it
ioh.check_path(path_save_data, create_if_not_exist=True)

# serialise the accumulated funding results to disk
print("saving data to " + filename_funding_results)
with open(filename_funding_results, "w") as f:
    json.dump(funding_results, f)

print("Saved!")




# set up the figure: one column of 4 stacked subplots that share the x-axis
# (so panning/zooming in time stays aligned) but keep independent y-scales
fig, ax = plt.subplots(4, 1, sharex=True, sharey=False)

# plot stuff