def write_hourly_volume_index_to_csv(print_components=0):
    """Build a global hourly BTC volume index from five exchanges and write it to CSV.

    Minute-resolution data from Bitstamp (USD), Coinbase (USD), Korbit
    (KRW), Kraken (EUR) and Coincheck (JPY) is aggregated to hourly
    volumes, the five hourly series are summed onto one common timeline,
    and the result is written to
    ``data/export_csv/global_hourly_volume_index.csv`` (';'-separated,
    header Time;Volume).

    Parameters
    ----------
    print_components : int, optional
        When 1, print each exchange's approximate share of the total
        volume.  The original code computed the total under this flag but
        left every print commented out, so the flag had no visible
        effect; the intended printout is restored here.
    """
    # Exchange codes in the original's declaration order:
    # 0=bitstamp, 1=coinbase, 2=korbit, 3=kraken, 4=coincheck.
    exchange_codes = ["bitstampusd", "coinbaseusd", "korbitkrw",
                      "krakeneur", "coincheckjpy"]
    hour_times = []    # per-exchange hourly timestamp lists
    hour_volumes = []  # per-exchange hourly volume lists
    for code in exchange_codes:
        _, times_m, prices_m, volumes_m = di.get_list(exc=code)
        # Hourly prices are not needed for the volume index; discard them.
        times_h, _, volumes_h = dis.convert_to_hour(times_m, prices_m,
                                                    volumes_m)
        hour_times.append(times_h)
        hour_volumes.append(volumes_h)

    # Merge onto a common hourly timeline.  The argument order mirrors the
    # original exactly: coinbase+bitstamp first, then korbit, then kraken,
    # and finally coincheck passed as the *first* series of the last call.
    time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
        hour_times[1], hour_volumes[1], hour_times[0], hour_volumes[0])
    for i in (2, 3):
        time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
            time_list_combined, volumes_combined, hour_times[i], hour_volumes[i])
    time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
        hour_times[4], hour_volumes[4], time_list_combined, volumes_combined)

    if print_components == 1:
        total = sum(sum(v) for v in hour_volumes)
        # Guard against an all-zero dataset before dividing.
        if total > 0:
            for code, volumes_h in zip(exchange_codes, hour_volumes):
                share = 100 * float(sum(volumes_h) / total)
                print("%s approximate share of the total volume: %0.1f%%"
                      % (code, share))

    location = "data/export_csv/"
    file_name = location + "global_hourly_volume_index.csv"
    with open(file_name, 'w', newline='') as csvfile:
        writ = csv.writer(csvfile, delimiter=';', quotechar='|',
                          quoting=csv.QUOTE_MINIMAL)
        print("\033[0;32;0m Writing to file '%s'...\033[0;0;0m" % file_name)
        writ.writerow(["Time", "Volume"])
        for stamp, volume in zip(time_list_combined, volumes_combined):
            writ.writerow([stamp, volume])
# --- Hourly liquidity-measure construction (newline-stripped fragment; code kept byte-identical) ---
# Loads aggregate minute data via data_import.fetch_aggregate_csv, rebuilds the
# time axis with supp.fix_time_list/make_time_list, and derives hourly series:
# cleaned returns/spread/volume/ILLIQ/realized-volatility via dis.clean_series_hour,
# rolls.rolls (Roll spread estimator), ILLIQ.illiq and realized_volatility.RVol.
# NOTE(review): the flags assigned here are `intraweek`/`plots`/`print_table`,
# but the guard below tests `intraday`, which is never assigned in this
# fragment — confirm it is defined elsewhere; possible typo for `intraweek`.
# NOTE(review): the four chained remove_list1_zeros_from_all_lists passes
# appear to filter zero entries from each series in turn so all lists stay
# aligned — confirm against supp's definition.  `file_name` and `n_exc` come
# from outside this fragment.
intraweek = 0 plots = 0 print_table = 0 time_list_minutes, prices, volumes = data_import.fetch_aggregate_csv(file_name, n_exc) y, mo, d, h, mi = supp.fix_time_list(time_list_minutes, move_n_hours=0) time_list_minutes = supp.make_time_list(y, mo, d, h, mi ) time_listH, returnsH, spreadH, log_volumesH, illiqH, log_illiqH, rvolH, log_rvolH = dis.clean_series_hour(time_list_minutes, prices, volumes) prices_minutes = prices[0, :] volumes_minutes = volumes[0, :] returns_minutes = jake_supp.logreturn(prices_minutes) time_list_hours, prices_hours, volumes_hours = dis.convert_to_hour(time_list_minutes, prices_minutes, volumes_minutes) spread_hours = rolls.rolls(prices_minutes, time_list_minutes, calc_basis="h", kill_output=1)[1] illiq_hours_time, illiq_hours = ILLIQ.illiq(time_list_minutes, returns_minutes, volumes_minutes, hourly_or_daily="h", threshold=0) rvol_hours, time_list_hours_rvol = realized_volatility.RVol(time_list_minutes, prices_minutes, daily=0, annualize=1) if intraday == 1: time_list_removed = [] time_list_hours, time_list_removed, volumes_hours, prices_hours, rvol_hours, spread_hours = supp.remove_list1_zeros_from_all_lists(time_list_hours, time_list_removed, volumes_hours, prices_hours, rvol_hours, spread_hours) time_list_hours, time_list_removed, rvol_hours, volumes_hours, prices_hours, spread_hours, illiq_hours = supp.remove_list1_zeros_from_all_lists(time_list_hours, time_list_removed, rvol_hours, volumes_hours, prices_hours, spread_hours, illiq_hours) time_list_hours, time_list_removed, spread_hours, rvol_hours, volumes_hours, prices_hours, illiq_hours = supp.remove_list1_zeros_from_all_lists(time_list_hours, time_list_removed, spread_hours, rvol_hours, volumes_hours, prices_hours, illiq_hours) time_list_hours, time_list_removed, illiq_hours, spread_hours, rvol_hours, volumes_hours, prices_hours = supp.remove_list1_zeros_from_all_lists(time_list_hours, time_list_removed, illiq_hours, spread_hours, rvol_hours, volumes_hours, prices_hours) print()
# Fragment cut mid-loop: announces the timestamps removed by the zero-filter
# passes above and starts iterating them, but the loop body is not visible in
# this chunk — code left byte-identical rather than guessing the body.
print("removed: ") for i in range(len(time_list_removed)):
# --- Spread vs. global-volume regression setup (newline-stripped fragment; code kept byte-identical) ---
# The leading commented block once compared the constructed daily volume index
# against actual daily global volumes (correlation + coverage printout).
# The live branch fetches Korbit minute data, derives hourly spread via
# rolls.rolls, slices spread and native volume to the index timeline, prints a
# sanity check that the time lists match, then strips zero entries from all
# aligned series.
# NOTE(review): `spread_vs_global_volume_regression_daily`, `time_list_indexH`
# and `volume_indexH` are defined outside this fragment, and the `if` body
# likely continues past this chunk — left unreformatted for that reason.
# plt.plot(volumes_actualD) # plt.figure() # plt.plot(volume_indexD) # plt.show() # # # corr = np.corrcoef(volumes_actualD, volume_indexD) # print("Our index accounts for %0.1f%% of the volume and has a correlation of %0.1f%% with the actual volumes" % (100*sum(volume_indexD)/sum(volumes_actualD), 100*corr[0,1])) # # plot.time_series_single(time_list_indexD,volume_indexD,"global_volumes_index") # plot.time_series_single(time_listD,volumes_actualD,"actual_global_volumes") if spread_vs_global_volume_regression_daily: exc_name, time_listM, priceM, volumeM = di.get_list(exc="korbit", freq="m") time_list_nativeH, priceH, volume_nativeH = dis.convert_to_hour(time_listM, priceM, volumeM) spread_abs, spreadH, time_list_spread, count_value_error = rolls.rolls(priceM, time_listM, calc_basis="h", kill_output=1) start_i = time_list_spread.index(time_list_indexH[0]) end_i = time_list_spread.index(time_list_indexH[-1]) + 1 time_list_spread = time_list_spread[start_i:end_i] spreadH = spreadH[start_i:end_i] volume_nativeH = volume_nativeH[start_i:end_i] print("It is %s that the time lists are equal" % (time_list_spread == time_list_indexH)) time_listH = time_list_spread time_list_removed = [] time_listH, time_list_removed, spreadH, volume_indexH, volume_nativeH = supp.remove_list1_zeros_from_all_lists(time_listH, time_list_removed, spreadH, volume_indexH, volume_nativeH)
# Scratch/debug fragment: begins mid-loop-body (an index-finder printing
# "Time: ... Index: ...") and then uses alternating triple-quoted string
# blocks as toggle comments to switch between two slicing variants — raw
# minute series (offset 305370) vs hourly-converted series (offset 1827).
# Kept byte-identical: the triple-quote/`#"""` toggles are balance-sensitive
# and would break under any reformat.
# NOTE(review): the hard-coded offsets 305370 and 1827 are presumably
# dataset-specific start indices — confirm against the underlying data.
print("Time: ", time_listH[start+i], "Index: ", i+start) """ """ #""" """ timestamps = time_listH[305370:] highs = hi[0][305370:] lows = lo[0][305370:] prices = prices[0][305370:] """ #""" #""" hour_time, hi_hour, lo_hour = dis.convert_to_hour(time_list, hi, lo, list2_basis=0) hour_time_closing, closing_prices, closing_prices_dummy = dis.convert_to_hour( time_list1, prices, prices, list2_basis=0) """ #Index-finder start = 4000 end = 6000 for i in range(len(hour_time[start:end])): print("Time: ", hour_time[start+i], "Index: ", i+start) """ #""" timestamps = hour_time[1827:] highs = hi_hour[0][1827:] lows = lo_hour[0][1827:] prices = closing_prices[0][1827:]
# --- Script: full-day vs opening-hours average returns (newline-stripped fragment) ---
# Imports, fetches legacy exchange data, converts to hourly, computes log
# returns over all hours and over opening hours (incl. weekends) only, takes
# cyclical (per-hour-of-day) averages of each, and prepares x/opening_y for a
# 24-hour comparison plot.
# NOTE(review): os.chdir targets a hard-coded personal path; the Norwegian
# comment "Blir ikke likt på deres pc" means "won't be the same on your pc" —
# this should be made portable (e.g. relative to the repo root).
# NOTE(review): `volumes_opeen` looks like a typo for `volumes_open`; it is
# not referenced later in this fragment.  The trailing `for i in range(0, 7):`
# is cut off here (body outside this chunk), so the code is left
# byte-identical rather than reformatted into invalid standalone code.
import numpy as np import data_import as di import legacy from Jacob import jacob_support as jake_supp import data_import_support as dis from matplotlib import pyplot as plt import os os.chdir("/Users/sondre/Documents/GitHub/krypto") # Blir ikke likt på deres pc exchanges = ["bitstampusd", "btceusd", "coinbaseusd", "krakenusd"] # Importing data for all minutes of the day exchanges, time_list, prices, volumes = legacy.get_lists_legacy(opening_hours="n", make_totals="n") # Converting to hourly data time_list, prices, volumes = dis.convert_to_hour(time_list, prices, volumes) full_returns = jake_supp.logreturn(prices[0, :]) # Only extracting opening hours time_list_open, prices_open, volumes_opeen = legacy.opening_hours_w_weekends(time_list, prices, volumes) bitstamp_price_open = np.transpose(prices_open[0, :]) open_returns = jake_supp.logreturn(bitstamp_price_open) # Finding average for entire day full_day_time, full_day_avg_returns = dis.cyclical_average_legacy(time_list, full_returns) # Finding average for opening hours open_time, open_avg_returns = dis.cyclical_average_legacy(time_list_open, open_returns) # Want to compare the two in a single graph x = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23] opening_y = np.zeros(24) for i in range(0, 7):
# --- Script fragment: summing hourly volumes across Bitstamp/Coinbase/Kraken ---
# Flags configure a cleaning/printing pipeline (extremes, zero-volume,
# zero-bid-ask removal, cyclical/result printing).  The sum_volumes branch
# fetches three exchanges' minute data by positional index (0=bitstamp,
# 3=coinbase, 5=kraken), converts each to hourly, then walks Bitstamp's hour
# list with separate cursors j (coinbase) and t (kraken) to merge volumes
# onto Bitstamp's timeline via supp.A_before_B timestamp comparisons.
# NOTE(review): `partsum = volumes_bitstampH[i]` is a plain assignment, so
# each hour's partial sum restarts from Bitstamp's volume; the coinbase/
# kraken additions are cut off past this chunk (the trailing `if` has no
# visible body), so the code is left byte-identical rather than reformatted.
remove_extremes = 1 remove_zero_volume = 1 remove_zero_bas = 1 print_cyclical = 1 print_results = 0 sum_volumes = 1 if sum_volumes == 1: summed_volume = [] bitstamp, time_list_bitstampM, prices_bitstampM, volumes_bitstampM = di.get_list( 0) coinbase, time_list_coinbaseM, prices_coinbaseM, volumes_coinbaseM = di.get_list( 3) kraken, time_list_krakenM, prices_krakenM, volumes_krakenM = di.get_list(5) bitstamp_hour_list, prices, volumes_bitstampH = dis.convert_to_hour( time_list_bitstampM, prices_bitstampM, volumes_bitstampM) coinbase_hour_list, prices, volumes_coinbaseH = dis.convert_to_hour( time_list_coinbaseM, prices_coinbaseM, volumes_coinbaseM) kraken_hour_list, prices, volumes_krakenH = dis.convert_to_hour( time_list_krakenM, prices_krakenM, volumes_krakenM) j = 0 #coinbase counter t = 0 #kraken counter partsum = 0 for i in range(len(bitstamp_hour_list)): partsum = volumes_bitstampH[i] if not supp.A_before_B( bitstamp_hour_list[i], kraken_hour_list[t]) and t < len(kraken_hour_list) - 1: if bitstamp_hour_list[i] == kraken_hour_list[t]: