def write_hourly_volume_index_to_csv(print_components=0):
    """Build a global hourly BTC volume index across five exchanges and write it to CSV.

    Minute-level (time, price, volume) series for each exchange are fetched
    via ``di.get_list``, aggregated to hourly buckets with
    ``dis.convert_to_hour``, and the five hourly volume series are summed
    onto one common time axis with ``dis.add_two_series_w_different_times``.
    The combined (time, volume) pairs are written to
    ``data/export_csv/global_hourly_volume_index.csv`` (';'-delimited).

    :param print_components: if 1, also print each exchange's approximate
        percentage share of the total hourly volume.
    """
    # Minute-resolution raw series per exchange (exchange name is unused here).
    _, time_bitstampM, prices_bitstampM, volumes_bitstampM = di.get_list(exc="bitstampusd")
    _, time_coinbaseM, prices_coinbaseM, volumes_coinbaseM = di.get_list(exc="coinbaseusd")
    _, time_korbitM, prices_korbitM, volumes_korbitM = di.get_list(exc="korbitkrw")
    _, time_krakM, prices_krakM, volumes_krakM = di.get_list(exc="krakeneur")
    _, time_ccM, prices_ccM, volumes_ccM = di.get_list(exc="coincheckjpy")

    # Aggregate each exchange to hourly volumes (hourly prices not needed).
    bitstamp_hours, _, volumes_bitstampH = dis.convert_to_hour(time_bitstampM, prices_bitstampM, volumes_bitstampM)
    coinbase_hours, _, volumes_coinbaseH = dis.convert_to_hour(time_coinbaseM, prices_coinbaseM, volumes_coinbaseM)
    korbit_hours, _, volumes_korbitH = dis.convert_to_hour(time_korbitM, prices_korbitM, volumes_korbitM)
    krak_hours, _, volumes_krakH = dis.convert_to_hour(time_krakM, prices_krakM, volumes_krakM)
    cc_hours, _, volumes_ccH = dis.convert_to_hour(time_ccM, prices_ccM, volumes_ccM)

    # Fold the five series pairwise onto one common hourly time axis.
    time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
        coinbase_hours, volumes_coinbaseH, bitstamp_hours, volumes_bitstampH)
    time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
        time_list_combined, volumes_combined, korbit_hours, volumes_korbitH)
    time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
        time_list_combined, volumes_combined, krak_hours, volumes_krakH)
    time_list_combined, volumes_combined = dis.add_two_series_w_different_times(
        cc_hours, volumes_ccH, time_list_combined, volumes_combined)

    if print_components == 1:
        # NOTE(review): in the original, ``total`` was computed but every
        # per-exchange share line and the print were commented out, so this
        # flag printed nothing.  Restored the evident intent here.
        components = {
            "bitstamp": sum(volumes_bitstampH),
            "coinbase": sum(volumes_coinbaseH),
            "korbit": sum(volumes_korbitH),
            "kraken": sum(volumes_krakH),
            "coincheck": sum(volumes_ccH),
        }
        total = sum(components.values())
        if total > 0:  # guard against division by zero on empty data
            for name, vol in components.items():
                print("%s approximate share of the total volume: %.2f%%"
                      % (name, 100 * float(vol) / total))

    location = "data/export_csv/"
    file_name = location + "global_hourly_volume_index.csv"
    with open(file_name, 'w', newline='') as csvfile:
        writ = csv.writer(csvfile, delimiter=';', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        print("\033[0;32;0m Writing to file '%s'...\033[0;0;0m" % file_name)
        writ.writerow(["Time", "Volume"])
        # One row per combined hourly observation.
        for t, v in zip(time_list_combined, volumes_combined):
            writ.writerow([t, v])
def write_daily_volume_index_to_csv():
    """Build a global daily BTC volume index across five exchanges and write it to CSV.

    Minute-level (time, price, volume) series for each exchange are fetched
    via ``di.get_list``, aggregated to daily buckets with
    ``dis.convert_to_day``, and the five daily volume series are combined on
    a common time axis with ``dis.fix_gv``.  The combined (time, volume)
    pairs are written to ``data/export_csv/global_daily_volume_index.csv``
    (';'-delimited).
    """
    # Minute-resolution raw series per exchange (exchange name is unused here).
    _, time_bitstampM, prices_bitstampM, volumes_bitstampM = di.get_list(exc="bitstampusd")
    _, time_coinbaseM, prices_coinbaseM, volumes_coinbaseM = di.get_list(exc="coinbaseusd")
    _, time_korbitM, prices_korbitM, volumes_korbitM = di.get_list(exc="korbitkrw")
    _, time_krakM, prices_krakM, volumes_krakM = di.get_list(exc="krakeneur")
    _, time_ccM, prices_ccM, volumes_ccM = di.get_list(exc="coincheckjpy")

    # Aggregate each exchange to daily volumes (daily prices not needed).
    bitstamp_days, _, volumes_bitstampD = dis.convert_to_day(time_bitstampM, prices_bitstampM, volumes_bitstampM)
    coinbase_days, _, volumes_coinbaseD = dis.convert_to_day(time_coinbaseM, prices_coinbaseM, volumes_coinbaseM)
    korbit_days, _, volumes_korbitD = dis.convert_to_day(time_korbitM, prices_korbitM, volumes_korbitM)
    krak_days, _, volumes_krakD = dis.convert_to_day(time_krakM, prices_krakM, volumes_krakM)
    cc_days, _, volumes_ccD = dis.convert_to_day(time_ccM, prices_ccM, volumes_ccM)

    # Combine all five daily series onto one common time axis.
    # (An earlier pairwise add_two_series_w_different_times chain was
    # superseded by this single fix_gv call; dead code removed.)
    time_list_combined, volumes_combined = dis.fix_gv(
        bitstamp_days, volumes_bitstampD,
        coinbase_days, volumes_coinbaseD,
        korbit_days, volumes_korbitD,
        krak_days, volumes_krakD,
        cc_days, volumes_ccD)

    location = "data/export_csv/"
    file_name = location + "global_daily_volume_index.csv"
    with open(file_name, 'w', newline='') as csvfile:
        writ = csv.writer(csvfile, delimiter=';', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        print("\033[0;32;0m Writing to file '%s'...\033[0;0;0m" % file_name)
        writ.writerow(["Time", "Volume"])
        # One row per combined daily observation.
        for t, v in zip(time_list_combined, volumes_combined):
            writ.writerow([t, v])
import data_import as di import plot from Jacob import jacob_support as jake_supp import data_import_support as dis spec = "01012015_6" intraday = 0 intraweek = 1 exch = [0] # 0=bitstamp, 1=coincheck #exchanges, time_list_minutes, prices_minutes, volumes_minutes = di.get_lists_legacy(opening_hours="n", make_totals="n") exchanges, time_list, prices, volume = di.get_list(0) """ for exc in exch: exc_name = "_" + exchanges[exc] + "_TEST_" + spec print() print("SEASONALITY FOR", exchanges[exc].upper()) if intraday == 1: # HOURS ---------------------------------------------------------------------------------------------------- print("------ INTRADAY ------") time_list_hours, returns_hours, spread_hours, log_volumes_hours, illiq_hours, \ illiq_hours_time, log_illiq_hours, rvol_hours, log_rvol_hours = \ dis.clean_series_hour(time_list_minutes, prices_minutes, volumes_minutes, exc=exc, convert_time_zones=1) # Finding average for every hour of the day hour_of_day, avg_volumes_hour, low_volumes_hour, upper_volumes_hour = dis.cyclical_average(time_list_hours, log_volumes_hours, frequency="h") hour_of_day, avg_spread_hour, low_spread_hour, upper_spread_hour = dis.cyclical_average(time_list_hours, spread_hours, frequency="h")
import regression_support from Sondre import sondre_support_formulas as supp import data_import_support as dis import plot exchanges = ["korbit"] hours_in_window = [ 1, 2, 4 ] # La denne være en liste med de forskjellige vinduene analysen skal gjøres for convert_coeffs_to_percentage = 1 # Convert coeffs and std.errs. of returnsH and spreadH to percentage convert_logs = 0 # Convert coeffs and std.errs. of rvol and illiq to percentage, i.e. 100*exp(coeff) NB! Doesn't work subtract_means = 1 log_illiqs = True for exc in exchanges: exc_name, time_listH, returnsH, spreadH, volumesH, log_volumesH, illiqH, log_illiqH, rvolH, log_rvolH = di.get_list( exc=exc, freq=1, local_time=1) hour = supp.fix_time_list(time_listH)[3] print() print("------------------------ INTRADAY REGRESSION FOR", exc_name.upper()[0:-3], "-------------------------") print() print() print( " ------------------------------------------------Regression table for Intraday seasonality-----------------------------------------" ) n = len(hour) for h in hours_in_window: # Itererer over de forskjellige vinduene
import realized_volatility import rolls import ILLIQ from Jacob import jacob_support as jake_supp os.chdir("/Users/sondre/Documents/GitHub/krypto") unedited = 0 transformed = 1 # Transformation exch = ["bitstamp", "coinbase", "btcn", "korbit"] for exc in exch: exc_name, time_listD, returnsD, volumesD, log_volumesD, spreadD, illiqD, log_illiqD, rvolD, log_rvolD = di.get_list( exc, freq="d", local_time=0) exc_name, time_listM, pricesM, volumesM = di.get_list(exc, freq="m") timeD, pricesD, volumes_rawD = dis.convert_to_day(time_listM, pricesM, volumesM) # plot.time_series_single(timeD, pricesD, "price_" + exc_name) plot.time_series_single(time_listD, volumesD, "volume_" + exc_name) plot.time_series_single(time_listD, illiqD, "Log_ILLIQ_" + exc_name, logy=1, perc=1, ndigits=3) plot.time_series_single(time_listD, rvolD,
os.chdir("/Users/Sondre/Documents/GitHub/krypto") #local_time = 0 import_new_exchanges = False global_volumes = False global_volumes_experimental = True spread_vs_global_volume_regression_daily = False spread_vs_global_volume_regression_hourly= False make_real_spread_csv = False real_spread_vs_our_spread = False if import_new_exchanges: for exc in range(6): exc_name, time_listM, pricesM, volumesM = di.get_list(exc) time_listH, returnsH, spreadH, volumesH, log_volumesH, illiqH, log_illiqH, rvolH, log_rvolH = dis.clean_series_hour(time_listM, pricesM, volumesM, exc=exc, convert_time_zones=local_time, plot_for_extreme=0) dis.write_clean_csv(exc_name, time_listH, returnsH, spreadH, volumesH, log_volumesH, illiqH, log_illiqH, rvolH, log_rvolH, freq="h", local_time=local_time) time_listD, returnsD, spreadD, volumesD, log_volumesD, illiqD, log_illiqD, rvolD, log_rvolD = dis.clean_series_days(time_listM, pricesM, volumesM, exc=exc, convert_time_zones=1, plot_for_extreme=0) dis.write_clean_csv(exc_name, time_listD, returnsD, spreadD, volumesD, log_volumesD, illiqD, log_illiqD, rvolD, log_rvolD, freq="d") if global_volumes: # gvi.write_hourly_volume_index_to_csv() gvi.write_daily_volume_index_to_csv() time_list_indexD, volume_indexD = gvi.get_global_daily_volume_index() # time_list_indexH, volume_indexH = gvi.get_global_hourly_volume_index() time_listD, volumes_actualD = di.get_global_volume_actual_daily() plt.plot(volume_indexD)
from Sondre import sondre_support_formulas as supp import data_import_support as dis import rolls exchanges = [3, 4] # select pair remove_extremes = 1 remove_zero_volume = 1 remove_zero_bas = 1 print_cyclical = 1 print_results = 0 sum_volumes = 1 if sum_volumes == 1: summed_volume = [] bitstamp, time_list_bitstampM, prices_bitstampM, volumes_bitstampM = di.get_list( 0) coinbase, time_list_coinbaseM, prices_coinbaseM, volumes_coinbaseM = di.get_list( 3) kraken, time_list_krakenM, prices_krakenM, volumes_krakenM = di.get_list(5) bitstamp_hour_list, prices, volumes_bitstampH = dis.convert_to_hour( time_list_bitstampM, prices_bitstampM, volumes_bitstampM) coinbase_hour_list, prices, volumes_coinbaseH = dis.convert_to_hour( time_list_coinbaseM, prices_coinbaseM, volumes_coinbaseM) kraken_hour_list, prices, volumes_krakenH = dis.convert_to_hour( time_list_krakenM, prices_krakenM, volumes_krakenM) j = 0 #coinbase counter t = 0 #kraken counter partsum = 0
""" korea = 1 regular = 0 cutoff = 0 if korea == 1: time_list_tick, price_tick, volume_tick = dis.quick_import(4) time_list_minute = dis.unix_to_timestamp(time_list_tick[cutoff:]) price_tick = price_tick[cutoff:] volume_tick = volume_tick[cutoff:] else: exch, time_list_minute, price_tick, volume_tick = di.get_list(regular) """ gap = 0 equal = 0 volume_checker = volume_tick[0] volume_checker_zero = 0 price_checker = price_tick[0] price_checker_zero = 0 last_year, last_month, last_day, last_hour, last_minute = supp.fix_time_list(time_list_minute[0], single_time_stamp=1) for i in range(0, len(time_list_minute)): volume_checker += volume_tick[i] price_checker += price_tick[i] if volume_tick[i] <= 0: volume_checker_zero += 1