Example no. 1
def cw_hist_cleaning(vol_fixed_df,
                     start_date,
                     crypto_list=CRYPTO_ASSET,
                     exc_list=EXCHANGES):

    tot_date_arr = date_gen(start_date)

    cleaned_df = pd.DataFrame(columns=CLEAN_DATA_HEAD)

    for crypto in crypto_list:

        pair_arr = crypto_fiat_pair_gen(crypto)

        for exchange in exc_list:

            ex_matrix = vol_fixed_df.loc[vol_fixed_df["Exchange"] == exchange]

            for cp in pair_arr:

                crypto = cp[:3]

                cp_matrix = ex_matrix.loc[ex_matrix["Pair"] == cp]
                cp_matrix = cp_matrix.drop(columns=["Exchange", "Pair"])
                # proceed only if the pair matrix has more than one row
                if cp_matrix.shape[0] > 1:

                    # check if the historical series starts at the same date
                    # as start_date; if not, fill the dataframe with zero values
                    cp_matrix = homogenize_series(cp_matrix, tot_date_arr)

                    # check if the series stopped at a certain point in
                    # the past; if so, fill with zeros
                    cp_matrix = homogenize_dead_series(cp_matrix, tot_date_arr)

                    # check if the matrix has missing data and, if so, fix it
                    if cp_matrix.shape[0] != tot_date_arr.size:

                        print("fixing")
                        cp_matrix = CW_series_fix_missing(
                            cp_matrix,
                            exchange,
                            cp,
                            tot_date_arr,
                            DB_NAME,
                            MONGO_DICT.get("coll_vol_chk"),
                        )

                    # turn the Time column values into strings
                    cp_matrix["Time"] = [str(date) for date in cp_matrix["Time"]]

                    # add exchange and currency_pair column
                    cp_matrix["Exchange"] = exchange
                    cp_matrix["Pair"] = cp
                    reordered = df_reorder(cp_matrix, column_set="conversion")
                    cleaned_df = cleaned_df.append(reordered)

    return cleaned_df
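As a side note, the zero-filling described in the comments above (series that start after start_date or die before the last date) amounts to reindexing each exchange-pair series against the full date array. The snippet below is only a minimal sketch of that idea with a hypothetical pad_missing_days helper; it is not the actual homogenize_series / homogenize_dead_series implementation.

import numpy as np
import pandas as pd


def pad_missing_days(cp_matrix, tot_date_arr):
    # reindex the series on the complete daily timestamp array and
    # fill the days that are not present with zero values
    padded = cp_matrix.set_index("Time").reindex(tot_date_arr, fill_value=0)
    return padded.rename_axis("Time").reset_index()


# example: a series that covers only one of three days
dates = np.array([1577836800, 1577923200, 1578009600])
series = pd.DataFrame({"Time": [1577923200], "Close Price": [7200.0],
                       "Pair Volume": [350.0], "Crypto Volume": [0.05]})
print(pad_missing_days(series, dates))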
Example no. 2
def index_board_logic_op(crypto_asset, logic_one_arr, daily_ewma,
                         curr_reb_start, next_reb_start, curr_board_eve):

    # turn the first logic row into a dataframe; it will be used
    # for the next quarter weights computation
    first_logic_row = pd.DataFrame(logic_one_arr, columns=crypto_asset)

    # computing the new second logic row that is used to compute the
    # weights relative to the next rebalance period
    ewma_fraction = daily_ewma_fraction(daily_ewma, first_logic_row,
                                        curr_reb_start, curr_board_eve)
    print("ewma_fraction")
    print(ewma_fraction)

    daily_ewma_double_row = index_norm_logic_op(crypto_asset, daily_ewma)

    print(daily_ewma_double_row)

    second_logic_row = (ewma_fraction >= 0.02) * 1

    double_checked_ewma = daily_double_log_check(first_logic_row,
                                                 second_logic_row, daily_ewma,
                                                 curr_reb_start,
                                                 curr_board_eve)

    # adding the Time columns to the double checked ewma
    human_curr_start = timestamp_to_human([curr_reb_start],
                                          date_format="%m-%d-%y")
    human_curr_board = timestamp_to_human([curr_board_eve],
                                          date_format="%m-%d-%y")
    period_date_list = date_gen(human_curr_start[0],
                                human_curr_board[0],
                                EoD="N")
    double_checked_ewma["Time"] = period_date_list

    print(double_checked_ewma)
    # giving "Time" and "Date" columns to the df containing the logic rows
    human_next_start = timestamp_to_human([next_reb_start],
                                          date_format="%m-%d-%y")
    first_logic_row["Time"] = next_reb_start
    first_logic_row["Date"] = human_next_start
    second_logic_row["Time"] = next_reb_start
    second_logic_row["Date"] = human_next_start

    # computing the new weights that will be used starting from the
    # next rebalance date
    weights_for_board = quarter_weights(double_checked_ewma,
                                        [int(curr_board_eve)], crypto_asset)
    weights_for_board["Time"] = next_reb_start
    weights_for_board["Date"] = human_next_start

    return first_logic_row, second_logic_row, weights_for_board, daily_ewma_double_row
Example no. 3
def test_date_gen():

    # testing the function with start_date and end_date
    # dates in mm-dd-yyyy format
    start_date = "01-01-2020"
    end_date = "01-06-2020"

    ts = np.array([1577836800, 1577923200, 1578009600, 1578096000, 1578182400])
    date = date_gen(start_date, end_date)

    assert np.array_equal(date, ts)

    # testing the function with only start_date
    day_in_sec = 86400
    today = datetime.now().strftime("%m-%d-%Y")
    today = datetime.strptime(today, "%m-%d-%Y")

    # with the default EoD argument the function creates an array
    # of dates up to yesterday
    yesterday_ts = int(
        today.replace(tzinfo=timezone.utc).timestamp()) - day_in_sec

    date2 = date_gen(start_date)

    assert yesterday_ts == date2[-1]

    # with EoD='N' the function generates today's date as well
    date = date_gen(start_date, EoD='N')
    today_ts = int(today.replace(tzinfo=timezone.utc).timestamp())

    assert today_ts == date[-1]

    # with timeST != 'Y' the function generates an array or list
    # of string dates in 'mm-dd-yyyy' format;
    # with clss != 'array' it returns a list of dates
    date = date_gen(start_date, timeST='N', clss='list')

    assert type(date) is list
    assert date[0] == start_date
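For readers unfamiliar with date_gen, the contract exercised by this test (daily timestamps at 00:00 UTC, end date excluded unless EoD='N') can be reproduced with the standard library. The helper below is an illustrative stand-in under those assumptions, not the package's implementation.

from datetime import datetime, timezone

import numpy as np


def daily_timestamps(start_date, end_date, include_end=False):
    # parse the mm-dd-yyyy boundaries as 00:00 UTC and step one day (86400 s) at a time
    start = datetime.strptime(start_date, "%m-%d-%Y").replace(tzinfo=timezone.utc)
    end = datetime.strptime(end_date, "%m-%d-%Y").replace(tzinfo=timezone.utc)
    stop = int(end.timestamp()) + (86400 if include_end else 0)
    return np.arange(int(start.timestamp()), stop, 86400)


# daily_timestamps("01-01-2020", "01-06-2020") reproduces the ts array asserted above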
Example no. 4
def hist_time_array_set(start_date=START_DATE):

    # define the variable containing all the dates from start_date to yesterday.
    # the dates are expressed as timestamps and each day refers to 12:00 am UTC
    reference_date_arr = date_gen(start_date)

    # define all the useful arrays containing the rebalance
    # start date, stop date, board meeting date
    reb_start_date = start_q(start_date)
    reb_stop_date = stop_q(reb_start_date)
    board_date = board_meeting_day()
    board_date_eve = day_before_board()
    next_reb_date = next_start()

    return (reference_date_arr, reb_start_date, reb_stop_date, board_date,
            board_date_eve, next_reb_date)
Example no. 5
def hist_data_feed_op():

    # define the array containing the dates where the index uses CW feed data
    CW_date_arr = date_gen(START_DATE, EXC_START_DATE)
    CW_date_str = [str(date) for date in CW_date_arr]

    # drop the pre-existing collection (if there is one)
    mongo_coll_drop("index_feed")

    # downloading the EXC series from MongoDB
    EXC_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_final"))
    EXC_series = EXC_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    # downloading the CW series from MongoDB and selecting only the dates
    # from 2016-01-01 to 2020-04-17
    CW_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_final"))
    CW_series["Time"] = [str(x) for x in CW_series["Time"]]
    print("CW")
    print(CW_series)
    CW_sub_series = CW_series.loc[CW_series.Time.isin(CW_date_str)]
    print(CW_sub_series)
    CW_sub_series = CW_sub_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]
    CW_sub_series["Time"] = [int(x) for x in CW_sub_series["Time"]]
    CW_sub_series.reset_index(drop=True, inplace=True)
    print(CW_sub_series)

    # creating a single dataframe containing the two different data sources
    data_feed = CW_sub_series.append(EXC_series, sort=True)
    data_feed.reset_index(drop=True, inplace=True)

    data_feed = data_feed[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    print(data_feed)
    data_feed = homogeneize_feed(data_feed)
    print("post hom")
    print(data_feed)

    # put the converted data on MongoDB
    mongo_upload(data_feed, "collection_data_feed")

    return None
Example no. 6
def cw_exc_merging(start_date=START_DATE, exc_start=EXC_START_DATE,
                   db=DB_NAME, coll_cw="coll_cw_final",
                   coll_exc="coll_exc_final"):

    cw_date_arr = date_gen(start_date, exc_start, EoD="N")

    exc_series = query_mongo(db, MONGO_DICT.get(coll_exc))
    exc_part = df_reorder(exc_series, column_set="conversion")

    # downloading the CW series from MongoDB and selecting only the dates
    # from 2016-01-01 to 2020-04-17
    cw_series = query_mongo(db, MONGO_DICT.get(coll_cw))
    cw_part = cw_series.loc[cw_series.Time.isin(cw_date_arr)]
    cw_part = df_reorder(cw_part, column_set="conversion")

    # creating a single dataframe containing the two different data sources
    merged_series = cw_part.append(exc_part, sort=True)
    merged_series["Time"] = [int(d) for d in merged_series["Time"]]

    return merged_series
Example no. 7
def cw_hist_operation(start_date=START_DATE):

    date_tot = date_gen(start_date)
    last_day_TS = date_tot[len(date_tot) - 1]

    mongo_indexing()

    # deleting previous MongoDB collections
    mongo_coll_drop("cw_hist_clean")
    mongo_coll_drop("cw_hist_conv")

    # fix and upload the series for the "pair volume" info
    tot_raw_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"))
    cw_vol_fix_data = cw_hist_pair_vol_fix(tot_raw_data)
    mongo_upload(cw_vol_fix_data, "collection_cw_vol_check")

    # clean and upload all the series
    cleaned_df = cw_hist_cleaning(cw_vol_fix_data, start_date)
    mongo_upload(cleaned_df, "collection_cw_clean")

    # compute and upload USDC and USDT rates series
    usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean", None)
    mongo_upload(usdt_rates, "collection_stable_rate")
    mongo_upload(usdc_rates, "collection_stable_rate")

    # convert and upload all the data into USD
    converted_df = cw_hist_conv_op(cleaned_df)
    mongo_upload(converted_df, "collection_cw_converted")

    # logic matrix of crypto-fiat keys
    key_df = key_log_mat(DB_NAME, "coll_cw_conv", last_day_TS, EXCHANGES,
                         CRYPTO_ASSET, PAIR_ARRAY)
    mongo_upload(key_df, "collection_CW_key")
    mongo_upload(key_df, "collection_EXC_key")

    # fill zero-volume data and upload on MongoDB
    final_df = cw_hist_zero_vol_fill_op(converted_df)
    mongo_upload(final_df, "collection_cw_final_data")

    return None
Example no. 8
def ecb_hist_download(start_date):

    # drop the pre-existing collection related to ecb_rawdata
    mongo_coll_drop("ecb_hist_d")

    # set today as end_date
    end_date = datetime.now().strftime("%Y-%m-%d")

    # create the list of dates to download
    date_list = date_gen(start_date,
                         end_date,
                         timeST="N",
                         clss="list",
                         EoD="N")

    date_list_str = [
        datetime.strptime(day, "%m-%d-%Y").strftime("%Y-%m-%d")
        for day in date_list
    ]

    ecb_hist_series = pd.DataFrame()

    for date in date_list_str:

        # retrieving data from ECB website
        single_date_ex_matrix = ECB_rates_extractor(ECB_FIAT, date)
        # add a sleep time in order not to overuse the API connection
        time.sleep(0.02)

        # put all the downloaded data into a DataFrame
        if ecb_hist_series.size == 0:

            ecb_hist_series = single_date_ex_matrix

        else:

            ecb_hist_series = ecb_hist_series.append(single_date_ex_matrix,
                                                     sort=True)

    return ecb_hist_series
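Since DataFrame.append is deprecated in recent pandas releases, the accumulation loop above is nowadays usually written by collecting the per-day frames in a list and concatenating once at the end. The sketch below shows that pattern with a placeholder downloader standing in for ECB_rates_extractor; it is an illustration, not the module's code.

import pandas as pd


def collect_daily_frames(date_list_str, downloader):
    # downloader is any callable returning a DataFrame for a single date string
    frames = [downloader(day) for day in date_list_str]
    frames = [f for f in frames if f is not None and not f.empty]
    return pd.concat(frames, sort=True, ignore_index=True) if frames else pd.DataFrame()


# usage sketch with a dummy downloader
dummy_downloader = lambda day: pd.DataFrame({"DATE": [day], "CURRENCY": ["USD"], "RATE": [1.1]})
print(collect_daily_frames(["2020-01-01", "2020-01-02"], dummy_downloader))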
Example no. 9
def homogeneize_feed(initial_df):

    df = initial_df.copy()
    list_of_exchanges = list(np.array(df["Exchange"].unique()))
    list_of_pair = list(np.array(df["Pair"].unique()))

    # today_str = datetime.now().strftime("%Y-%m-%d")
    # today = datetime.strptime(today_str, "%Y-%m-%d")
    # today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
    # day_before_TS = today_TS - DAY_IN_SEC
    list_of_missing = date_gen(EXC_START_DATE)

    ref_shape = df.loc[(df.Exchange == "coinbase-pro")
                       & (df.Pair == "btcusd")].shape[0]

    for ex in list_of_exchanges:
        for p in list_of_pair:
            sub_df = df.loc[(df.Exchange == ex)]
            sub_df = sub_df.loc[sub_df.Pair == p]
            print(ex)
            print(p)
            print(sub_df.shape)
            if sub_df.shape[0] == ref_shape:
                pass
            elif sub_df.shape[0] == 1569:
                print(ex)
                print(p)
                zero_mat = np.zeros((len(list_of_missing), 6))
                zero_sub_df = pd.DataFrame(zero_mat, columns=df.columns)
                zero_sub_df["Time"] = list_of_missing
                zero_sub_df["Exchange"] = ex
                zero_sub_df["Pair"] = p
                print(zero_sub_df)

                df = df.append(zero_sub_df)

    new_df = df.copy()

    return new_df
Example no. 10
def cw_new_key_mngm(logic_key_df, daily_mat, time_to_check, date_tot_str):

    # selecting only the keys with 0 value
    key_absent = logic_key_df.loc[logic_key_df.logic_value == 0]
    key_absent = key_absent.drop(columns=["logic_value"])

    # merging the dataframes in order to find the potential new keys
    merg_absent = pd.merge(key_absent, daily_mat, on="key", how="left")
    merg_absent.fillna("NaN", inplace=True)
    new_key_df = merg_absent.loc[merg_absent["Close Price"] != "NaN"]

    if new_key_df.empty is False:

        print("Message: New exchange-pair couple(s) found.")
        new_key_list = new_key_df["key"]

        date_tot_int = date_gen(START_DATE)
        # converting the timestamp dates into strings
        date_tot_str = [str(single_date) for single_date in date_tot_int]

        for key in new_key_list:

            # updating the logic matrix of exchange-pair keys
            logic_row_update = log_key_update(key, "collection_CW_key")
            mongo_upload(logic_row_update, "collection_CW_key")

            key_hist_df = new_series_composer(key,
                                              new_key_df,
                                              date_tot_str,
                                              time_to_check,
                                              kind_of_series="CW")

            # upload the dataframe on MongoDB collection "CW_cleandata"
            mongo_upload(key_hist_df, "collection_cw_clean")

    else:
        pass

    return None
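The new-key detection above relies on a left merge: keys flagged 0 in the logic matrix that nevertheless come back with a Close Price in the daily data are new exchange-pair couples. The self-contained sketch below reproduces just that detection step; the "&&" key format and the values are made up for illustration.

import pandas as pd

# logic matrix: keys already tracked (1) or never seen (0)
logic_key_df = pd.DataFrame({"key": ["kraken&&btcusd", "gemini&&ethusd"],
                             "logic_value": [1, 0]})
# daily data: gemini&&ethusd now shows up with a price, so it is a new key
daily_mat = pd.DataFrame({"key": ["kraken&&btcusd", "gemini&&ethusd"],
                          "Close Price": [30000.0, 1800.0]})

key_absent = logic_key_df.loc[logic_key_df.logic_value == 0].drop(columns=["logic_value"])
merg_absent = pd.merge(key_absent, daily_mat, on="key", how="left")
new_key_df = merg_absent.dropna(subset=["Close Price"])
print(new_key_df["key"].tolist())  # ['gemini&&ethusd']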
Example no. 11
def exc_new_key_mng(logic_key_df, daily_mat_00, day_to_clean_TS):

    # define the subset of keys that have not been present in the past
    key_absent = logic_key_df.loc[logic_key_df.logic_value == 0]
    key_absent = key_absent.drop(columns=["logic_value"])

    # merging the two dataframes (left join) in order to find potential
    # new keys in the data of the day
    merg_absent = pd.merge(key_absent, daily_mat_00, on="key", how="left")
    merg_absent.fillna("NaN", inplace=True)
    new_key_df = merg_absent.loc[merg_absent["Close Price"] != "NaN"]

    if new_key_df.empty is False:

        print("Message: New exchange-pair couple(s) found.")
        new_key_list = new_key_df["key"]
        print(new_key_list)

        date_tot_int = date_gen(START_DATE)
        # converting the timestamp dates into integers
        date_tot_str = [int(single_date) for single_date in date_tot_int]

        for key in new_key_list:

            # updating the logic matrix of exchange-pair keys
            logic_row_update = log_key_update(key, "collection_EXC_key")
            mongo_upload(logic_row_update, "collection_EXC_key")

            key_hist_df = new_series_composer(
                key, new_key_df, date_tot_str, day_to_clean_TS)

    else:

        key_hist_df = []

    return key_hist_df
Example no. 12
# define the start date as MM-DD-YYYY
start_date = "01-01-2016"

# define today in various formats
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
y_TS = today_TS - 86400
two_before_TS = y_TS - 86400
today_human = data_setup.timestamp_to_human([today_TS])
yesterday_human = data_setup.timestamp_to_human([y_TS])
two_before_human = data_setup.timestamp_to_human([two_before_TS])

# define the variable containing all the dates from start_date to today.
# the dates are expressed as timestamps and each day refers to 12:00 am UTC
reference_date_vector = data_setup.date_gen(start_date)

# define all the useful arrays containing the rebalance start
# date, stop date, board meeting date
rebalance_start_date = calc.start_q("01-01-2016")
rebalance_stop_date = calc.stop_q(rebalance_start_date)
board_date = calc.board_meeting_day()
board_date_eve = calc.day_before_board()
next_rebalance_date = calc.next_start()


# call the function that creates an object containing
# the pairs of quarterly start-stop dates
quarterly_date = calc.quarterly_period()

# ############# MAIN PART #########################
Example no. 13
# standard library import
from datetime import datetime

# third party import
import numpy as np

# local import
from cryptoindex.data_download import CW_raw_to_mongo
from cryptoindex.data_setup import (date_gen, Diff, timestamp_to_human)
from cryptoindex.mongo_setup import (mongo_coll, mongo_indexing, query_mongo)
from cryptoindex.config import (START_DATE, MONGO_DICT, PAIR_ARRAY,
                                CRYPTO_ASSET, EXCHANGES, DB_NAME)

# #########################################################

# set today
today = datetime.now().strftime("%Y-%m-%d")
# defining the array containing all the dates from START_DATE until today
date_tot = date_gen(START_DATE)
# converting the timestamp dates into strings
date_tot = [str(single_date) for single_date in date_tot]

# ########## MongoDB setup ################################

# create the indexing for MongoDB and define the variable containing the
# MongoDB collections where to upload data
mongo_indexing()
collection_dict_upload = mongo_coll()

# ####################### check for the cw_rawdata ####################

# selecting the last five days and putting them into an array
last_five_days = date_tot[(len(date_tot) - 5):len(date_tot)]
print(last_five_days)
Example no. 14
key_curr_vector = ["USD", "GBP", "CAD", "JPY"]

# setup mongo connection ###################################

# connecting to mongo in local
connection = MongoClient("localhost", 27017)
# creating the database called index
db = connection.index

# naming the existing ecb_raw collection as a variable
collection_ECB_raw = db.ecb_raw

# ecb_raw collection check ###########################################

# defining the array containing all the dates from start_period until today
date_tot = data_setup.date_gen(start_period)

# converting the timestamp dates into strings
date_tot = [str(single_date) for single_date in date_tot]

# searching only the last five days
last_five_days = date_tot[(len(date_tot) - 5): len(date_tot)]

# defining the MongoDB path where to look for the rates
database = "index"
collection = "ecb_raw"
query = {"CURRENCY": "USD"}

# retrieving data from MongoDB 'index' and 'ecb_raw' collection
matrix = mongo.query_mongo(database, collection, query)
Example no. 15
# standard library import
from datetime import datetime

# local import
import cryptoindex.data_setup as data_setup
import cryptoindex.data_download as data_download
from cryptoindex.mongo_setup import (mongo_coll, mongo_coll_drop,
                                     mongo_indexing)
from cryptoindex.config import (START_DATE, PAIR_ARRAY, CRYPTO_ASSET,
                                EXCHANGES)

# ################### initial settings #########################

# set end_date as today, otherwise comment and choose an end_date
end_date = datetime.now().strftime("%m-%d-%Y")

# define the variable containing all the dates from start_date to today.
# the dates are expressed as timestamps and each day refers to 12:00 am UTC
reference_date_vector = data_setup.date_gen(START_DATE)

# ################# setup MongoDB connection #####################

mongo_coll_drop("cw_hist_d")

mongo_indexing()

collection_dict_upload = mongo_coll()

# ################# downloading and storing part ################

for Crypto in CRYPTO_ASSET:
    print(Crypto)

    ccy_pair_array = []
Example no. 16
def cw_daily_operation(day=None):
    '''
    @param day: either None or a date string in "%Y-%m-%d" format
    '''

    # create the indexing for MongoDB and define the variable containing the
    # MongoDB collections where to upload data
    mongo_indexing()

    date_tot = date_gen(START_DATE)
    # converting the timestamp dates into strings
    date_tot_str = [str(single_date) for single_date in date_tot]

    day_before_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_cw_raw", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            try:
                cw_rawdata_daily = cw_daily_download(day_before_TS +
                                                     DAY_IN_SEC)
            except Exception:
                error("Exception occurred", exc_info=True)
                info('Daily download from CryptoWatch failed')
            else:
                mongo_upload(cw_rawdata_daily, "collection_cw_raw")

        else:

            print("The CW_rawdata collection on MongoDB is updated.")

        if daily_check_mongo("coll_vol_chk", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            mat_vol_fix = daily_pair_vol_fix(day_before_TS)

            try:

                mongo_upload(mat_vol_fix, "collection_cw_vol_check")

            except AttributeError:
                pass

        else:

            mat_vol_fix = []
            print(
                "Message: No need to fix pair volume. The collection on MongoDB is updated."
            )

        # new and dead crypto-fiat key management

        daily_complete_df = cw_daily_key_mngm(mat_vol_fix, day_before_TS,
                                              date_tot_str)

        # missing data fixing

        if daily_check_mongo("coll_cw_clean", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            daily_fixed_df = daily_fix_miss_op(daily_complete_df, day,
                                               "coll_cw_clean")
            mongo_upload(daily_fixed_df, "collection_cw_clean")

        else:

            print("Message: The collection cw_clean on MongoDB is updated.")

        if daily_check_mongo("coll_stable_rate",
                             {"Currency": "USDT/USD"}) is False:

            usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean",
                                                     str(day_before_TS))

            mongo_upload(usdt_rates, "collection_stable_rate")
            mongo_upload(usdc_rates, "collection_stable_rate")

        else:

            print("The stable_rates_collection on MongoDB is already updated.")

        if daily_check_mongo("coll_cw_conv", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            converted_data = cw_daily_conv_op(day_before_TS)
            mongo_upload(converted_data, "collection_cw_converted")

        else:

            print(
                "Message: The cw_converted_data collection on MongoDB is already updated."
            )

        if daily_check_mongo("coll_cw_final", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            mongo_upload(converted_data, "collection_cw_final_data")

        else:

            print(
                "The CW_final_data collection on MongoDB is already updated.")

    else:

        cw_rawdata_daily = cw_daily_download(day_before_TS)
        mongo_upload(cw_rawdata_daily, "collection_cw_raw")
        mat_vol_fix = daily_pair_vol_fix(day_before_TS)
        try:

            mongo_upload(mat_vol_fix, "collection_cw_vol_check")

        except AttributeError:
            pass

        daily_complete_df = cw_daily_key_mngm(mat_vol_fix, day_before_TS,
                                              date_tot_str)
        daily_fixed_df = daily_fix_miss_op(daily_complete_df, day,
                                           "coll_cw_clean")
        mongo_upload(daily_fixed_df, "collection_cw_clean")
        usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean",
                                                 str(day_before_TS))
        mongo_upload(usdt_rates, "collection_stable_rate")
        mongo_upload(usdc_rates, "collection_stable_rate")
        converted_data = cw_daily_conv_op(day_before_TS)
        mongo_upload(converted_data, "collection_cw_converted")
        mongo_upload(converted_data, "collection_cw_final_data")

    return None
Example no. 17
collection_dict_upload = mongo_coll()

# ################### DATE SETTINGS ####################

# define today and yesterday dates as timestamps
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
y_TS = today_TS - DAY_IN_SEC

# # define end date as as MM-DD-YYYY
# end_date = datetime.now().strftime("%m-%d-%Y")

# define the variable containing all the dates from start_date to yesterday.
# the dates are expressed as timestamps and each day refers to 12:00 am UTC
reference_date_vector = date_gen(START_DATE)

# define all the useful arrays containing the rebalance
# start date, stop date, board meeting date
rebalance_start_date = start_q(START_DATE)
rebalance_stop_date = stop_q(rebalance_start_date)
board_date = board_meeting_day()
board_date_eve = day_before_board()
next_rebalance_date = next_start()
print(rebalance_start_date)
print(rebalance_stop_date)
print(next_rebalance_date)
print(board_date_eve)
# defining time variables
last_reb_start = str(int(rebalance_start_date[len(rebalance_start_date) - 1]))
next_reb_stop = str(int(rebalance_stop_date[len(rebalance_stop_date) - 1]))
Example no. 18
def index_hist_uploader(crypto_asset_price, crypto_asset_vol, exc_vol_tot,
                        price_ret, weights_for_board, first_logic_matrix_df,
                        second_logic_matrix_df, ewma_df, double_checked_EWMA,
                        syntethic, syntethic_relative_matrix, divisor_array,
                        reshaped_divisor, index_values, index_1000_base):

    # creating the array with human readable Date
    ref_date_arr = date_gen()
    human_date = timestamp_to_human(ref_date_arr)

    # put the "Crypto_Asset_Prices" dataframe on MongoDB
    crypto_asset_price["Date"] = human_date
    mongo_upload(crypto_asset_price,
                 "collection_price",
                 reorder="Y",
                 column_set_val="complete")

    # put the "Crypto_Asset_Volumes" dataframe on MongoDB
    crypto_asset_vol["Date"] = human_date
    mongo_upload(crypto_asset_vol,
                 "collection_volume",
                 reorder="Y",
                 column_set_val="complete")

    # put the exchange volumes on MongoDB
    mongo_upload(exc_vol_tot, "collection_all_exc_vol")

    # put the "price_ret" dataframe on MongoDB
    price_ret["Date"] = human_date
    mongo_upload(price_ret,
                 "collection_price_ret",
                 reorder="Y",
                 column_set_val="complete")

    # put the "weights" dataframe on MongoDB
    weight_human_date = timestamp_to_human(weights_for_board["Time"])
    weights_for_board["Date"] = weight_human_date
    mongo_upload(weights_for_board,
                 "collection_weights",
                 reorder="Y",
                 column_set_val="complete")
    print(weights_for_board)
    # put the first logic matrix on MongoDB
    first_date = timestamp_to_human(first_logic_matrix_df["Time"])
    first_logic_matrix_df["Date"] = first_date
    mongo_upload(first_logic_matrix_df,
                 "collection_logic_one",
                 reorder="Y",
                 column_set_val="complete")

    # put the second logic matrix on MongoDB
    second_date = timestamp_to_human(second_logic_matrix_df["Time"])
    second_logic_matrix_df["Date"] = second_date
    mongo_upload(second_logic_matrix_df,
                 "collection_logic_two",
                 reorder="Y",
                 column_set_val="complete")

    # put the EWMA dataframe on MongoDB
    ewma_df["Date"] = human_date
    ewma_df["Time"] = ref_date_arr
    mongo_upload(ewma_df,
                 "collection_EWMA",
                 reorder="Y",
                 column_set_val="complete")

    # put the double checked EWMA on MongoDB
    double_checked_EWMA["Date"] = human_date
    mongo_upload(double_checked_EWMA,
                 "collection_EWMA_check",
                 reorder="Y",
                 column_set_val="complete")

    # put the synth matrix on MongoDB
    syntethic["Date"] = human_date
    syntethic["Time"] = ref_date_arr
    mongo_upload(syntethic,
                 "collection_synth",
                 reorder="Y",
                 column_set_val="complete")

    # put the relative synth matrix on MongoDB
    syntethic_relative_matrix["Date"] = human_date
    syntethic_relative_matrix["Time"] = ref_date_arr
    mongo_upload(syntethic_relative_matrix,
                 "collection_relative_synth",
                 reorder="Y",
                 column_set_val="complete")

    # put the divisor array on MongoDB
    divisor_date = timestamp_to_human(divisor_array["Time"])
    divisor_array["Date"] = divisor_date
    mongo_upload(divisor_array,
                 "collection_divisor",
                 reorder="Y",
                 column_set_val="divisor")

    # put the reshaped divisor array on MongoDB
    reshaped_divisor_date = timestamp_to_human(reshaped_divisor["Time"])
    reshaped_divisor["Date"] = reshaped_divisor_date
    mongo_upload(reshaped_divisor,
                 "collection_divisor_reshaped",
                 reorder="Y",
                 column_set_val="divisor")

    # put the index level raw on MongoDB
    index_values["Date"] = human_date
    index_values["Time"] = ref_date_arr
    mongo_upload(index_values,
                 "collection_index_level_raw",
                 reorder="Y",
                 column_set_val="index")

    # put the index level 1000 on MongoDB
    index_1000_base["Date"] = human_date
    index_1000_base["Time"] = ref_date_arr
    mongo_upload(index_1000_base,
                 "collection_index_level_1000",
                 reorder="Y",
                 column_set_val="index")
Example no. 19
def index_hist_loop(data_matrix, crypto_asset, exc_list, pair_list):

    ref_date_arr = date_gen()

    # initialize the matrices that will contain the prices
    # and volumes of all the single crypto assets
    crypto_asset_price = np.matrix([])
    crypto_asset_vol = np.matrix([])

    logic_matrix_one = np.matrix([])

    # initialize the matrix that contains the volumes per Exchange
    exc_head = [
        "coinbase-pro",
        "poloniex",
        "bitstamp",
        "gemini",
        "bittrex",
        "kraken",
        "bitflyer",
    ]
    exc_head.append("Time")
    exc_head.append("Crypto")
    exc_vol_tot = pd.DataFrame(columns=exc_head)

    for single_crypto in crypto_asset:

        # initialize useful matrices
        crypto_fiat_arr = []
        exc_price = np.matrix([])
        exc_vol = np.matrix([])
        exc_price_vol = np.matrix([])

        crypto_fiat_arr = crypto_fiat_gen(single_crypto, crypto_fiat_arr,
                                          pair_list)

        for exchange in exc_list:
            print(exchange)
            # initialize the matrices that will contain the data related
            # to all currency pairs for the single exchange
            ccy_fiat_price_vol = np.matrix([])
            ccy_fiat_vol = np.matrix([])
            ccy_fiat_price = np.matrix([])

            for cp in crypto_fiat_arr:
                print(cp)
                # selecting the data referring to a specific exchange and crypto-fiat pair
                matrix = data_matrix.loc[(data_matrix["Exchange"] == exchange)
                                         & (data_matrix["Pair"] == cp)]
                print(matrix.shape)
                if matrix.empty is False:

                    price = np.array((matrix["Close Price"]))
                    volume = np.array((matrix["Pair Volume"]))
                    price_vol = np.array(price * volume)

                    # at every "cp" the loop adds a column to the matrices
                    # referring to the single "exchange"
                    if ccy_fiat_price_vol.size == 0:
                        ccy_fiat_price_vol = price_vol
                        ccy_fiat_vol = volume
                    else:
                        ccy_fiat_price_vol = np.column_stack(
                            (ccy_fiat_price_vol, price_vol))
                        ccy_fiat_vol = np.column_stack((ccy_fiat_vol, volume))

                else:
                    pass

            # computing the volume weighted average price of the single exchange

            ccy_fiat_price, ccy_fiat_vol, ccy_fiat_price_vol = loop_crypto_fiat_hist(
                ccy_fiat_vol, ccy_fiat_price_vol, ref_date_arr)

            exc_price, exc_vol, exc_price_vol = loop_single_exc_hist(
                exc_price, exc_vol, exc_price_vol, ccy_fiat_price,
                ccy_fiat_vol, ccy_fiat_price_vol, ref_date_arr)

        (exc_price, exc_vol, exc_vol_tot,
         logic_matrix_one) = loop_exc_value_hist(single_crypto, exc_price,
                                                 exc_vol, exc_price_vol,
                                                 exc_vol_tot, exc_list,
                                                 logic_matrix_one,
                                                 ref_date_arr)

        # at every loop, build the matrices of all the single crypto assets:
        # crypto_asset_price contains the prices of all the cryptocurrencies,
        # crypto_asset_vol contains the volumes of all the cryptocurrencies
        crypto_asset_price, crypto_asset_vol = loop_crypto_asset(
            exc_price, exc_vol, crypto_asset_price, crypto_asset_vol)

    return crypto_asset_price, crypto_asset_vol, exc_vol_tot, logic_matrix_one
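The "volume weighted average price of the single exchange" mentioned in the comments is, for each date, the sum of price*volume across the crypto-fiat pairs divided by the sum of the volumes. The compact numpy sketch below illustrates that step with made-up numbers; it is not the loop_crypto_fiat_hist implementation.

import numpy as np

# one row per date, one column per crypto-fiat pair
price = np.array([[7200.0, 7210.0], [7300.0, 7290.0]])
volume = np.array([[10.0, 5.0], [8.0, 12.0]])

price_vol = price * volume
# volume weighted average price per date, guarding against zero total volume
tot_vol = volume.sum(axis=1)
vwap = np.divide(price_vol.sum(axis=1), tot_vol,
                 out=np.zeros_like(tot_vol), where=tot_vol != 0)
print(vwap)  # [7203.33..., 7294.0]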
Example no. 20
]
# exchange complete = [ 'coinbase-pro', 'poloniex', 'bitstamp',
# 'gemini', 'bittrex', 'kraken', 'bitflyer']

start_date = "01-01-2016"
EXC_start_date = "04-17-2020"

day_in_sec = 86400

# set today
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())

# define the array containing the dates where the index uses CW feed data
CW_date_arr = data_setup.date_gen(start_date, EXC_start_date)
CW_date_str = [str(date) for date in CW_date_arr]

# ######################## setup MongoDB connection ###########################

# connecting to mongo in local
connection = MongoClient("localhost", 27017)
# creating the database called index
db = connection.index

# drop the pre-existing collection (if there is one)
db.index_data_feed.drop()

# creating the empty collection index_data_feed within the database index
db.index_data_feed.create_index([("id", -1)])
collection_feed = db.index_data_feed
Example no. 21
collection_dict_upload.get("collection_cw_clean").delete_many(my2)
##
# ############################ missing days check #############################

# this section checks whether the CW_clean data contains the new values of the
# day; the check is based on a 5-day window

# assign the dates of interest to variables
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
y_TS = today_TS - DAY_IN_SEC
two_before_TS = y_TS - DAY_IN_SEC

# defining the array containing all the dates from START_DATE until today
date_complete_int = date_gen(START_DATE)
# converting the timestamp dates into strings
date_tot = [str(single_date) for single_date in date_complete_int]

# searching only the last five days
last_five_days = date_tot[(len(date_tot) - 5): len(date_tot)]

# defining the details to query on MongoDB
query = {"Exchange": "coinbase-pro", "Pair": "ethusd"}

# retrieving the wanted data on MongoDB collection
matrix = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_clean"), query)

# checking the time column
date_list = np.array(matrix["Time"])
last_five_days_mongo = date_list[(len(date_list) - 5): len(date_list)]
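The actual comparison between the expected last five days and the ones stored on MongoDB is a simple difference between the two lists; the minimal sketch below shows that check with plain Python and example timestamps, rather than the package's Diff helper.

# expected daily timestamps (as strings) versus the ones found on MongoDB
last_five_days = ["1609459200", "1609545600", "1609632000", "1609718400", "1609804800"]
last_five_days_mongo = ["1609459200", "1609545600", "1609632000", "1609718400"]

missing_days = [day for day in last_five_days if day not in last_five_days_mongo]
if missing_days:
    print("Missing days to re-download:", missing_days)
else:
    print("The collection is up to date.")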
Example no. 22
# define the start date as MM-DD-YYYY
start_date = "01-01-2016"

# define today in various formats
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
y_TS = today_TS - 86400
two_before_TS = y_TS - 86400
today_human = data_setup.timestamp_to_human([today_TS])
yesterday_human = data_setup.timestamp_to_human([y_TS])
two_before_human = data_setup.timestamp_to_human([two_before_TS])

# define the variable containing all the dates from start_date to today.
# the dates are expressed as timestamps and each day refers to 12:00 am UTC
reference_date_vector = data_setup.date_gen(start_date)

# define all the useful arrays containing the rebalance start
# date, stop date, board meeting date
rebalance_start_date = calc.start_q("01-01-2016")
rebalance_stop_date = calc.stop_q(rebalance_start_date)
board_date = calc.board_meeting_day()
board_date_eve = calc.day_before_board()
next_rebalance_date = calc.next_start()

# call the function that creates an object containing
# the pairs of quarterly start-stop dates
quarterly_date = calc.quarterly_period()

# defining time variables
curr_reb_start = str(int(rebalance_start_date[len(rebalance_start_date) - 1]))
Example no. 23
    else:
        pass

    return None

# ########## HISTORICAL EXC RAW DATA OPERATION ##############


# set today
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
y_TS = today_TS - DAY_IN_SEC

# creating the timestamp array at 12:00 AM
date_array = date_gen(EXC_START_DATE)
date_array_str = [str(el) for el in date_array]


def all_crypto_fiat_gen():

    # defining the crypto-fiat pairs array
    all_crypto_fiat_array = []

    for crypto in CRYPTO_ASSET:

        for fiat in PAIR_ARRAY:

            all_crypto_fiat_array.append(crypto.lower() + fiat)

    return all_crypto_fiat_array
Example no. 24
from datetime import datetime, timezone

from cryptoindex.data_setup import (date_gen)
from cryptoindex.mongo_setup import (query_mongo, mongo_coll, mongo_indexing,
                                     mongo_upload)
from cryptoindex.config import (START_DATE, DAY_IN_SEC, MONGO_DICT, DB_NAME)

# ############# INITIAL SETTINGS ################################

# set today
today_str = datetime.now().strftime("%Y-%m-%d")
today = datetime.strptime(today_str, "%Y-%m-%d")
today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
y_TS = today_TS - DAY_IN_SEC
two_before_TS = y_TS - DAY_IN_SEC

# defining the array containing all the dates from START_DATE until today
date_complete_int = date_gen(START_DATE)
# converting the timestamp dates into strings
date_tot = [str(single_date) for single_date in date_complete_int]

# #################### setup mongo connection ##################

# create the indexing for the MongoDB collections within the database index
mongo_indexing()

collection_dict_upload = mongo_coll()

# ################# DAILY DATA CONVERSION MAIN PART ##################

# querying the data from mongo
query_data = {"Time": str(y_TS)}
query_rate = {"Date": str(y_TS)}
# creating the database called index
db = connection.index

# drop the pre-existing collection (if there is one)
db.ecb_raw.drop()
# creating the empty collection ecb_raw within the database index
db.ecb_raw.create_index([("id", -1)])
collection_ECB_raw = db.ecb_raw

# ####################### ECB rates raw data download #########################

# create the list of dates to download

date = data_setup.date_gen(start_period,
                           End_Period,
                           timeST="N",
                           clss="list",
                           EoD="N")

date = [
    datetime.strptime(day, "%m-%d-%Y").strftime("%Y-%m-%d") for day in date
]

Exchange_Rate_List = pd.DataFrame()

for i, single_date in enumerate(date):

    # retrieving data from ECB website
    single_date_ex_matrix = data_download.ECB_rates_extractor(
        key_curr_vector, date[i])
    # put a sleep time in order not to overuse the API connection