Example #1
def exc_daily_cleaning(exc_list, day_to_clean):

    previous_day = int(day_to_clean) - DAY_IN_SEC

    # download from MongoDB the exc raw data of yesterday
    q_dict: Dict[str, str] = {}
    q_dict = {"Time": str(day_to_clean + DAY_IN_SEC)}
    daily_mat = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_exc_raw"), q_dict)

    # fixing the "pair volume" information in the daily df
    daily_mat_vol_fix = daily_exc_pair_vol_fix(daily_mat, exc_list)

    # creating different df based on the hour of download
    (daily_mat_00, daily_mat_12, _, _) = exc_time_split(daily_mat_vol_fix)

    # shift the Time information back by 86400 seconds (1 day)
    daily_mat_00["Time"] = [str(int(element) - DAY_IN_SEC)
                            for element in daily_mat_00["Time"]]
    daily_mat_12["Time"] = [str(int(element) - DAY_IN_SEC)
                            for element in daily_mat_12["Time"]]

    # completing the daily matrix with dead crypto-fiat pair
    daily_mat_complete = exc_daily_key_mngm(
        daily_mat_00, daily_mat_12, day_to_clean)

    # downloading from MongoDB the matrix referring to the previous day
    q_dict_bfr: Dict[str, int] = {}
    q_dict_bfr = {"Time": int(previous_day)}
    day_bfr_mat = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_exc_clean"), q_dict_bfr)

    fixed_daily_mat = exc_daily_fix_op(day_bfr_mat, daily_mat_complete)

    return fixed_daily_mat
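
A minimal sketch of the Time-shift step in exc_daily_cleaning, assuming DAY_IN_SEC = 86400 and a toy DataFrame in place of the data returned by query_mongo; only the string-to-int-and-back shift is reproduced.

import pandas as pd

DAY_IN_SEC = 86400

# toy stand-in for daily_mat_00 / daily_mat_12
daily_mat_00 = pd.DataFrame({"Time": ["1587081600", "1587168000"],
                             "Close Price": [7000.0, 7100.0]})

# shift each string timestamp back by one day, keeping the string dtype
daily_mat_00["Time"] = [str(int(t) - DAY_IN_SEC) for t in daily_mat_00["Time"]]

print(daily_mat_00["Time"].tolist())  # ['1586995200', '1587081600']
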
Example #2
def daily_conv_op(day_to_conv_TS, conversion_fiat=CONVERSION_FIAT,
                  stable=STABLE_COIN, series="CW"):

    # defining the query details
    query_data = {"Time": int(day_to_conv_TS)}
    query_rate = {"Date": str(day_to_conv_TS)}
    query_stable = {"Time": int(day_to_conv_TS)}
    # querying the data from mongo
    matrix_rate = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_ecb_clean"), query_rate)

    if series == "CW":

        matrix_data = query_mongo(
            DB_NAME, MONGO_DICT.get("coll_cw_clean"), query_data)

    elif series == "EXC":

        matrix_data = query_mongo(
            DB_NAME, MONGO_DICT.get("coll_exc_clean"), query_data)

    matrix_rate_stable = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_stable_rate"), query_stable)

    # converting the data into USD
    converted_df = conv_into_usd(
        DB_NAME, matrix_data, matrix_rate,
        matrix_rate_stable, conversion_fiat, stable)

    return converted_df
Example #3
def cw_hist_conv_op(cleaned_df, conv_fiat=CONVERSION_FIAT, stable=STABLE_COIN):

    matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"))
    matrix_rate_stable = query_mongo(DB_NAME,
                                     MONGO_DICT.get("coll_stable_rate"))

    # converting the data into USD
    converted_df = conv_into_usd(DB_NAME, cleaned_df, matrix_rate,
                                 matrix_rate_stable, conv_fiat, stable)

    return converted_df
Example #4
def daily_check_mongo(coll_to_check, query, day_to_check=None, coll_kind=None):

    day_before_TS, _ = days_variable(day_to_check)

    if coll_kind is None:

        query["Time"] = int(day_before_TS)

    elif coll_kind == "ecb_raw":

        query["TIME_PERIOD"] = str(day_before_TS)

    elif coll_kind == "ecb_clean":

        query["Date"] = str(day_before_TS)

    # retrieving the wanted data from the MongoDB collection
    matrix = query_mongo(DB_NAME, MONGO_DICT.get(coll_to_check), query)

    if isinstance(matrix, list):

        res = False

    else:
        res = True

    return bool(res)
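
A minimal sketch of the presence check used by daily_check_mongo, assuming the same convention as query_mongo in this project: an empty query result comes back as a plain list, a non-empty one as a pandas DataFrame.

import pandas as pd

def result_exists(matrix) -> bool:
    # a plain list means the query returned nothing for that day
    return not isinstance(matrix, list)

print(result_exists([]))                                    # False
print(result_exists(pd.DataFrame({"Time": [1586995200]})))  # True
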
Example #5
def index_hist_total(coll_to_use="coll_data_feed",
                     crypto_asset=CRYPTO_ASSET,
                     exc_list=EXCHANGES,
                     pair_list=PAIR_ARRAY):

    # drop the pre-existing collections
    mongo_coll_drop("index_hist")

    # define the mongo indexing
    mongo_indexing()

    data_df = query_mongo(DB_NAME, MONGO_DICT.get(coll_to_use))

    (crypto_asset_price_arr, crypto_asset_vol_arr, exc_vol_tot,
     logic_matrix_one) = index_hist_loop(data_df, crypto_asset, exc_list,
                                         pair_list)
    print(crypto_asset_vol_arr)
    (crypto_asset_price, crypto_asset_vol, price_ret, weights_for_board,
     first_logic_matrix_df, second_logic_matrix_df, ewma_df,
     double_checked_EWMA, syntethic, syntethic_relative_matrix, divisor_array,
     reshaped_divisor, index_values,
     index_1000_base) = index_hist_op(crypto_asset_price_arr,
                                      crypto_asset_vol_arr, logic_matrix_one)

    index_hist_uploader(crypto_asset_price, crypto_asset_vol, exc_vol_tot,
                        price_ret, weights_for_board, first_logic_matrix_df,
                        second_logic_matrix_df, ewma_df, double_checked_EWMA,
                        syntethic, syntethic_relative_matrix, divisor_array,
                        reshaped_divisor, index_values, index_1000_base)
Example #6
def cw_hist_cleaning(vol_fixed_df,
                     start_date,
                     crypto_list=CRYPTO_ASSET,
                     exc_list=EXCHANGES):

    tot_date_arr = date_gen(start_date)

    cleaned_df = pd.DataFrame(columns=CLEAN_DATA_HEAD)

    for crypto in crypto_list:

        pair_arr = crypto_fiat_pair_gen(crypto)

        for exchange in exc_list:

            ex_matrix = vol_fixed_df.loc[vol_fixed_df["Exchange"] == exchange]

            for cp in pair_arr:

                crypto = cp[:3]

                cp_matrix = ex_matrix.loc[ex_matrix["Pair"] == cp]
                cp_matrix = cp_matrix.drop(columns=["Exchange", "Pair"])
                # checking if the matrix is not empty
                if cp_matrix.shape[0] > 1:

                    # check if the historical series starts at the same date as
                    # the start date; if not, fill the dataframe with zero values
                    cp_matrix = homogenize_series(cp_matrix, tot_date_arr)

                    # check if the series stopped at a certain point in
                    # the past; if so, fill with zeros
                    cp_matrix = homogenize_dead_series(cp_matrix, tot_date_arr)

                    # checking if the matrix has missing data and, if so, fixing it
                    if cp_matrix.shape[0] != tot_date_arr.size:

                        print("fixing")
                        cp_matrix = CW_series_fix_missing(
                            cp_matrix,
                            exchange,
                            cp,
                            tot_date_arr,
                            DB_NAME,
                            MONGO_DICT.get("coll_vol_chk"),
                        )

                    # turn the Time column into strings
                    cp_matrix["Time"] = [str(date) for date in cp_matrix["Time"]]

                    # add exchange and currency_pair column
                    cp_matrix["Exchange"] = exchange
                    cp_matrix["Pair"] = cp
                    reordered = df_reorder(cp_matrix, column_set="conversion")
                    cleaned_df = cleaned_df.append(reordered)

    return cleaned_df
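
A minimal sketch of the homogenization idea, assuming (as the comments above suggest) that dates missing at the start or end of a series are filled with zero values; the project's homogenize_series and homogenize_dead_series helpers are not shown, so a plain pandas reindex stands in for them.

import pandas as pd

tot_date_arr = [1586908800, 1586995200, 1587081600, 1587168000]

cp_matrix = pd.DataFrame({"Time": [1586995200, 1587081600],
                          "Close Price": [7000.0, 7100.0],
                          "Pair Volume": [1.0e6, 1.2e6]})

# cover every date of the full calendar, inserting zero rows where data is missing
homogenized = (cp_matrix.set_index("Time")
               .reindex(tot_date_arr, fill_value=0.0)
               .reset_index())

print(homogenized)
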
Example #7
def hist_data_feed_op():

    # define the array containing the dates for which the index uses CW feed data
    CW_date_arr = date_gen(START_DATE, EXC_START_DATE)
    CW_date_str = [str(date) for date in CW_date_arr]

    # drop the pre-existing collection (if there is one)
    mongo_coll_drop("index_feed")

    # downloading the EXC series from MongoDB
    EXC_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_final"))
    EXC_series = EXC_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    # downloading the CW series from MongoDB and selecting only the dates
    # from 2016-01-01 to 2020-04-17
    CW_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_final"))
    CW_series["Time"] = [str(x) for x in CW_series["Time"]]
    print("CW")
    print(CW_series)
    CW_sub_series = CW_series.loc[CW_series.Time.isin(CW_date_str)]
    print(CW_sub_series)
    CW_sub_series = CW_sub_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]
    CW_sub_series["Time"] = [int(x) for x in CW_sub_series["Time"]]
    CW_sub_series.reset_index(drop=True, inplace=True)
    print(CW_sub_series)

    # creating a unique dataframe containing the two different data sources
    data_feed = CW_sub_series.append(EXC_series, sort=True)
    data_feed.reset_index(drop=True, inplace=True)

    data_feed = data_feed[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    print(data_feed)
    data_feed = homogeneize_feed(data_feed)
    print("post hom")
    print(data_feed)

    # put the converted data on MongoDB
    mongo_upload(data_feed, "collection_data_feed")

    return None
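
A minimal sketch of the date filtering and stacking done in hist_data_feed_op, with toy data and pd.concat in place of the DataFrame.append call used above (append was removed in recent pandas releases); the column names follow the snippet.

import pandas as pd

CW_date_str = ["1451606400", "1451692800"]

CW_series = pd.DataFrame({"Time": ["1451606400", "1451692800", "1587081600"],
                          "Close Price": [430.0, 433.0, 7000.0]})
EXC_series = pd.DataFrame({"Time": ["1587168000"], "Close Price": [7100.0]})

# keep only the CW dates, then stack the two sources into one feed
CW_sub_series = CW_series.loc[CW_series.Time.isin(CW_date_str)]
data_feed = pd.concat([CW_sub_series, EXC_series], ignore_index=True)

print(data_feed)
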
Example #8
def cw_exc_merging(start_date=START_DATE, exc_start=EXC_START_DATE,
                   db=DB_NAME, coll_cw="coll_cw_final",
                   coll_exc="coll_exc_final"):

    cw_date_arr = date_gen(start_date, exc_start, EoD="N")

    exc_series = query_mongo(db, MONGO_DICT.get(coll_exc))
    exc_part = df_reorder(exc_series, column_set="conversion")

    # downloading the CW series from MongoDB and selecting only the dates
    # from 2016-01-01 to 2020-04-17
    cw_series = query_mongo(db, MONGO_DICT.get(coll_cw))
    cw_part = cw_series.loc[cw_series.Time.isin(cw_date_arr)]
    cw_part = df_reorder(cw_part, column_set="conversion")

    # creating a unique dataframe containing the two different data sources
    merged_series = cw_part.append(exc_part, sort=True)
    merged_series["Time"] = [int(d) for d in merged_series["Time"]]

    return merged_series
Example #9
def exc_hist_conv(exc_fix_df):

    # querying the rates collection from MongoDB
    matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"))
    matrix_rate = matrix_rate.rename({"Date": "Time"}, axis="columns")
    matrix_rate = matrix_rate.loc[matrix_rate.Time.isin(date_array_str)]

    # querying the stable rates collection from MongoDB
    matrix_rate_stable = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_stable_rate"))
    matrix_rate_stable = matrix_rate_stable.loc[matrix_rate_stable.Time.isin(
        date_array_str)]

    converted_data = conv_into_usd(DB_NAME, exc_fix_df, matrix_rate,
                                   matrix_rate_stable, CONVERSION_FIAT, STABLE_COIN)

    converted_data["Time"] = [int(element)
                              for element in converted_data["Time"]]

    return converted_data
Example #10
def exc_key_mngmt(exc_clean_df):

    # downloading from MongoDB the matrix containing the exchange-pair logic values
    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_keys"))

    # creating the exchange-pair couples key for the daily matrix
    exc_clean_df["key"] = exc_clean_df["Exchange"] + "&" + exc_clean_df["Pair"]

    # ## adding the dead series to the daily values

    # selecting only the exchange-pair couples present in the historical series
    key_present = logic_key.loc[logic_key.logic_value == 1]
    key_present = key_present.drop(columns=["logic_value"])

    exc_clean_df = exc_clean_df.loc[exc_clean_df.Time != str(today_TS)]

    # selecting the last day of the EXC "historical" series
    last_day_with_val = max(exc_clean_df.Time)
    last_day = exc_clean_df.loc[exc_clean_df.Time
                                == last_day_with_val]

    # applying a left join between the present keys matrix and the last_day
    # matrix; this operation returns a matrix containing all the keys in
    # "key_present", with NaN where a key is missing in "last_day"
    merged = pd.merge(key_present, last_day, on="key", how="left")

    # selecting only the absent keys
    merg_absent = merged.loc[merged["Close Price"].isnull()]

    header = CLEAN_DATA_HEAD
    header.extend(["key"])

    # create the historical series for each selected element
    for k in merg_absent["key"]:

        mat_to_add = pd.DataFrame(columns=header)
        mat_to_add["Time"] = date_array_str
        split_val = k.split("&")
        mat_to_add["Exchange"] = split_val[0]
        mat_to_add["Pair"] = split_val[1]
        mat_to_add["Close Price"] = 0.0
        mat_to_add["Crypto Volume"] = 0.0
        mat_to_add["Pair Volume"] = 0.0
        exc_clean_df = exc_clean_df.append(mat_to_add)

    # uploading the cleaned data on MongoDB in the collection EXC_cleandata
    exc_clean_df = exc_clean_df.drop(columns=["key"])
    exc_clean_df["Time"] = [int(element) for element in exc_clean_df["Time"]]

    # deleting the 17/04/2020 from the df (if present)
    exc_complete_df = exc_clean_df.loc[exc_clean_df.Time != 1587081600]

    return exc_complete_df
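
A minimal sketch of the dead-key detection in exc_key_mngmt: a left join of the complete key list against the last day's data leaves NaN for the keys that have disappeared, which are then rebuilt as zero-valued series. Toy keys only.

import pandas as pd

key_present = pd.DataFrame({"key": ["kraken&btcusd", "bittrex&btcusd"]})
last_day = pd.DataFrame({"key": ["kraken&btcusd"], "Close Price": [7000.0]})

merged = pd.merge(key_present, last_day, on="key", how="left")
merg_absent = merged.loc[merged["Close Price"].isnull()]

print(merg_absent["key"].tolist())  # ['bittrex&btcusd']
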
Example #11
def cw_daily_conv_op(day_to_conv_TS):

    # defining the query details
    query_data = {"Time": int(day_to_conv_TS)}
    query_rate = {"Date": str(day_to_conv_TS)}
    query_stable = {"Time": int(day_to_conv_TS)}
    # querying the data from mongo
    matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"),
                              query_rate)
    matrix_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_clean"),
                              query_data)
    matrix_rate_stable = query_mongo(DB_NAME,
                                     MONGO_DICT.get("coll_stable_rate"),
                                     query_stable)

    # converting the data into USD
    converted_df = conv_into_usd(DB_NAME, matrix_data, matrix_rate,
                                 matrix_rate_stable, CONVERSION_FIAT,
                                 STABLE_COIN)

    return converted_df
Example #12
def cw_daily_key_mngm(volume_checked_df, time_to_check, date_tot_str):

    if isinstance(volume_checked_df, list):

        volume_checked_tot = query_mongo(DB_NAME,
                                         MONGO_DICT.get("coll_vol_chk"))

        last_day_with_val = max(volume_checked_tot.Time)

        volume_checked_df = volume_checked_tot.loc[volume_checked_tot.Time ==
                                                   last_day_with_val]

    # downloading from MongoDB the matrix containing the exchange-pair
    # logic values
    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_keys"))

    # creating the exchange-pair couples key for the daily matrix
    volume_checked_df["key"] = volume_checked_df["Exchange"] + \
        "&" + volume_checked_df["Pair"]

    # selecting only the exchange-pair couples present in the historical series
    key_present = logic_key.loc[logic_key.logic_value == 1]
    key_present = key_present.drop(columns=["logic_value"])
    # applying a left join between the present keys matrix and the daily
    # matrix; this operation returns a matrix containing all the keys in
    # "key_present" and NaN where some keys are missing
    merged = pd.merge(key_present, volume_checked_df, on="key", how="left")
    # assigning some columns values and substituting NaN with 0
    # in the "merged" df
    merged["Time"] = int(time_to_check)
    split_val = merged["key"].str.split("&", expand=True)
    merged["Exchange"] = split_val[0]
    merged["Pair"] = split_val[1]
    merged.fillna(0, inplace=True)

    # checking for potential new exchange-pair couples

    cw_new_key_mngm(logic_key, volume_checked_df, time_to_check, date_tot_str)

    return merged
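
A minimal sketch of the key handling in cw_daily_key_mngm: the "exchange&pair" key is split back into its two columns and the NaN left by the left join are replaced with 0. Toy values only.

import pandas as pd

merged = pd.DataFrame({"key": ["kraken&btcusd", "bittrex&btceur"],
                       "Close Price": [7000.0, None]})

split_val = merged["key"].str.split("&", expand=True)
merged["Exchange"] = split_val[0]
merged["Pair"] = split_val[1]
merged.fillna(0, inplace=True)

print(merged)
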
Example #13
def index_norm_logic_op(crypto_asset, daily_ewma):

    past_start_quarter_list = start_q()
    last_start_q = past_start_quarter_list[len(past_start_quarter_list) - 1]
    # downloading from MongoDB the current logic matrices (1 and 2)
    logic_one = query_mongo(DB_NAME, MONGO_DICT.get("coll_log1"))
    # taking only the logic value referred to the current period
    # current_logic_one = logic_one.iloc[[len(logic_one["Date"]) - 2]]
    current_logic_one = logic_one.iloc[logic_one.Time == int(last_start_q)]
    current_logic_one = current_logic_one.drop(columns=["Date", "Time"])
    logic_two = query_mongo(DB_NAME, MONGO_DICT.get("coll_log2"))
    # taking only the logic value referred to the current period
    # current_logic_two = logic_two.iloc[[len(logic_two["Date"]) - 2]]
    current_logic_two = logic_two.iloc[logic_two.Time == int(last_start_q)]
    current_logic_two = current_logic_two.drop(columns=["Date", "Time"])

    # computing the ewma checked with both the first and second logic matrices
    daily_ewma_first_check = np.array(daily_ewma) * np.array(current_logic_one)
    daily_ewma_double_check = daily_ewma_first_check * \
        np.array(current_logic_two)
    daily_ewma_double_check = pd.DataFrame(daily_ewma_double_check,
                                           columns=crypto_asset)

    return daily_ewma_double_check
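
A minimal sketch of the double logic check in index_norm_logic_op: the EWMA row is multiplied element-wise by two 0/1 logic vectors, so any asset excluded by either matrix is zeroed out. Toy numbers only.

import numpy as np
import pandas as pd

crypto_asset = ["BTC", "ETH", "LTC"]
daily_ewma = np.array([[1.2e9, 4.0e8, 5.0e7]])
current_logic_one = np.array([[1, 1, 0]])
current_logic_two = np.array([[1, 0, 1]])

daily_ewma_double_check = pd.DataFrame(
    daily_ewma * current_logic_one * current_logic_two, columns=crypto_asset)

print(daily_ewma_double_check)  # only BTC survives both checks
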
Example #14
def daily_fix_miss_op(daily_complete_df, day, collection):

    _, two_before_TS = days_variable(day)
    # defining the query details
    q_dict_time: Dict[str, int] = {}
    q_dict_time = {"Time": int(two_before_TS)}

    # downloading from MongoDB the matrix referring to the previous day
    day_bfr_mat = query_mongo(DB_NAME, MONGO_DICT.get(collection), q_dict_time)

    # add the "key" column
    day_bfr_mat["key"] = day_bfr_mat["Exchange"] + "&" + day_bfr_mat["Pair"]

    # looping through all the daily keys looking for potential missing values
    for key_val in day_bfr_mat["key"]:

        new_val = daily_complete_df.loc[daily_complete_df.key == key_val]

        # if the new 'Close Price' referred to a certain key is 0, the script
        # checks the previous day's value: if it is 0 too then pass, otherwise
        # the values related to the selected key need to be corrected
        if np.array(new_val["Close Price"]) == 0.0:

            d_before_val = day_bfr_mat.loc[day_bfr_mat.key == key_val]

            if np.array(d_before_val["Close Price"]) != 0.0:

                price_var = daily_fix_miss(new_val, daily_complete_df,
                                           day_bfr_mat)
                # applying the weighted variation to the previous day's 'Close Price'
                new_price = (1 + price_var) * d_before_val["Close Price"]
                # changing the 'Close Price' value using the new computed price
                daily_complete_df.loc[daily_complete_df.key == key_val,
                                      "Close Price"] = new_price

            else:
                pass

        else:
            pass

    daily_complete_df = daily_complete_df.drop(columns=["key"])
    daily_complete_df["Time"] = [int(d) for d in daily_complete_df["Time"]]

    daily_fixed_df = daily_complete_df

    return daily_fixed_df
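
A minimal numeric sketch of the price reconstruction step in daily_fix_miss_op, assuming daily_fix_miss returns a weighted price variation (the 0.015 below is purely hypothetical); only the final formula applied to the previous day's close is reproduced.

price_var = 0.015          # hypothetical variation returned by daily_fix_miss
day_before_close = 7000.0  # previous day's 'Close Price' for the same key

new_price = (1 + price_var) * day_before_close
print(new_price)  # 7105.0
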
Example #15
def new_synth_op(crypto_asset, daily_return_df, synt_ptf_value):

    past_start_quarter_list = start_q()
    new_start_q = past_start_quarter_list[len(past_start_quarter_list) - 1]

    # downloading from mongoDB the current weights
    tot_weights = query_mongo(DB_NAME, MONGO_DICT.get("coll_weights"))
    new_weights = tot_weights.loc[tot_weights.Time == int(new_start_q),
                                  crypto_asset]

    # daily synthetic matrix computation
    daily_ret_arr = np.array(daily_return_df[crypto_asset])
    synt_weights = np.array(new_weights) * synt_ptf_value
    new_synth = synt_weights * (1 + daily_ret_arr)
    daily_synthh = pd.DataFrame(new_synth, columns=crypto_asset)

    return daily_synthh
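
A minimal sketch of the synthetic portfolio computation in new_synth_op, with toy weights and returns: each weight is scaled by the portfolio value and grown by the daily return of its asset.

import numpy as np
import pandas as pd

crypto_asset = ["BTC", "ETH"]
synt_ptf_value = 100.0
new_weights = np.array([0.7, 0.3])
daily_ret_arr = np.array([0.02, -0.01])

new_synth = new_weights * synt_ptf_value * (1 + daily_ret_arr)
daily_synth = pd.DataFrame([new_synth], columns=crypto_asset)

print(daily_synth)  # BTC 71.4, ETH 29.7
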
Example #16
def check_missing(tot_date_arr, coll_to_check, query, days_to_check=10):

    # selecting the last days_to_check days and putting them into an array
    last_days = tot_date_arr[(len(tot_date_arr) -
                              days_to_check):len(tot_date_arr)]
    print(last_days)
    # retrieving the wanted data from the MongoDB collection
    matrix = query_mongo(DB_NAME, MONGO_DICT.get(coll_to_check), query)

    # checking the time column and selecting only the last days_to_check days
    # retrieved from the MongoDB collection
    try:

        date_list = np.array(matrix["Time"])

    except KeyError:

        try:

            date_list = np.array(matrix["TIME_PERIOD"])

        except KeyError:

            date_list = np.array(matrix["Date"])

    to_del = np.array([0])
    date_list = np.setdiff1d(date_list, to_del)

    last_days_db = date_list[(len(date_list) - days_to_check):len(date_list)]

    # last_days_db_str = [str(single_date)
    #                     for single_date in last_days_db]

    # finding the dates to download as the difference between the
    # complete array of dates and the dates now stored on MongoDB
    date_to_add = Diff(last_days, last_days_db)
    print(date_to_add)

    if len(date_to_add) > 9:

        date_to_add = None

    return date_to_add
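
A minimal sketch of the missing-date detection in check_missing, assuming that the project's Diff helper returns the dates present in the full calendar but absent from MongoDB; np.setdiff1d stands in for it here.

import numpy as np

last_days = np.array([1586908800, 1586995200, 1587081600])
last_days_db = np.array([1586908800, 1587081600])

date_to_add = np.setdiff1d(last_days, last_days_db)
print(date_to_add)  # [1586995200]
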
Example #17
def exc_hist_op():

    mongo_coll_drop("exc")

    mongo_indexing()

    # defining the crypto_fiat array
    crypto_fiat_arr = all_crypto_fiat_gen()
    # querying all raw data from EXC_rawdata
    exc_raw_df = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_raw"))

    midnight_clean = exc_initial_clean(exc_raw_df, crypto_fiat_arr)
    mongo_upload(midnight_clean, "collection_exc_uniform")

    # deleting the values for xrp in the coinbase-pro exchange
    midnight_clean["key"] = midnight_clean["Exchange"] + \
        "&" + midnight_clean["Pair"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "coinbase-pro&xrpusd"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "coinbase-pro&xrpeur"]
    # deleting the values for zec and xmr in the bittrex exchange
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusd"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusdt"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusdc"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&xmrusdt"]

    midnight_clean = midnight_clean.drop(columns="key")

    exc_complete_df = exc_key_mngmt(midnight_clean)
    exc_fixed_df = exc_hist_fix(exc_complete_df)
    mongo_upload(exc_fixed_df, "collection_exc_clean")

    exc_converted = exc_hist_conv(exc_fixed_df)
    exc_converted.fillna(0, inplace=True)
    mongo_upload(exc_converted, "collection_exc_final_data")

    return None
Example #18
def exc_daily_feed(day=None):

    day_before_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_data_feed", {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:

            query_data = {"Time": int(day_before_TS)}
            exc_daily_df = query_mongo(
                DB_NAME, MONGO_DICT.get("coll_exc_final"), query_data)
            mongo_upload(exc_daily_df, "collection_data_feed")

        else:
            print("The collection index_data_feed is already updated")

    else:
        pass

    return None
Example #19
def daily_pair_vol_fix2(time_to_fix):

    # defining the query details
    q_dict: Dict[str, int] = {}
    q_dict = {"Time": time_to_fix}

    # querying the MongoDB collection
    daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"), q_dict)
    daily_mat = daily_mat.loc[daily_mat.Time != 0]
    daily_mat = daily_mat.drop(columns=["Low", "High", "Open"])

    for Crypto in CRYPTO_ASSET:

        ccy_pair_array = []

        for i in PAIR_ARRAY:

            ccy_pair_array.append(Crypto.lower() + i)

        for exchange in EXCHANGES:

            for cp in ccy_pair_array:

                mat = daily_mat.loc[daily_mat["Exchange"] == exchange]
                mat = mat.loc[mat["Pair"] == cp]
                # checking if the matrix is not empty
                if mat.shape[0] > 1:

                    mat["Pair Volume"] = mat["Close Price"] * \
                        mat["Crypto Volume"]

                # put the manipulated data on MongoDB
                try:

                    mongo_upload(mat, "collection_cw_vol_check")

                except TypeError:

                    pass

    return None
Example #20
def daily_pair_vol_fix(day):

    # defining the query details
    q_dict: Dict[str, int] = {}
    q_dict = {"Time": day}

    # querying the MongoDB collection
    daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"), q_dict)

    try:

        daily_mat = daily_mat.loc[daily_mat.Time != 0]
        daily_mat = daily_mat.drop(columns=["Low", "High", "Open"])

        daily_vol_fix = pair_vol_fix(daily_mat)

    except AttributeError:

        daily_vol_fix = []

    return daily_vol_fix
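
A minimal sketch of the "Pair Volume" fix applied by pair_vol_fix (helper not shown): the pair volume is recomputed as close price times crypto volume, the same computation Example #19 spells out explicitly. Toy numbers only.

import pandas as pd

daily_mat = pd.DataFrame({"Close Price": [7000.0, 250.0],
                          "Crypto Volume": [10.0, 40.0]})

daily_mat["Pair Volume"] = daily_mat["Close Price"] * daily_mat["Crypto Volume"]
print(daily_mat["Pair Volume"].tolist())  # [70000.0, 10000.0]
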
Example #21
def cw_hist_operation(start_date=START_DATE):

    date_tot = date_gen(start_date)
    last_day_TS = date_tot[len(date_tot) - 1]

    mongo_indexing()

    # deleting previous MongoDB collections
    mongo_coll_drop("cw_hist_clean")
    mongo_coll_drop("cw_hist_conv")

    # fix and upload the series for the "pair volume" info
    tot_raw_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"))
    cw_vol_fix_data = cw_hist_pair_vol_fix(tot_raw_data)
    mongo_upload(cw_vol_fix_data, "collection_cw_vol_check")

    # clean and upload all the series
    cleaned_df = cw_hist_cleaning(cw_vol_fix_data, start_date)
    mongo_upload(cleaned_df, "collection_cw_clean")

    # compute and upload USDC and USDT rates series
    usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean", None)
    mongo_upload(usdt_rates, "collection_stable_rate")
    mongo_upload(usdc_rates, "collection_stable_rate")

    # convert and upload all the data into USD
    converted_df = cw_hist_conv_op(cleaned_df)
    mongo_upload(converted_df, "collection_cw_converted")

    # logic matrix of crypto-fiat keys
    key_df = key_log_mat(DB_NAME, "coll_cw_conv", last_day_TS, EXCHANGES,
                         CRYPTO_ASSET, PAIR_ARRAY)
    mongo_upload(key_df, "collection_CW_key")
    mongo_upload(key_df, "collection_EXC_key")

    # fill zero-volume data and upload on MongoDB
    final_df = cw_hist_zero_vol_fill_op(converted_df)
    mongo_upload(final_df, "collection_cw_final_data")

    return None
Example #22
def exc_daily_key_mngm(daily_mat_00, daily_mat_12, day_to_clean_TS):

    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_keys"))

    # adding to the daily matrix the values referring to dead crypto-fiat pairs
    daily_mat_with_dead = exc_dead_key_mng(
        logic_key, daily_mat_00, daily_mat_12, day_to_clean_TS)

    # searching for possible new crypto-fiat pair
    new_key_hist = exc_new_key_mng(logic_key, daily_mat_00, day_to_clean_TS)

    if new_key_hist != []:

        collection_dict_upload = mongo_coll()
        # upload the new artificial historical series on MongoDB
        # collection "EXC_cleandata"
        mongo_upload(new_key_hist, collection_dict_upload.get(
            "collection_exc_clean"))

    else:

        pass

    return daily_mat_with_dead
Example #23
date_tot = [str(single_date) for single_date in date_complete_int]

# #################### setup mongo connection ##################

# creating the empty collections cleandata within the database index
mongo_indexing()

collection_dict_upload = mongo_coll()

# ################# DAILY DATA CONVERSION MAIN PART ##################

# querying the data from mongo
query_data = {"Time": str(y_TS)}
query_rate = {"Date": str(y_TS)}
query_stable = {"Time": str(y_TS)}
matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"),
                          query_rate)
matrix_rate = matrix_rate.rename({"Date": "Time"}, axis="columns")
matrix_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_clean"),
                          query_data)
print(matrix_data)
matrix_rate_stable = query_mongo(DB_NAME, MONGO_DICT.get("coll_stable_rate"),
                                 query_stable)

# creating a column containing the fiat currency
matrix_rate["fiat"] = [x[:3].lower() for x in matrix_rate["Currency"]]
matrix_data["fiat"] = [x[3:].lower() for x in matrix_data["Pair"]]
matrix_rate_stable["fiat"] = [
    x[:4].lower() for x in matrix_rate_stable["Currency"]
]
Example #24
# create the indexing for MongoDB and define the variable containing the
# MongoDB collections where to upload data
mongo_indexing()
collection_dict_upload = mongo_coll()

# ####################### check for the cw_rawdata ####################

# selecting the last five days and putting them into an array
last_five_days = date_tot[(len(date_tot) - 5):len(date_tot)]
print(last_five_days)

# defining the details to query on MongoDB
query = {"Exchange": "coinbase-pro", "Pair": "ethusd"}

# retrieving the wanted data from the MongoDB collection
matrix = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"), query)

# checking the time column and selecting only the last five days retrieved
# from the MongoDB collection
date_list = np.array(matrix["Time"])
last_five_days_mongo = date_list[(len(date_list) - 5):len(date_list)]
last_five_days_mongo = [
    str(single_date) for single_date in last_five_days_mongo
]
print(last_five_days_mongo)

# finding the dates to download as the difference between the
# complete array of dates and the dates now stored on MongoDB
date_to_add = Diff(last_five_days, last_five_days_mongo)

if date_to_add != []:
Example #25
# creating the empty collection cleandata within the database index
mongo_indexing()

collection_dict_upload = mongo_coll()

# ########## USDC/USD and USDT/USD computation #####################

start = time.time()

Exchanges = EXCHANGES

# taking the BTC/USD pair historical series
first_query = {"Pair": "btcusd", "Exchange": "kraken"}
first_call = query_mongo(
    DB_NAME, MONGO_DICT.get("coll_cw_clean"), first_query)
# isolating some values in single variables
time_arr = first_call[["Time"]]
price_df = first_call[["Close Price"]]
volume_df = first_call[["Pair Volume"]]
price_df = price_df.rename(columns={"Close Price": "kraken"})
volume_df = volume_df.rename(columns={"Pair Volume": "kraken"})
Exchanges.remove("kraken")

for exchange in Exchanges:

    query = {"Pair": "btcusd", "Exchange": exchange}
    single_ex = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_cw_clean"), query)

    try:
Example #26
def ECB_setup(key_curr_vector,
              start_period,
              end_period,
              timeST="N",
              db=DB_NAME,
              coll_raw="coll_ecb_raw"):

    # defining the array of date to be used
    date_ECB = date_gen(start_period, end_period, EoD="N")

    # defining the headers of the returning data frame
    header = ["Date", "Currency", "Rate"]

    # for each date in the date array the function retrieves the ECB raw
    # data from MongoDB and appends the result to the returned matrix
    Exchange_Matrix = np.array([])

    for i, single_date in enumerate(date_ECB):

        query = {"TIME_PERIOD": str(date_ECB[i])}

        # retrieving data from MongoDB 'index' and 'ecb_raw' collection
        single_date_ex_matrix = query_mongo(db, MONGO_DICT.get(coll_raw),
                                            query)

        # check if rates exist for the specified date
        if len(single_date_ex_matrix) != 0:

            # find the USD/EUR rates useful for conversions
            cambio_USD_EUR = float(
                np.array(single_date_ex_matrix.loc[
                    single_date_ex_matrix.CURRENCY == "USD", "OBS_VALUE"]))

            # add a column to DF with the USD based rates
            single_date_ex_matrix["USD based rate"] = (
                single_date_ex_matrix["OBS_VALUE"]) / cambio_USD_EUR

            # create the date array
            date_arr = np.full(len(key_curr_vector), single_date)

            # creating the array with 'XXX/USD' format
            curr_arr = single_date_ex_matrix["CURRENCY"] + "/USD"
            curr_arr = np.where(curr_arr == "USD/USD", "EUR/USD", curr_arr)

            # creating the array with rate values USD based
            rate_arr = single_date_ex_matrix["USD based rate"]
            rate_arr = np.where(
                rate_arr == 1.000000,
                1 / single_date_ex_matrix["OBS_VALUE"][0],
                rate_arr,
            )

            # stacking the array together
            array = np.column_stack((date_arr, curr_arr, rate_arr))

            # filling the return matrix
            if Exchange_Matrix.size == 0:

                Exchange_Matrix = array

            else:

                Exchange_Matrix = np.row_stack((Exchange_Matrix, array))

        # if the query returns an empty matrix, the function takes the values
        # of the last useful day
        else:

            date_arr = np.full(len(key_curr_vector), single_date)

            # take the curr_arr values of the previous day
            curr_arr = curr_arr

            # take the rate_arr values of the previous day
            rate_arr = rate_arr

            # stack the array together
            array = np.column_stack((date_arr, curr_arr, rate_arr))

            if Exchange_Matrix.size == 0:

                Exchange_Matrix = array

            else:

                Exchange_Matrix = np.row_stack((Exchange_Matrix, array))

    if timeST != "N":

        for j, element in enumerate(Exchange_Matrix[:, 0]):

            to_date = datetime.strptime(element, "%Y-%m-%d")
            today_TS = int(to_date.replace(tzinfo=timezone.utc).timestamp())

            Exchange_Matrix[j, 0] = today_TS

    return pd.DataFrame(Exchange_Matrix, columns=header)
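
A minimal numeric sketch of the rate conversion in ECB_setup, with invented OBS_VALUE quotes: the ECB values are amounts of each currency per 1 EUR, so dividing by the USD quote gives the cross rate per 1 USD, while the EUR/USD entry is obtained as 1 / (USD per EUR).

import numpy as np

currencies = np.array(["USD", "GBP", "JPY"])
obs_value = np.array([1.10, 0.88, 120.0])   # toy quotes, per 1 EUR

cambio_USD_EUR = obs_value[currencies == "USD"][0]   # 1.10 USD per EUR
usd_based = obs_value / cambio_USD_EUR               # per 1 USD

curr_arr = np.char.add(currencies.astype(str), "/USD")
curr_arr = np.where(curr_arr == "USD/USD", "EUR/USD", curr_arr)
rate_arr = np.where(usd_based == 1.0, 1 / cambio_USD_EUR, usd_based)

print(list(zip(curr_arr, np.round(rate_arr, 4))))
# EUR/USD about 0.9091, GBP/USD 0.8, JPY/USD about 109.0909
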
Example #27
def ECB_daily_setup(key_curr_vector,
                    day_to_clean=None,
                    db=DB_NAME,
                    coll_raw="coll_ecb_raw",
                    coll_clean="coll_ecb_clean"):

    if day_to_clean is None:

        today_str = datetime.now().strftime("%Y-%m-%d")
        today = datetime.strptime(today_str, "%Y-%m-%d")

        # timestamp date
        today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
        day_to_clean_TS = today_TS - DAY_IN_SEC
        two_before_TS = day_to_clean_TS - DAY_IN_SEC

        # human format date
        day_to_clean_human = timestamp_to_human([day_to_clean_TS])

    else:

        day_to_clean_d = datetime.strptime(day_to_clean, "%Y-%m-%d")

        # timestamp date
        day_to_clean_TS = int(
            day_to_clean_d.replace(tzinfo=timezone.utc).timestamp())
        two_before_TS = day_to_clean_TS - DAY_IN_SEC

        # human format date
        day_to_clean_human = timestamp_to_human([day_to_clean_TS])

    # defining the headers of the returning data frame
    header = ["Currency", "Rate"]

    # retrieving data from MongoDB 'index' and 'ecb_raw' collection
    ecb_raw_mat = query_mongo(db, MONGO_DICT.get(coll_raw))

    # selecting from the df only the values referring to yesterday
    y_ecb_raw = ecb_raw_mat.loc[ecb_raw_mat.TIME_PERIOD == str(
        day_to_clean_TS)]

    if y_ecb_raw.empty is False:

        # find the USD/EUR rates useful for conversions
        exc_USD_EUR = float(
            np.array(y_ecb_raw.loc[y_ecb_raw.CURRENCY == "USD", "OBS_VALUE"]))

        # add a column to DF with the USD based rates
        usd_based = y_ecb_raw["OBS_VALUE"] / exc_USD_EUR
        y_ecb_raw["USD based rate"] = usd_based

        # creating the array with 'XXX/USD' format
        curr_arr = y_ecb_raw["CURRENCY"] + "/USD"
        curr_arr = np.where(curr_arr == "USD/USD", "EUR/USD", curr_arr)

        # creating the array with rate values USD based
        rate_arr = y_ecb_raw["USD based rate"]
        exc_EUR_USD = float(y_ecb_raw.loc[y_ecb_raw.CURRENCY == "USD",
                                          "OBS_VALUE"])
        rate_arr = np.where(rate_arr == 1.000000, 1 / exc_EUR_USD, rate_arr)

        # stacking the array together
        array = np.column_stack((curr_arr, rate_arr))

        # converting into dataframe
        df = pd.DataFrame(array, columns=header)
        df["Date"] = str(day_to_clean_TS)
        df["Standard Date"] = day_to_clean_human[0]
        exc_ecb = df[["Date", "Standard Date", "Currency", "Rate"]]

    else:

        query = {"Date": str(two_before_TS)}
        prev_clean = query_mongo(db, MONGO_DICT.get(coll_clean), query)

        # changing "Date" and "Standard Date" from two day before to yesterday
        prev_clean["Date"] = str(day_to_clean_TS)
        prev_clean["Standard Date"] = day_to_clean_human[0]
        exc_ecb = prev_clean[["Date", "Standard Date", "Currency", "Rate"]]

    return exc_ecb
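
A minimal sketch of the date handling in ECB_daily_setup above: a "YYYY-MM-DD" string is turned into a UTC midnight Unix timestamp, then shifted back by DAY_IN_SEC = 86400 to step to the previous day.

from datetime import datetime, timezone

DAY_IN_SEC = 86400

day_to_clean = "2020-04-17"
day_to_clean_d = datetime.strptime(day_to_clean, "%Y-%m-%d")
day_to_clean_TS = int(day_to_clean_d.replace(tzinfo=timezone.utc).timestamp())
two_before_TS = day_to_clean_TS - DAY_IN_SEC

print(day_to_clean_TS, two_before_TS)  # 1587081600 1586995200
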
Example #28
            crypto = cp[:3]
            fiat_curr = cp[3:]
            # ######### LEAVING OUT NEW CRYPTO-FIAT PAIRS ##################
            c_1 = exchange == "bittrex" and fiat_curr == "eur"
            c_2 = exchange == "bittrex" and crypto == "ltc" and fiat_curr == "usd"
            c_3 = exchange == "poloniex" and crypto == "bch" and fiat_curr == "usdc"

            if c_1 or c_2 or c_3:
                continue

            # ###############################################################

            # defining the dictionary for the MongoDB query
            query_dict = {"Exchange": exchange, "Pair": cp}
            # retrieving the needed information from MongoDB
            matrix = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_final"),
                                 query_dict)

            try:

                matrix = matrix.drop(columns=["Low", "High", "Open"])

            except KeyError:

                pass

            except AttributeError:

                pass

            try:
Example #29
y_TS = today_TS - DAY_IN_SEC
two_before_TS = y_TS - DAY_IN_SEC

# defining the array containing all the dates from start_period until today
date_complete_int = date_gen(START_DATE)
# converting the timestamp format date into string
date_tot = [str(single_date) for single_date in date_complete_int]

# searching only the last five days
last_five_days = date_tot[(len(date_tot) - 5): len(date_tot)]

# defining the details to query on MongoDB
query = {"Exchange": "coinbase-pro", "Pair": "ethusd"}

# retrieving the wanted data from the MongoDB collection
matrix = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_clean"), query)

# checking the time column
date_list = np.array(matrix["Time"])
last_five_days_mongo = date_list[(len(date_list) - 5): len(date_list)]

# finding the dates to download as the difference between the complete array
# of dates and the dates now stored on MongoDB
date_to_add = Diff(last_five_days, last_five_days_mongo)
print(date_to_add)

if date_to_add != []:

    if len(date_to_add) > 1:

        date_to_add.sort()
# converting the timestamp format date into string
date_tot = [str(single_date) for single_date in date_complete_int]

# #################### setup mongo connection ##################

# creating the empty collections cleandata within the database index
mongo_indexing()

collection_dict_upload = mongo_coll()

# ################### fixing the "Pair Volume" information #################

q_dict: Dict[str, str] = {}
q_dict = {"Time": str(y_TS)}

daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_raw"), q_dict)
daily_mat = daily_mat[
    ["Pair", "Exchange", "Close Price", "Time", "Crypto Volume", "date"]
]

# selecting the exchange used in the index computation
daily_mat = daily_mat.loc[daily_mat["Exchange"].isin(EXCHANGES)]

# creating a column containing the hour of extraction
daily_mat["date"] = [str(d) for d in daily_mat["date"]]
daily_mat["hour"] = daily_mat["date"].str[11:16]

daily_mat = daily_mat.loc[daily_mat.Time != 0]

daily_mat = exc_pair_cleaning(daily_mat)
daily_mat = exc_value_cleaning(daily_mat)