Example #1
def exc_daily_op(day=None):

    day_before_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_exc_clean", {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:

            cleaned_df = exc_daily_cleaning(EXCHANGES, day_before_TS)
            mongo_upload(cleaned_df, "collection_exc_clean")

        else:
            print("The collection EXC_cleandata is already updated")

        if daily_check_mongo("coll_exc_final", {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:

            converted_df = daily_conv_op(day_before_TS, series="EXC")
            converted_df.fillna(0, inplace=True)
            mongo_upload(converted_df, "collection_exc_final_data")

        else:

            print("The collection EXC_final_data is already updated")

    else:
        pass

    return None
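Note: the daily functions in these examples share one idempotent guard, skipping the upload when the day's documents are already on MongoDB. A minimal sketch of that pattern, using only the call shapes visible above (the collection keys, the filter document and the function name below are illustrative placeholders, not taken from the original project):

def guarded_daily_upload(check_coll, upload_coll, build_df):

    # build_df is any callable returning the dataframe for the day
    if daily_check_mongo(check_coll, {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:
        mongo_upload(build_df(), upload_coll)
    else:
        print("The collection {} is already updated".format(upload_coll))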
Example #2
def ecb_daily_op(day=None):

    day_to_download_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_ecb_raw", {"CURRENCY": "USD"},
                             coll_kind="ecb_raw") is False:

            ecb_daily_up(day_to_download_TS)

        else:

            print("The ecb_raw collection on MongoDB is already updated.")

        if daily_check_mongo("coll_ecb_clean", {"Currency": "EUR/USD"},
                             coll_kind="ecb_clean") is False:

            ecb_day_clean = ECB_daily_setup(ECB_FIAT)
            mongo_upload(ecb_day_clean, "collection_ecb_clean")

        else:

            print("The ecb_clean collection on MongoDB is already updated.")

    else:

        ecb_daily_up(day_to_download_TS)

        ecb_day_clean = ECB_daily_setup(ECB_FIAT, day)

        mongo_upload(ecb_day_clean, "collection_ecb_clean")
Example #3
def data_feed_op():

    mongo_coll_drop("index_feed")

    merged_series = cw_exc_merging()
    mongo_upload(merged_series, "collection_data_feed")

    return None
Example #4
def ecb_hist_op(start_date_d=ECB_START_DATE_D,
                start_date_s=ECB_START_DATE,
                fiat_curr=ECB_FIAT):

    ecb_hist_raw = ecb_hist_download(start_date_d)
    mongo_upload(ecb_hist_raw, "collection_ecb_raw")
    ecb_hist_clean = ecb_hist_setup(start_date_s, fiat_curr)
    mongo_upload(ecb_hist_clean, "collection_ecb_clean")

    return None
Example #5
def ecb_daily_up(day_to_download_TS):

    try:
        ecb_day_raw = ecb_daily_download(day_to_download_TS)
    except Exception:
        error("Exception occurred", exc_info=True)
        info('Daily download from ECB failed')
        # keep ecb_day_raw defined so the upload step below cannot raise a NameError
        ecb_day_raw = None
    try:

        mongo_upload(ecb_day_raw, "collection_ecb_raw")

    except TypeError:

        print("No rate on ECB website, the passed day is a holiday")
Example #6
def new_series_composer(key,
                        new_key_df,
                        date_tot_str,
                        day_to_check,
                        kind_of_series="CW"):

    # create the df containing the historical series of the new couple(s),
    # composed of zeros
    splited_key = key.split("&")
    key_hist_df = pd.DataFrame(date_tot_str, columns=["Time"])
    key_hist_df["Close Price"] = 0
    key_hist_df["Pair Volume"] = 0
    key_hist_df["Crypto Volume"] = 0
    key_hist_df["Exchange"] = splited_key[0]
    key_hist_df["Pair"] = splited_key[1]

    if kind_of_series == "CW":

        collection_dict_upload = mongo_coll()

        # uploading on the MongoDB collections "CW_converted_data" and "CW_final_data"
        # the new series of zeros, except for the last value (yesterday)
        mongo_upload(key_hist_df, "collection_cw_converted")
        mongo_upload(key_hist_df, "collection_cw_final_data")

        query_to_del = {"Time": str(day_to_check)}
        collection_dict_upload.get("collection_cw_converted").delete_many(
            query_to_del)
        collection_dict_upload.get("collection_cw_final_data").delete_many(
            query_to_del)

    else:
        pass

    # inserting today's value of the new couple(s)
    new_price = np.array(new_key_df.loc[new_key_df.key == key, "Close Price"])
    new_p_vol = np.array(new_key_df.loc[new_key_df.key == key, "Pair Volume"])
    new_c_vol = np.array(new_key_df.loc[new_key_df.key == key,
                                        "Crypto Volume"])
    key_hist_df.loc[key_hist_df.Time == str(day_to_check),
                    "Close Price"] = new_price
    key_hist_df.loc[key_hist_df.Time == str(day_to_check),
                    "Pair Volume"] = new_p_vol
    key_hist_df.loc[key_hist_df.Time == str(day_to_check),
                    "Crypto Volume"] = new_c_vol

    return key_hist_df
Example #7
def cw_hist_download_op(start_date=START_DATE):

    # deleting previous MongoDB collection for rawdata
    mongo_coll_drop("cw_hist_down")
    collection_dict_upload = mongo_coll()

    print("Downloading all CW history...")
    cw_raw_data = cw_hist_download(start_date)
    mongo_upload(cw_raw_data, "collection_cw_raw")
    print("CW series download completed")

    # deleting 31/12/2015 values if present
    last_2015_TS = 1451520000
    query_ = {'Time': last_2015_TS}
    collection_dict_upload.get("collection_cw_raw").delete_many(query_)

    return None
Example #8
def hist_data_feed_op():

    # define the array containing the dates where the index uses CW feed data
    CW_date_arr = date_gen(START_DATE, EXC_START_DATE)
    CW_date_str = [str(date) for date in CW_date_arr]

    # drop the pre-existing collection (if there is one)
    mongo_coll_drop("index_feed")

    # downloading the EXC series from MongoDB
    EXC_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_final"))
    EXC_series = EXC_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    # downloading the CW series from MongoDB and selecting only the dates
    # from 2016-01-01 to 2020-04-17
    CW_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_final"))
    CW_series["Time"] = [str(x) for x in CW_series["Time"]]
    print("CW")
    print(CW_series)
    CW_sub_series = CW_series.loc[CW_series.Time.isin(CW_date_str)]
    print(CW_sub_series)
    CW_sub_series = CW_sub_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]
    CW_sub_series["Time"] = [int(x) for x in CW_sub_series["Time"]]
    CW_sub_series.reset_index(drop=True, inplace=True)
    print(CW_sub_series)

    # creating a single dataframe containing the two different data sources
    data_feed = CW_sub_series.append(EXC_series, sort=True)
    data_feed.reset_index(drop=True, inplace=True)

    data_feed = data_feed[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    print(data_feed)
    data_feed = homogeneize_feed(data_feed)
    print("post hom")
    print(data_feed)

    # put the converted data on MongoDB
    mongo_upload(data_feed, "collection_data_feed")

    return None
Example #9
def exc_daily_feed(day=None):

    day_before_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_data_feed", {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:

            query_data = {"Time": int(day_before_TS)}
            exc_daily_df = query_mongo(
                DB_NAME, MONGO_DICT.get("coll_exc_final"), query_data)
            mongo_upload(exc_daily_df, "collection_data_feed")

        else:
            print("The collection index_data_feed is already updated")

    else:
        pass

    return None
Example #10
def daily_pair_vol_fix2(time_to_fix):

    # defining the query details
    q_dict: Dict[str, int] = {}
    q_dict = {"Time": time_to_fix}

    # querying the MongoDB collection
    daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"), q_dict)
    daily_mat = daily_mat.loc[daily_mat.Time != 0]
    daily_mat = daily_mat.drop(columns=["Low", "High", "Open"])

    for Crypto in CRYPTO_ASSET:

        ccy_pair_array = []

        for i in PAIR_ARRAY:

            ccy_pair_array.append(Crypto.lower() + i)

        for exchange in EXCHANGES:

            for cp in ccy_pair_array:

                mat = daily_mat.loc[daily_mat["Exchange"] == exchange]
                mat = mat.loc[mat["Pair"] == cp]
                # checking if the matrix is not empty
                if mat.shape[0] > 1:

                    mat["Pair Volume"] = mat["Close Price"] * \
                        mat["Crypto Volume"]

                # put the manipulated data on MongoDB
                try:

                    mongo_upload(mat, "collection_cw_vol_check")

                except TypeError:

                    pass

    return None
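As an aside, the nested loops above recompute "Pair Volume" one exchange-pair subset at a time and upload each subset separately. A vectorized sketch of the same fix-up is shown below; it assumes only the column names used above, keeps the shape[0] > 1 guard by recomputing the product only for exchange-pair groups with more than one row, and simply returns the fixed dataframe instead of uploading each subset. Note that it touches every exchange-pair present in daily_mat, not only the CRYPTO_ASSET and PAIR_ARRAY combinations iterated above (the function name is illustrative):

def daily_pair_vol_fix_vectorized(daily_mat):

    fixed = daily_mat.copy()
    # size of each exchange-pair group, broadcast back onto the rows
    group_size = fixed.groupby(["Exchange", "Pair"])["Close Price"].transform("size")
    rows = group_size > 1
    fixed.loc[rows, "Pair Volume"] = (fixed.loc[rows, "Close Price"]
                                      * fixed.loc[rows, "Crypto Volume"])
    return fixed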
Example #11
def cw_new_key_mngm(logic_key_df, daily_mat, time_to_check, date_tot_str):

    # selecting only the keys with logic value 0
    key_absent = logic_key_df.loc[logic_key_df.logic_value == 0]
    key_absent = key_absent.drop(columns=["logic_value"])

    # merging the dataframes in order to find the potential new keys
    merg_absent = pd.merge(key_absent, daily_mat, on="key", how="left")
    merg_absent.fillna("NaN", inplace=True)
    new_key_df = merg_absent.loc[merg_absent["Close Price"] != "NaN"]

    if new_key_df.empty is False:

        print("Message: New exchange-pair couple(s) found.")
        new_key_list = new_key_df["key"]

        date_tot_int = date_gen(START_DATE)
        # converting the timestamp format date into string
        date_tot_str = [str(single_date) for single_date in date_tot_int]

        for key in new_key_list:

            # updating the logic matrix of exchange-pair keys
            logic_row_update = log_key_update(key, "collection_CW_key")
            mongo_upload(logic_row_update, "collection_CW_key")

            key_hist_df = new_series_composer(key,
                                              new_key_df,
                                              date_tot_str,
                                              time_to_check,
                                              kind_of_series="CW")

            # upload the dataframe on MongoDB collection "CW_cleandata"
            mongo_upload(key_hist_df, "collection_cw_clean")

    else:
        pass

    return None
Example #12
def exc_daily_key_mngm(daily_mat_00, daily_mat_12, day_to_clean_TS):

    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_keys"))

    # adding to the daily matrix the values referring to dead crypto-fiat pairs
    daily_mat_with_dead = exc_dead_key_mng(
        logic_key, daily_mat_00, daily_mat_12, day_to_clean_TS)

    # searching for possible new crypto-fiat pair
    new_key_hist = exc_new_key_mng(logic_key, daily_mat_00, day_to_clean_TS)

    if new_key_hist != []:

        collection_dict_upload = mongo_coll()
        # upload the new artificial historical series on MongoDB
        # collection "EXC_cleandata"
        mongo_upload(new_key_hist, collection_dict_upload.get(
            "collection_exc_clean"))

    else:

        pass

    return daily_mat_with_dead
Example #13
def exc_new_key_mng(logic_key_df, daily_mat_00, day_to_clean_TS):

    # define a subset of keys that have not been present in the past
    key_absent = logic_key_df.loc[logic_key_df.logic_value == 0]
    key_absent = key_absent.drop(columns=["logic_value"])

    # merging the two dataframes (left join) in order to find potential
    # new keys in the data of the day
    merg_absent = pd.merge(key_absent, daily_mat_00, on="key", how="left")
    merg_absent.fillna("NaN", inplace=True)
    new_key_df = merg_absent.loc[merg_absent["Close Price"] != "NaN"]

    if new_key_df.empty is False:

        print("Message: New exchange-pair couple(s) found.")
        new_key_list = new_key_df["key"]
        print(new_key_list)

        date_tot_int = date_gen(START_DATE)
        # casting the timestamp dates to int
        date_tot_str = [int(single_date) for single_date in date_tot_int]

        for key in new_key_list:

            # updating the logic matrix of exchange-pair keys
            logic_row_update = log_key_update(key, "collection_EXC_key")
            mongo_upload(logic_row_update, "collection_EXC_key")

            key_hist_df = new_series_composer(
                key, new_key_df, date_tot_str, day_to_clean_TS)

    else:

        key_hist_df = []

    return key_hist_df
Example #14
def exc_hist_op():

    mongo_coll_drop("exc")

    mongo_indexing()

    # defining the crypto_fiat array
    crypto_fiat_arr = all_crypto_fiat_gen()
    # querying all raw data from EXC_rawdata
    exc_raw_df = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_raw"))

    midnight_clean = exc_initial_clean(exc_raw_df, crypto_fiat_arr)
    mongo_upload(midnight_clean, "collection_exc_uniform")

    # deleting the values for xrp in the coinbase-pro exchange
    midnight_clean["key"] = midnight_clean["Exchange"] + \
        "&" + midnight_clean["Pair"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "coinbase-pro&xrpusd"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "coinbase-pro&xrpeur"]
    # deleting the values for zec and xmr in the bittrex exchange
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusd"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusdt"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusdc"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&xmrusdt"]

    midnight_clean = midnight_clean.drop(columns="key")

    exc_complete_df = exc_key_mngmt(midnight_clean)
    exc_fixed_df = exc_hist_fix(exc_complete_df)
    mongo_upload(exc_fixed_df, "collection_exc_clean")

    exc_converted = exc_hist_conv(exc_fixed_df)
    exc_converted.fillna(0, inplace=True)
    mongo_upload(exc_converted, "collection_exc_final_data")

    return None
Example #15
reshaped_divisor = divisor_reshape(divisor_array, reference_date_vector)

index_values = index_level_calc(Crypto_Asset_Prices, syntethic_relative_matrix,
                                divisor_array, reference_date_vector)

index_1000_base = index_based(index_values)
# pd.set_option('display.max_rows', None)

# #################### MONGO DB UPLOADS ###########################
# creating the array with human readable Date
human_date = timestamp_to_human(reference_date_vector)

# put the "Crypto_Asset_Prices" dataframe on MongoDB
Crypto_Asset_Prices["Date"] = human_date
mongo_upload(Crypto_Asset_Prices,
             "collection_price",
             reorder="Y",
             column_set_val="complete")

# put the "Crypto_Asset_Volumes" dataframe on MongoDB
Crypto_Asset_Volume["Date"] = human_date
mongo_upload(Crypto_Asset_Volume,
             "collection_volume",
             reorder="Y",
             column_set_val="complete")

# put the exchange volumes on MongoDB
mongo_upload(exc_vol_tot, "collection_all_exc_vol")

# put the "price_ret" dataframe on MongoDB
price_ret["Date"] = human_date
mongo_upload(price_ret,
             "collection_price_ret",
             reorder="Y",
             column_set_val="complete")
Example #16
def index_daily_uploader(crypto_asset_price,
                         crypto_asset_vol,
                         exc_vol_tot,
                         price_ret,
                         daily_ewma,
                         daily_ewma_double_check,
                         daily_synth,
                         daily_rel,
                         curr_divisor,
                         daily_index_1000_df,
                         raw_index_df,
                         new_divisor=None,
                         new_logic_one=None,
                         new_logic_two=None,
                         new_weights=None,
                         day=None):
    if day is None:

        day_str = datetime.now().strftime("%Y-%m-%d")
        day_date = datetime.strptime(day_str, "%Y-%m-%d")
        day_TS = int(day_date.replace(tzinfo=timezone.utc).timestamp())
        day_before_TS = day_TS - DAY_IN_SEC
        yesterday_human = timestamp_to_human([day_before_TS])

    else:

        day_date = datetime.strptime(day, "%Y-%m-%d")
        day_before_TS = int(day_date.replace(tzinfo=timezone.utc).timestamp())
        yesterday_human = timestamp_to_human([day_before_TS])

    # put the "crypto_asset_price" dataframe on MongoDB
    crypto_asset_price["Date"] = yesterday_human
    mongo_upload(crypto_asset_price,
                 "collection_price",
                 reorder="Y",
                 column_set_val="complete")

    # put the "crypto_asset_vols" dataframe on MongoDB
    crypto_asset_vol["Date"] = yesterday_human
    mongo_upload(crypto_asset_vol,
                 "collection_volume",
                 reorder="Y",
                 column_set_val="complete")

    # put the exchange volumes on MongoDB
    mongo_upload(exc_vol_tot, "collection_all_exc_vol")

    # put the "price_ret" dataframe on MongoDB
    price_ret["Date"] = yesterday_human
    mongo_upload(price_ret,
                 "collection_price_ret",
                 reorder="Y",
                 column_set_val="complete")

    # put the EWMA dataframe on MongoDB
    daily_ewma["Date"] = yesterday_human
    daily_ewma["Time"] = day_before_TS
    mongo_upload(daily_ewma,
                 "collection_EWMA",
                 reorder="Y",
                 column_set_val="complete")

    # put the double checked EWMA on MongoDB
    daily_ewma_double_check["Date"] = yesterday_human
    daily_ewma_double_check["Time"] = day_before_TS
    mongo_upload(daily_ewma_double_check,
                 "collection_EWMA_check",
                 reorder="Y",
                 column_set_val="complete")

    # put the synth matrix on MongoDB
    daily_synth["Date"] = yesterday_human
    daily_synth["Time"] = day_before_TS
    mongo_upload(daily_synth,
                 "collection_synth",
                 reorder="Y",
                 column_set_val="complete")

    # put the relative synth matrix on MongoDB
    daily_rel["Date"] = yesterday_human
    daily_rel["Time"] = day_before_TS
    mongo_upload(daily_rel,
                 "collection_relative_synth",
                 reorder="Y",
                 column_set_val="complete")

    # put the reshaped divisor array on MongoDB
    curr_divisor["Date"] = yesterday_human
    curr_divisor["Time"] = day_before_TS
    mongo_upload(curr_divisor,
                 "collection_divisor_reshaped",
                 reorder="Y",
                 column_set_val="divisor")

    # put the index level 1000 on MongoDB
    daily_index_1000_df["Date"] = yesterday_human
    daily_index_1000_df["Time"] = day_before_TS
    mongo_upload(daily_index_1000_df,
                 "collection_index_level_1000",
                 reorder="Y",
                 column_set_val="index")

    # put the index level raw on MongoDB
    raw_index_df["Date"] = yesterday_human
    raw_index_df["Time"] = day_before_TS
    mongo_upload(raw_index_df,
                 "collection_index_level_raw",
                 reorder="Y",
                 column_set_val="index")

    if new_divisor is None:
        pass

    else:

        new_divisor["Date"] = yesterday_human
        new_divisor["Time"] = day_before_TS
        mongo_upload(new_divisor,
                     "collection_divisor_reshaped",
                     reorder="Y",
                     column_set_val="divisor")

    if new_logic_one is None:
        pass

    else:

        mongo_upload(new_logic_one,
                     "collection_logic_one",
                     reorder="Y",
                     column_set_val="complete")

    if new_logic_two is None:
        pass

    else:

        mongo_upload(new_logic_two,
                     "collection_logic_two",
                     reorder="Y",
                     column_set_val="complete")

    if new_weights is None:
        pass

    else:

        mongo_upload(new_weights,
                     "collection_weights",
                     reorder="Y",
                     column_set_val="complete")

    return None
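For reference, the day handling at the top of this example turns a "%Y-%m-%d" string into a Unix timestamp anchored to midnight UTC. A standalone sketch of that conversion (the helper name is illustrative, not part of the original code):

from datetime import datetime, timezone

def day_to_utc_ts(day_str):

    # parse the "%Y-%m-%d" string and pin it to 00:00 UTC
    day_date = datetime.strptime(day_str, "%Y-%m-%d")
    return int(day_date.replace(tzinfo=timezone.utc).timestamp())

# e.g. day_to_utc_ts("2020-04-17") returns 1587081600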
Example #17
usdt_rates["Time"] = first_call["Time"]

usdt_rates.fillna("NaN", inplace=True)

index_to_remove = usdt_rates[usdt_rates.Time == "NaN"].index

usdt_rates = usdt_rates.drop(index_to_remove)
print(usdt_rates)
usdt_rates["Standard Date"] = timestamp_to_human(first_call["Time"])
print(usdt_rates)
# correcting the date 2016-10-02 using the previous day rate
prev_rate = np.array(usdt_rates.loc[usdt_rates.Time == '1475280000', "Rate"])
usdt_rates.loc[usdt_rates.Time == '1475366400', "Rate"] = prev_rate

# USDT mongoDB upload
mongo_upload(usdt_rates, "collection_stable_rate")

# usdt_data = usdt_rates.to_dict(orient="records")
# collection_stable.insert_many(usdt_data)


# ############# USDC exchange rates computation ############
# BTC/USDC is traded on Poloniex, Kraken and bittrex
# Poloniex has the entire historical series from 01/01/2016

# POLONIEX usdc/usd exchange rate
query_usdc = {"Exchange": "poloniex", "Pair": "btcusdc"}
usdc_poloniex = query_mongo(
    DB_NAME, MONGO_DICT.get("coll_cw_clean"), query_usdc)

# KRAKEN usdc/usd exchange rate
Example #18
def cw_daily_operation(day=None):
    '''
    @param day: either None or a date string in "%Y-%m-%d" format

    '''

    # create the indexing for the MongoDB collections where data will be uploaded
    mongo_indexing()

    date_tot = date_gen(START_DATE)
    # converting the timestamp format date into string
    date_tot_str = [str(single_date) for single_date in date_tot]

    day_before_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_cw_raw", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            try:
                cw_rawdata_daily = cw_daily_download(day_before_TS +
                                                     DAY_IN_SEC)
                # upload only when the download succeeded
                mongo_upload(cw_rawdata_daily, "collection_cw_raw")
            except Exception:
                error("Exception occurred", exc_info=True)
                info('Daily download from CryptoWatch failed')

        else:

            print("The CW_rawdata collection on MongoDB is updated.")

        if daily_check_mongo("coll_vol_chk", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            mat_vol_fix = daily_pair_vol_fix(day_before_TS)

            try:

                mongo_upload(mat_vol_fix, "collection_cw_vol_check")

            except AttributeError:
                pass

        else:

            mat_vol_fix = []
            print(
                "Message: No need to fix pair volume. The collection on MongoDB is updated."
            )

        # new and dead crypto-fiat key management

        daily_complete_df = cw_daily_key_mngm(mat_vol_fix, day_before_TS,
                                              date_tot_str)

        # missing data fixing

        if daily_check_mongo("coll_cw_clean", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            daily_fixed_df = daily_fix_miss_op(daily_complete_df, day,
                                               "coll_cw_clean")
            mongo_upload(daily_fixed_df, "collection_cw_clean")

        else:

            print("Message: The collection cw_clean on MongoDB is updated.")

        if daily_check_mongo("coll_stable_rate",
                             {"Currency": "USDT/USD"}) is False:

            usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean",
                                                     str(day_before_TS))

            mongo_upload(usdt_rates, "collection_stable_rate")
            mongo_upload(usdc_rates, "collection_stable_rate")

        else:

            print("The stable_rates_collection on MongoDB is already updated.")

        if daily_check_mongo("coll_cw_conv", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            converted_data = cw_daily_conv_op(day_before_TS)
            mongo_upload(converted_data, "collection_cw_converted")

        else:

            print(
                "Message: The cw_converted_data collection on MongoDB is already updated."
            )

        if daily_check_mongo("coll_cw_final", {
                "Exchange": "coinbase-pro",
                "Pair": "btcusd"
        }) is False:

            mongo_upload(converted_data, "collection_cw_final_data")

        else:

            print(
                "The CW_final_data collection on MongoDB is already updated.")

    else:

        cw_rawdata_daily = cw_daily_download(day_before_TS)
        mongo_upload(cw_rawdata_daily, "collection_cw_raw")
        mat_vol_fix = daily_pair_vol_fix(day_before_TS)
        try:

            mongo_upload(mat_vol_fix, "collection_cw_vol_check")

        except AttributeError:
            pass

        daily_complete_df = cw_daily_key_mngm(mat_vol_fix, day_before_TS,
                                              date_tot_str)
        daily_fixed_df = daily_fix_miss_op(daily_complete_df, day,
                                           "coll_cw_clean")
        mongo_upload(daily_fixed_df, "collection_cw_clean")
        usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean",
                                                 str(day_before_TS))
        mongo_upload(usdt_rates, "collection_stable_rate")
        mongo_upload(usdc_rates, "collection_stable_rate")
        converted_data = cw_daily_conv_op(day_before_TS)
        mongo_upload(converted_data, "collection_cw_converted")
        mongo_upload(converted_data, "collection_cw_final_data")

    return None
Example #19
def test_mongo_upload():

    df = pd.DataFrame(file_data)

    mongo_upload(df, 'test_mongo')
Example #20
usdt_rates = pd.DataFrame(usdt_rates, columns=["Rate"])
usdt_rates = usdt_rates.replace([np.inf, -np.inf], np.nan)
usdt_rates.fillna(0, inplace=True)

# adding Currency (USDT/USD), Time (timestamp),
# and Standard Date (YYYY-MM-DD) columns
usdt_rates["Currency"] = np.zeros(len(usdt_rates["Rate"]))
usdt_rates["Currency"] = [
    str(x).replace("0.0", "USDT/USD") for x in usdt_rates["Currency"]
]
usdt_rates["Time"] = first_call["Time"]
usdt_rates["Standard Date"] = timestamp_to_human(first_call["Time"])

print(usdt_rates)
# USDT mongoDB upload
mongo_upload(usdt_rates, "collection_stable_rate")

# ############# USDC exchange rates computation ############
# BTC/USDC is traded on Poloniex, Kraken and bittrex
# Poloniex has the entire historical series from 01/01/2016

# POLONIEX usdc/usd exchange rate
query_usdc = {"Exchange": "poloniex", "Pair": "btcusdc", "Time": str(y_TS)}
usdc_poloniex = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_clean"),
                            query_usdc)

# KRAKEN usdc/usd exchange rate
query_usdc = {"Exchange": "kraken", "Pair": "btcusdc", "Time": str(y_TS)}
usdc_kraken = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_clean"), query_usdc)

# COINBASE_PRO usdc exchange rate
Example #21
def index_hist_uploader(crypto_asset_price, crypto_asset_vol, exc_vol_tot,
                        price_ret, weights_for_board, first_logic_matrix_df,
                        second_logic_matrix_df, ewma_df, double_checked_EWMA,
                        syntethic, syntethic_relative_matrix, divisor_array,
                        reshaped_divisor, index_values, index_1000_base):

    # creating the array with human readable Date
    ref_date_arr = date_gen()
    human_date = timestamp_to_human(ref_date_arr)

    # put the "Crypto_Asset_Prices" dataframe on MongoDB
    crypto_asset_price["Date"] = human_date
    mongo_upload(crypto_asset_price,
                 "collection_price",
                 reorder="Y",
                 column_set_val="complete")

    # put the "Crypto_Asset_Volumes" dataframe on MongoDB
    crypto_asset_vol["Date"] = human_date
    mongo_upload(crypto_asset_vol,
                 "collection_volume",
                 reorder="Y",
                 column_set_val="complete")

    # put the exchange volumes on MongoDB
    mongo_upload(exc_vol_tot, "collection_all_exc_vol")

    # put the "price_ret" dataframe on MongoDB
    price_ret["Date"] = human_date
    mongo_upload(price_ret,
                 "collection_price_ret",
                 reorder="Y",
                 column_set_val="complete")

    # put the "weights" dataframe on MongoDB
    weight_human_date = timestamp_to_human(weights_for_board["Time"])
    weights_for_board["Date"] = weight_human_date
    mongo_upload(weights_for_board,
                 "collection_weights",
                 reorder="Y",
                 column_set_val="complete")
    print(weights_for_board)
    # put the first logic matrix on MongoDB
    first_date = timestamp_to_human(first_logic_matrix_df["Time"])
    first_logic_matrix_df["Date"] = first_date
    mongo_upload(first_logic_matrix_df,
                 "collection_logic_one",
                 reorder="Y",
                 column_set_val="complete")

    # put the second logic matrix on MongoDB
    second_date = timestamp_to_human(second_logic_matrix_df["Time"])
    second_logic_matrix_df["Date"] = second_date
    mongo_upload(second_logic_matrix_df,
                 "collection_logic_two",
                 reorder="Y",
                 column_set_val="complete")

    # put the EWMA dataframe on MongoDB
    ewma_df["Date"] = human_date
    ewma_df["Time"] = ref_date_arr
    mongo_upload(ewma_df,
                 "collection_EWMA",
                 reorder="Y",
                 column_set_val="complete")

    # put the double checked EWMA on MongoDB
    double_checked_EWMA["Date"] = human_date
    mongo_upload(double_checked_EWMA,
                 "collection_EWMA_check",
                 reorder="Y",
                 column_set_val="complete")

    # put the synth matrix on MongoDB
    syntethic["Date"] = human_date
    syntethic["Time"] = ref_date_arr
    mongo_upload(syntethic,
                 "collection_synth",
                 reorder="Y",
                 column_set_val="complete")

    # put the relative synth matrix on MongoDB
    syntethic_relative_matrix["Date"] = human_date
    syntethic_relative_matrix["Time"] = ref_date_arr
    mongo_upload(syntethic_relative_matrix,
                 "collection_relative_synth",
                 reorder="Y",
                 column_set_val="complete")

    # put the divisor array on MongoDB
    divisor_date = timestamp_to_human(divisor_array["Time"])
    divisor_array["Date"] = divisor_date
    mongo_upload(divisor_array,
                 "collection_divisor",
                 reorder="Y",
                 column_set_val="divisor")

    # put the reshaped divisor array on MongoDB
    reshaped_divisor_date = timestamp_to_human(reshaped_divisor["Time"])
    reshaped_divisor["Date"] = reshaped_divisor_date
    mongo_upload(reshaped_divisor,
                 "collection_divisor_reshaped",
                 reorder="Y",
                 column_set_val="divisor")

    # put the index level raw on MongoDB
    index_values["Date"] = human_date
    index_values["Time"] = ref_date_arr
    mongo_upload(index_values,
                 "collection_index_level_raw",
                 reorder="Y",
                 column_set_val="index")

    # put the index level 1000 on MongoDB
    index_1000_base["Date"] = human_date
    index_1000_base["Time"] = ref_date_arr
    mongo_upload(index_1000_base,
                 "collection_index_level_1000",
                 reorder="Y",
                 column_set_val="index")
Example #22
    for exchange in EXCHANGES:

        for cp in ccy_pair_array:

            mat = daily_mat.loc[daily_mat["Exchange"] == exchange]
            mat = mat.loc[mat["Pair"] == cp]
            # checking if the matrix is not empty
            if mat.shape[0] > 1:

                mat["Pair Volume"] = mat["Close Price"] * mat["Crypto Volume"]

            # put the manipulated data on MongoDB
            try:

                mongo_upload(mat, "collection_cw_vol_check")

            except TypeError:
                pass

# ############################################################################
# ########### DEAD AND NEW CRYPTO-FIAT MANAGEMENT ############################

# defining the query details
q_dict = {"Time": y_TS}

# downloading from MongoDB the matrix with the daily values and the
# matrix containing the exchange-pair logic values
daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_vol_chk"), q_dict)
logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_keys"))
Example #23
def cw_hist_operation(start_date=START_DATE):

    date_tot = date_gen(start_date)
    last_day_TS = date_tot[len(date_tot) - 1]

    mongo_indexing()

    # deleting previous MongoDB collections
    mongo_coll_drop("cw_hist_clean")
    mongo_coll_drop("cw_hist_conv")

    # fix and upload the series for the "pair volume" info
    tot_raw_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"))
    cw_vol_fix_data = cw_hist_pair_vol_fix(tot_raw_data)
    mongo_upload(cw_vol_fix_data, "collection_cw_vol_check")

    # clean and upload all the series
    cleaned_df = cw_hist_cleaning(cw_vol_fix_data, start_date)
    mongo_upload(cleaned_df, "collection_cw_clean")

    # compute and upload USDC and USDT rates series
    usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean", None)
    mongo_upload(usdt_rates, "collection_stable_rate")
    mongo_upload(usdc_rates, "collection_stable_rate")

    # convert and upload all the data into USD
    converted_df = cw_hist_conv_op(cleaned_df)
    mongo_upload(converted_df, "collection_cw_converted")

    # logic matrix of crypto-fiat keys
    key_df = key_log_mat(DB_NAME, "coll_cw_conv", last_day_TS, EXCHANGES,
                         CRYPTO_ASSET, PAIR_ARRAY)
    mongo_upload(key_df, "collection_CW_key")
    mongo_upload(key_df, "collection_EXC_key")

    # fill zero-volume data and upload on MongoDB
    final_df = cw_hist_zero_vol_fill_op(converted_df)
    mongo_upload(final_df, "collection_cw_final_data")

    return None
Example #24
        key_hist_df["Pair Volume"] = 0
        key_hist_df["Crypto Volume"] = 0
        key_hist_df["Exchange"] = splited_key[0]
        key_hist_df["Pair"] = splited_key[1]

        # inserting today's value of the new couple(s)
        new_price = new_key.loc[new_key.key == key, "Close Price"]
        new_p_vol = new_key.loc[new_key.key == key, "Pair Volume"]
        new_c_vol = new_key.loc[new_key.key == key, "Crypto Volume"]
        key_hist_df.loc[key_hist_df.Time == y_TS, "Close Price"] = new_price
        key_hist_df.loc[key_hist_df.Time == y_TS, "Pair Volume"] = new_p_vol
        key_hist_df.loc[key_hist_df.Time == y_TS, "Crypto Volume"] = new_c_vol

        # upload the new artificial historical series on MongoDB
        # collection "EXC_cleandata"
        mongo_upload(key_hist_df, collection_dict_upload.get(
            "collection_exc_clean"))

else:
    pass

# ###########################################################################
# ######################## MISSING DATA FIXING ##############################

q_dict_str: Dict[str, str] = {}
q_dict_str = {"Time": str(two_before_TS)}

# downloading from MongoDB the matrix referring to the previous day
day_bfr_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_clean"), q_dict_str)

# add the "key" column
day_bfr_mat["key"] = day_bfr_mat["Exchange"] + "&" + day_bfr_mat["Pair"]
Example #25
stablecoin_matrix = matrix_data.loc[matrix_data["fiat"].isin(stablecoin)]

# merging the datasets on the 'Time' and 'fiat' columns
stable_merged = pd.merge(stablecoin_matrix,
                         matrix_rate_stable,
                         on=["Time", "fiat"])

# converting the prices into USD
stable_merged["Close Price"] = stable_merged["Close Price"] / \
    stable_merged["Rate"]
stable_merged["Close Price"] = stable_merged["Close Price"].replace(
    [np.inf, -np.inf], np.nan)
stable_merged["Close Price"].fillna(0, inplace=True)
stable_merged["Pair Volume"] = stable_merged["Pair Volume"] / \
    stable_merged["Rate"]
stable_merged["Pair Volume"] = stable_merged["Pair Volume"].replace(
    [np.inf, -np.inf], np.nan)
stable_merged["Pair Volume"].fillna(0, inplace=True)

# subsetting the dataset with only the relevant columns
stable_merged = stable_merged[[
    "Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"
]]

# reunite the dataframes and put data on MongoDB
converted_data = conv_merged
converted_data = converted_data.append(stable_merged)
converted_data = converted_data.append(usd_matrix)
print(converted_data)
mongo_upload(converted_data, "collection_exc_final_data")
Example #26
            # checking if the matrix is not empty
            if mat.shape[0] > 1:

                if exchange == "bittrex" and cp == "btcusdt":

                    sub_vol = np.array(
                        mat.loc[mat.Time == 1544486400, "Crypto Volume"])
                    mat.loc[mat.Time == 1544572800, "Crypto Volume"] = sub_vol
                    mat.loc[mat.Time == 1544659200, "Crypto Volume"] = sub_vol

                mat["Pair Volume"] = mat["Close Price"] * mat["Crypto Volume"]

            # put the manipulated data on MongoDB
            try:

                mongo_upload(mat, "collection_cw_vol_check")

            except TypeError:
                pass

end = time.time()

print("This script took: {} seconds".format(float(end - start)))

# ############## fixing historical series main part ##############
start = time.time()

tot_matrix = query_mongo(DB_NAME, MONGO_DICT.get("coll_vol_chk"))


for Crypto in CRYPTO_ASSET: