Code example #1
def test_query_mongo():

    # reading the test json file into a DataFrame
    df_test = pd.read_json(data_folder)

    df_test = df_test.drop(columns=["_id"])

    # querying the collection from db

    database = "index"
    collection = "test_mongo"

    df_mongo = query_mongo(database, collection)

    assert df_test.equals(df_mongo)

    query_dict = {"A": {"$gt": 0}}

    df_mongo = query_mongo(database, collection, query_dict)

    assert df_test.equals(df_mongo)

    query_dict = {"A": {"$gt": 111110}}
    ll = []

    df_mongo = query_mongo(database, collection, query_dict)

    assert ll == df_mongo

    collection = "tes"

    df_mongo = query_mongo(database, collection)

    assert ll == df_mongo
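
For reference, a minimal sketch of what query_mongo might look like, inferred only from how it is called in these examples (database name, collection name, optional query dict) and from the assertions in the test above (a DataFrame without the "_id" column when documents match, an empty list otherwise). The project's actual implementation may differ.

import pandas as pd
from pymongo import MongoClient


def query_mongo(database, collection, query_dict=None):
    # assumes a MongoDB instance reachable with default connection settings
    coll = MongoClient()[database][collection]
    docs = list(coll.find(query_dict or {}, {"_id": False}))
    if not docs:
        # mirrors the behaviour asserted in the test: no match -> empty list
        return []
    return pd.DataFrame(docs)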
Code example #2
def daily_conv_op(day_to_conv_TS, conversion_fiat=CONVERSION_FIAT,
                  stable=STABLE_COIN, series="CW"):

    # defining the query dictionaries
    query_data = {"Time": int(day_to_conv_TS)}
    query_rate = {"Date": str(day_to_conv_TS)}
    query_stable = {"Time": int(day_to_conv_TS)}
    # querying the data from mongo
    matrix_rate = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_ecb_clean"), query_rate)

    if series == "CW":

        matrix_data = query_mongo(
            DB_NAME, MONGO_DICT.get("coll_cw_clean"), query_data)

    elif series == "EXC":

        matrix_data = query_mongo(
            DB_NAME, MONGO_DICT.get("coll_exc_clean"), query_data)

    matrix_rate_stable = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_stable_rate"), query_stable)

    # converting the data into USD using the ECB rates
    converted_df = conv_into_usd(
        DB_NAME, matrix_data, matrix_rate,
        matrix_rate_stable, conversion_fiat, stable)

    return converted_df
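
If series were anything other than "CW" or "EXC", matrix_data would be left unbound when conv_into_usd is called. One way to make the branch exhaustive (a sketch, not the project's code) is to select the collection name first and raise on unknown values:

    if series == "CW":
        coll_name = MONGO_DICT.get("coll_cw_clean")
    elif series == "EXC":
        coll_name = MONGO_DICT.get("coll_exc_clean")
    else:
        # hypothetical guard, not present in the original function
        raise ValueError(f"unknown series: {series!r}")
    matrix_data = query_mongo(DB_NAME, coll_name, query_data)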
Code example #3
def exc_daily_cleaning(exc_list, day_to_clean):

    previous_day = int(day_to_clean) - DAY_IN_SEC

    # download from MongoDB the exc raw data of yesterday
    q_dict: Dict[str, str] = {}
    q_dict = {"Time": str(day_to_clean + DAY_IN_SEC)}
    daily_mat = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_exc_raw"), q_dict)

    # fixing the "pair volume" information in the daily df
    daily_mat_vol_fix = daily_exc_pair_vol_fix(daily_mat, exc_list)

    # creating different df based on the hour of download
    (daily_mat_00, daily_mat_12, _, _) = exc_time_split(daily_mat_vol_fix)

    # shift the Time information back by 86400 seconds (1 day)
    daily_mat_00["Time"] = [str(int(element) - DAY_IN_SEC)
                            for element in daily_mat_00["Time"]]
    daily_mat_12["Time"] = [str(int(element) - DAY_IN_SEC)
                            for element in daily_mat_12["Time"]]

    # completing the daily matrix with dead crypto-fiat pair
    daily_mat_complete = exc_daily_key_mngm(
        daily_mat_00, daily_mat_12, day_to_clean)

    # downloading from MongoDB the matrix referring to the previous day
    q_dict_bfr: Dict[str, int] = {}
    q_dict_bfr = {"Time": int(previous_day)}
    day_bfr_mat = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_exc_clean"), q_dict_bfr)

    fixed_daily_mat = exc_daily_fix_op(day_bfr_mat, daily_mat_complete)

    return fixed_daily_mat
Code example #4
def cw_hist_conv_op(cleaned_df, conv_fiat=CONVERSION_FIAT, stable=STABLE_COIN):

    matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"))
    matrix_rate_stable = query_mongo(DB_NAME,
                                     MONGO_DICT.get("coll_stable_rate"))

    # converting the cleaned data into USD using the ECB rates
    converted_df = conv_into_usd(DB_NAME, cleaned_df, matrix_rate,
                                 matrix_rate_stable, conv_fiat, stable)

    return converted_df
Code example #5
File: web_app_index.py Project: dginst/crypto-index
def update_indicator(timer):

    df_index = query_mongo("index", "index_level_1000")

    dff_ind = df_index.copy()
    dff_last_ind = dff_ind.tail(2)
    dff_ind_y = dff_last_ind[dff_last_ind['Date'] ==
                             dff_last_ind['Date'].min()]['Index Value'].values[0]
    dff_ind_t = dff_last_ind[dff_last_ind['Date'] ==
                             dff_last_ind['Date'].max()]['Index Value'].values[0]

    fig_indicator = go.Figure(
        go.Indicator(mode="delta",
                     value=dff_ind_t,
                     delta={
                         'reference': dff_ind_y,
                         'relative': True,
                         'valueformat': '.2%'
                     }))
    fig_indicator.update_traces(delta_font={'size': 22})
    fig_indicator.update_layout(height=60, width=100)

    if dff_ind_t >= dff_ind_y:
        fig_indicator.update_traces(delta_increasing_color='green')
    elif dff_ind_t < dff_ind_y:
        fig_indicator.update_traces(delta_decreasing_color='red')

    csv_string_index = dff_ind.to_csv(index=False, encoding='utf-8')
    csv_string_index = "data:text/csv;charset=utf-8," + quote(csv_string_index)

    return fig_indicator, csv_string_index
Code example #6
File: index_hist.py Project: dginst/crypto-index
def index_hist_total(coll_to_use="coll_data_feed",
                     crypto_asset=CRYPTO_ASSET,
                     exc_list=EXCHANGES,
                     pair_list=PAIR_ARRAY):

    # drop the pre-existing collections
    mongo_coll_drop("index_hist")

    # define the mongo indexing
    mongo_indexing()

    data_df = query_mongo(DB_NAME, MONGO_DICT.get(coll_to_use))

    (crypto_asset_price_arr, crypto_asset_vol_arr, exc_vol_tot,
     logic_matrix_one) = index_hist_loop(data_df, crypto_asset, exc_list,
                                         pair_list)
    print(crypto_asset_vol_arr)
    (crypto_asset_price, crypto_asset_vol, price_ret, weights_for_board,
     first_logic_matrix_df, second_logic_matrix_df, ewma_df,
     double_checked_EWMA, syntethic, syntethic_relative_matrix, divisor_array,
     reshaped_divisor, index_values,
     index_1000_base) = index_hist_op(crypto_asset_price_arr,
                                      crypto_asset_vol_arr, logic_matrix_one)

    index_hist_uploader(crypto_asset_price, crypto_asset_vol, exc_vol_tot,
                        price_ret, weights_for_board, first_logic_matrix_df,
                        second_logic_matrix_df, ewma_df, double_checked_EWMA,
                        syntethic, syntethic_relative_matrix, divisor_array,
                        reshaped_divisor, index_values, index_1000_base)
Code example #7
def daily_check_mongo(coll_to_check, query, day_to_check=None, coll_kind=None):

    day_before_TS, _ = days_variable(day_to_check)

    if coll_kind is None:

        query["Time"] = int(day_before_TS)

    elif coll_kind == "ecb_raw":

        query["TIME_PERIOD"] = str(day_before_TS)

    elif coll_kind == "ecb_clean":

        query["Date"] = str(day_before_TS)

    # retrieving the wanted data on MongoDB collection
    matrix = query_mongo(DB_NAME, MONGO_DICT.get(coll_to_check), query)

    if isinstance(matrix, list):

        res = False

    else:
        res = True

    return bool(res)
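
Usage sketch: as in code example #22 further down, the boolean result is typically used as a guard before running a daily upload (the collection name, the query and run_daily_update() below are illustrative only):

if daily_check_mongo("coll_data_feed", {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:
    # the day is missing from the collection: run the daily pipeline
    run_daily_update()  # hypothetical placeholder for the actual daily routine
else:
    print("The collection is already updated")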
Code example #8
File: web_app_index.py Project: dginst/crypto-index
def update_index_df(n, sel_col):

    df_index = query_mongo("index", "index_level_1000")

    dff = df_index.copy()
    dff = dff.loc[dff["Date"] > "2016-09-30"]
    dff_last = dff.tail(2)
    dff_y = dff_last[dff_last['Date'] ==
                     dff_last['Date'].min()]['Index Value'].values[0]
    dff_t = dff_last[dff_last['Date'] ==
                     dff_last['Date'].max()]['Index Value'].values[0]

    variation = (dff_t >= dff_y)
    dff["Var"] = variation

    index_area = area(data_frame=dff,
                      x="Date",
                      y="Index Value",
                      labels={
                          "value": "",
                          "Index Value": "",
                          "Date": ""
                      },
                      template=sel_col,
                      title='Crypto Index Level (USD)',
                      color="Var",
                      color_discrete_map={
                          False: '#FD3216',
                          True: '#1CA71C',
                      })
    index_area.update_layout(showlegend=False)

    index_area.update_yaxes(tickprefix="$")

    return index_area
Code example #9
def hist_data_feed_op():

    # define the array containing the date where the index uses CW feed data
    CW_date_arr = date_gen(START_DATE, EXC_START_DATE)
    CW_date_str = [str(date) for date in CW_date_arr]

    # drop the pre-existing collection (if there is one)
    mongo_coll_drop("index_feed")

    # downloading the EXC series from MongoDB
    EXC_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_final"))
    EXC_series = EXC_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    # downloading the CW series from MongoDB and selecting only the date
    # from 2016-01-01 to 2020-04-17
    CW_series = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_final"))
    CW_series["Time"] = [str(x) for x in CW_series["Time"]]
    print("CW")
    print(CW_series)
    CW_sub_series = CW_series.loc[CW_series.Time.isin(CW_date_str)]
    print(CW_sub_series)
    CW_sub_series = CW_sub_series[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]
    CW_sub_series["Time"] = [int(x) for x in CW_sub_series["Time"]]
    CW_sub_series.reset_index(drop=True, inplace=True)
    print(CW_sub_series)

    # creating a unique dataframe containing the two different data sources
    data_feed = CW_sub_series.append(EXC_series, sort=True)
    data_feed.reset_index(drop=True, inplace=True)

    data_feed = data_feed[
        ["Time", "Close Price", "Crypto Volume", "Pair Volume", "Exchange", "Pair"]]

    print(data_feed)
    data_feed = homogeneize_feed(data_feed)
    print("post hom")
    print(data_feed)

    # put the converted data on MongoDB
    mongo_upload(data_feed, "collection_data_feed")

    return None
Code example #10
def cw_exc_merging(start_date=START_DATE, exc_start=EXC_START_DATE,
                   db=DB_NAME, coll_cw="coll_cw_final",
                   coll_exc="coll_exc_final"):

    cw_date_arr = date_gen(start_date, exc_start, EoD="N")

    exc_series = query_mongo(db, MONGO_DICT.get(coll_exc))
    exc_part = df_reorder(exc_series, column_set="conversion")

    # downloading the CW series from MongoDB and selecting only the date
    # from 2016-01-01 to 2020-04-17
    cw_series = query_mongo(db, MONGO_DICT.get(coll_cw))
    cw_part = cw_series.loc[cw_series.Time.isin(cw_date_arr)]
    cw_part = df_reorder(cw_part, column_set="conversion")

    # creating a unique dataframe containing the two different data sources
    merged_series = cw_part.append(exc_part, sort=True)
    merged_series["Time"] = [int(d) for d in merged_series["Time"]]

    return merged_series
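
Note that DataFrame.append, used above, is deprecated and was removed in pandas 2.0; an equivalent merge with pd.concat would be:

    merged_series = pd.concat([cw_part, exc_part], sort=True)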
Code example #11
def exc_hist_conv(exc_fix_df):

    # querying the rates collection from MongoDB
    matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"))
    matrix_rate = matrix_rate.rename({"Date": "Time"}, axis="columns")
    matrix_rate = matrix_rate.loc[matrix_rate.Time.isin(date_array_str)]

    # querying the stable rates collection from MongoDB
    matrix_rate_stable = query_mongo(
        DB_NAME, MONGO_DICT.get("coll_stable_rate"))
    matrix_rate_stable = matrix_rate_stable.loc[matrix_rate_stable.Time.isin(
        date_array_str)]

    converted_data = conv_into_usd(DB_NAME, exc_fix_df, matrix_rate,
                                   matrix_rate_stable, CONVERSION_FIAT, STABLE_COIN)

    converted_data["Time"] = [int(element)
                              for element in converted_data["Time"]]

    return converted_data
Code example #12
def exc_key_mngmt(exc_clean_df):

    # downloading from MongoDB the matrix containing the exchange-pair logic values
    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_keys"))

    # creating the exchange-pair couples key for the daily matrix
    exc_clean_df["key"] = exc_clean_df["Exchange"] + "&" + exc_clean_df["Pair"]

    # ## adding the dead series to the daily values

    # selecting only the exchange-pair couples present in the historical series
    key_present = logic_key.loc[logic_key.logic_value == 1]
    key_present = key_present.drop(columns=["logic_value"])

    exc_clean_df = exc_clean_df.loc[exc_clean_df.Time != str(today_TS)]

    # selecting the last day of the EXC "historical" series
    last_day_with_val = max(exc_clean_df.Time)
    last_day = exc_clean_df.loc[exc_clean_df.Time
                                == last_day_with_val]

    # applying a left join between the present keys matrix and the last_day
    # matrix; this operation returns a matrix containing all the keys in
    # "key_present" and, if some keys are missing in "last_day", puts NaN
    merged = pd.merge(key_present, last_day, on="key", how="left")

    # selecting only the absent keys
    merg_absent = merged.loc[merged["Close Price"].isnull()]

    # copy the header list so the shared constant is not mutated
    header = list(CLEAN_DATA_HEAD)
    header.extend(["key"])

    # create the historical series for each selected element
    for k in merg_absent["key"]:

        mat_to_add = pd.DataFrame(columns=header)
        mat_to_add["Time"] = date_array_str
        split_val = k.split("&")
        mat_to_add["Exchange"] = split_val[0]
        mat_to_add["Pair"] = split_val[1]
        mat_to_add["Close Price"] = 0.0
        mat_to_add["Crypto Volume"] = 0.0
        mat_to_add["Pair Volume"] = 0.0
        exc_clean_df = exc_clean_df.append(mat_to_add)

    # uploading the cleaned data on MongoDB in the collection EXC_cleandata
    exc_clean_df = exc_clean_df.drop(columns=["key"])
    exc_clean_df["Time"] = [int(element) for element in exc_clean_df["Time"]]

    # deleting the 17/04/2020 from the df (if present)
    exc_complete_df = exc_clean_df.loc[exc_clean_df.Time != 1587081600]

    return exc_complete_df
Code example #13
def cw_daily_conv_op(day_to_conv_TS):

    # defining the query dictionaries
    query_data = {"Time": int(day_to_conv_TS)}
    query_rate = {"Date": str(day_to_conv_TS)}
    query_stable = {"Time": int(day_to_conv_TS)}
    # querying the data from mongo
    matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"),
                              query_rate)
    matrix_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_clean"),
                              query_data)
    matrix_rate_stable = query_mongo(DB_NAME,
                                     MONGO_DICT.get("coll_stable_rate"),
                                     query_stable)

    # converting the data into USD using the ECB rates
    converted_df = conv_into_usd(DB_NAME, matrix_data, matrix_rate,
                                 matrix_rate_stable, CONVERSION_FIAT,
                                 STABLE_COIN)

    return converted_df
Code example #14
def cw_daily_key_mngm(volume_checked_df, time_to_check, date_tot_str):

    if isinstance(volume_checked_df, list):

        volume_checked_tot = query_mongo(DB_NAME,
                                         MONGO_DICT.get("coll_vol_chk"))

        last_day_with_val = max(volume_checked_tot.Time)

        volume_checked_df = volume_checked_tot.loc[volume_checked_tot.Time ==
                                                   last_day_with_val]

    # downloading from MongoDB the matrix containing the
    # exchange-pair logic values
    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_keys"))

    # creating the exchange-pair couples key for the daily matrix
    volume_checked_df["key"] = volume_checked_df["Exchange"] + \
        "&" + volume_checked_df["Pair"]

    # selecting only the exchange-pair couples present in the historical series
    key_present = logic_key.loc[logic_key.logic_value == 1]
    key_present = key_present.drop(columns=["logic_value"])
    # applying a left join between the present keys matrix and the daily
    # matrix, this operation returns a matrix containing all the keys in
    # "key_present" and NaN where some keys are missing
    merged = pd.merge(key_present, volume_checked_df, on="key", how="left")
    # assigning some columns values and substituting NaN with 0
    # in the "merged" df
    merged["Time"] = int(time_to_check)
    split_val = merged["key"].str.split("&", expand=True)
    merged["Exchange"] = split_val[0]
    merged["Pair"] = split_val[1]
    merged.fillna(0, inplace=True)

    #  checking potential new exchange-pair couple

    cw_new_key_mngm(logic_key, volume_checked_df, time_to_check, date_tot_str)

    return merged
Code example #15
def daily_table():

    # assign date of interest to variables
    today_str = datetime.now().strftime("%Y-%m-%d")
    today = datetime.strptime(today_str, "%Y-%m-%d")
    today_TS = int(today.replace(tzinfo=timezone.utc).timestamp())
    y_TS = today_TS - DAY_IN_SEC

    ecb_query = {"Date": str(y_TS), "Currency": "EUR/USD"}
    btcusd_query = {"Time": y_TS, "Exchange": "coinbase-pro", "Pair": "btcusd"}

    eurusd = query_mongo(DB_NAME, "ecb_clean", ecb_query)
    btcusd = query_mongo(DB_NAME, "index_data_feed", btcusd_query)

    btcusd_value = np.array(btcusd["Close Price"])
    eurusd_value = np.array(eurusd["Rate"])
    human_date = np.array(eurusd["Standard Date"])

    df = pd.DataFrame(columns=["Date", "EUR/USD", "BTCUSD"])
    df["Date"] = human_date
    df["EUR/USD"] = eurusd_value
    df["BTCUSD"] = btcusd_value

    return df
Code example #16
def index_norm_logic_op(crypto_asset, daily_ewma):

    past_start_quarter_list = start_q()
    last_start_q = past_start_quarter_list[len(past_start_quarter_list) - 1]
    # downloading from mongoDB the current logic matrices (1 e 2)
    logic_one = query_mongo(DB_NAME, MONGO_DICT.get("coll_log1"))
    # taking only the logic value referred to the current period
    # current_logic_one = logic_one.iloc[[len(logic_one["Date"]) - 2]]
    current_logic_one = logic_one.loc[logic_one.Time == int(last_start_q)]
    current_logic_one = current_logic_one.drop(columns=["Date", "Time"])
    logic_two = query_mongo(DB_NAME, MONGO_DICT.get("coll_log2"))
    # taking only the logic value referred to the current period
    # current_logic_two = logic_two.iloc[[len(logic_two["Date"]) - 2]]
    current_logic_two = logic_two.loc[logic_two.Time == int(last_start_q)]
    current_logic_two = current_logic_two.drop(columns=["Date", "Time"])

    # computing the ewma checked with both the first and second logic matrices
    daily_ewma_first_check = np.array(daily_ewma) * np.array(current_logic_one)
    daily_ewma_double_check = daily_ewma_first_check * \
        np.array(current_logic_two)
    daily_ewma_double_check = pd.DataFrame(daily_ewma_double_check,
                                           columns=crypto_asset)

    return daily_ewma_double_check
Code example #17
File: web_app_index.py Project: dginst/crypto-index
def update_pie(my_dropdown, sel_col):

    df_weight = query_mongo("index", "index_weights")

    dff_weight = df_weight.copy()
    dff_weight = dff_weight.drop(columns="Time")
    dff_w_filt = dff_weight.loc[dff_weight["Date"] == my_dropdown]

    dff_w_filt = dff_w_filt.drop(columns="Date")

    df_col = list(dff_w_filt.columns)

    for col in df_col:

        val = array(dff_w_filt[col])[0]
        if val == 0.0000:

            dff_w_filt = dff_w_filt.drop(columns=col)

    df_val = array(dff_w_filt)[0]
    df_col_2 = list(dff_w_filt.columns)

    pie_fig = pie(values=df_val,
                  names=df_col_2,
                  hole=.3,
                  template=sel_col,
                  title='Index Weights',
                  color=df_col_2,
                  color_discrete_map={
                      "BTC": "#FEAF16",
                      "ETH": "#511CFB",
                      "XRP": "#F6222E",
                      "LTC": "#E2E2E2",
                      "BCH": "#86CE00",
                      "EOS": "#FBE426",
                      "ETC": "#DA16FF",
                      "ZEC": "#B68100",
                      "ADA": "#00B5F7",
                      "XLM": "#750D86",
                      "XMR": "#A777F1",
                      "BSV": "#F58518"
                  })

    csv_string_weight = dff_weight.to_csv(index=False, encoding='utf-8')
    csv_string_weight = "data:text/csv;charset=utf-8," + \
        quote(csv_string_weight)

    return pie_fig, csv_string_weight
Code example #18
def daily_fix_miss_op(daily_complete_df, day, collection):

    _, two_before_TS = days_variable(day)
    # defining the query details
    q_dict_time: Dict[str, int] = {}
    q_dict_time = {"Time": int(two_before_TS)}

    # downloading from MongoDB the matrix referring to the previous day
    day_bfr_mat = query_mongo(DB_NAME, MONGO_DICT.get(collection), q_dict_time)

    # add the "key" column
    day_bfr_mat["key"] = day_bfr_mat["Exchange"] + "&" + day_bfr_mat["Pair"]

    # looping through all the daily keys looking for potential missing value
    for key_val in day_bfr_mat["key"]:

        new_val = daily_complete_df.loc[daily_complete_df.key == key_val]

        # if the new 'Close Price' referred to a certain key is 0, the script
        # checks the previous day value: if it is 0 too nothing is done, if it
        # is not 0 the values related to the selected key need to be corrected
        if np.array(new_val["Close Price"]) == 0.0:

            d_before_val = day_bfr_mat.loc[day_bfr_mat.key == key_val]

            if np.array(d_before_val["Close Price"]) != 0.0:

                price_var = daily_fix_miss(new_val, daily_complete_df,
                                           day_bfr_mat)
                # applying the weighted variation to the day before 'Close Price'
                new_price = (1 + price_var) * d_before_val["Close Price"]
                # changing the 'Close Price' value using the new computed price
                daily_complete_df.loc[daily_complete_df.key == key_val,
                                      "Close Price"] = new_price

            else:
                pass

        else:
            pass

    daily_complete_df = daily_complete_df.drop(columns=["key"])
    daily_complete_df["Time"] = [int(d) for d in daily_complete_df["Time"]]

    daily_fixed_df = daily_complete_df

    return daily_fixed_df
Code example #19
def new_synth_op(crypto_asset, daily_return_df, synt_ptf_value):

    past_start_quarter_list = start_q()
    new_start_q = past_start_quarter_list[len(past_start_quarter_list) - 1]

    # downloading from mongoDB the current weights
    tot_weights = query_mongo(DB_NAME, MONGO_DICT.get("coll_weights"))
    new_weights = tot_weights.loc[tot_weights.Time == int(new_start_q),
                                  crypto_asset]

    # daily syntethic matrix computation
    daily_ret_arr = np.array(daily_return_df[crypto_asset])
    synt_weights = np.array(new_weights) * synt_ptf_value
    new_synth = synt_weights * (1 + daily_ret_arr)
    daily_synthh = pd.DataFrame(new_synth, columns=crypto_asset)

    return daily_synthh
Code example #20
def check_missing(tot_date_arr, coll_to_check, query, days_to_check=10):

    # selecting the last days_to_check days and putting them into an array
    last_days = tot_date_arr[(len(tot_date_arr) -
                              days_to_check):len(tot_date_arr)]
    print(last_days)
    # retrieving the wanted data on MongoDB collection
    matrix = query_mongo(DB_NAME, MONGO_DICT.get(coll_to_check), query)

    # checking the time column and selecting only the last days_to_check days
    # retrieved from the MongoDB collection
    try:

        date_list = np.array(matrix["Time"])

    except KeyError:

        try:

            date_list = np.array(matrix["TIME_PERIOD"])

        except KeyError:

            date_list = np.array(matrix["Date"])

    to_del = np.array([0])
    date_list = np.setdiff1d(date_list, to_del)

    last_days_db = date_list[(len(date_list) - days_to_check):len(date_list)]

    # last_days_db_str = [str(single_date)
    #                     for single_date in last_days_db]

    # finding the date to download as difference between
    # complete array of date and date now stored on MongoDB
    date_to_add = Diff(last_days, last_days_db)
    print(date_to_add)

    if len(date_to_add) > 9:

        date_to_add = None

    return date_to_add
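
Diff is a project helper not shown in these examples; a minimal equivalent, assuming it simply returns the dates present in the first array but missing from the second, could be:

def Diff(full_arr, stored_arr):
    # dates expected by the index but not yet stored on MongoDB
    return [d for d in full_arr if d not in stored_arr]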
Code example #21
def exc_hist_op():

    mongo_coll_drop("exc")

    mongo_indexing()

    # defining the crytpo_fiat array
    crypto_fiat_arr = all_crypto_fiat_gen()
    # querying all raw data from EXC_rawdata
    exc_raw_df = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_raw"))

    midnight_clean = exc_initial_clean(exc_raw_df, crypto_fiat_arr)
    mongo_upload(midnight_clean, "collection_exc_uniform")

    # deleting the values for xrp in the coinbase-pro exchange
    midnight_clean["key"] = midnight_clean["Exchange"] + \
        "&" + midnight_clean["Pair"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "coinbase-pro&xrpusd"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "coinbase-pro&xrpeur"]
    # deleting the values for zec and xmr in the bittrex exchange
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusd"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusdt"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&zecusdc"]
    midnight_clean = midnight_clean.loc[midnight_clean.key
                                        != "bittrex&xmrusdt"]

    midnight_clean = midnight_clean.drop(columns="key")

    exc_complete_df = exc_key_mngmt(midnight_clean)
    exc_fixed_df = exc_hist_fix(exc_complete_df)
    mongo_upload(exc_fixed_df, "collection_exc_clean")

    exc_converted = exc_hist_conv(exc_fixed_df)
    exc_converted.fillna(0, inplace=True)
    mongo_upload(exc_converted, "collection_exc_final_data")

    return None
Code example #22
def exc_daily_feed(day=None):

    day_before_TS, _ = days_variable(day)

    if day is None:

        if daily_check_mongo("coll_data_feed", {"Exchange": "coinbase-pro", "Pair": "ethusd"}) is False:

            query_data = {"Time": int(day_before_TS)}
            exc_daily_df = query_mongo(
                DB_NAME, MONGO_DICT.get("coll_exc_final"), query_data)
            mongo_upload(exc_daily_df, "collection_data_feed")

        else:
            print("The collection index_data_feed is already updated")

    else:
        pass

    return None
Code example #23
def daily_pair_vol_fix2(time_to_fix):

    # defining the query details
    q_dict: Dict[str, int] = {}
    q_dict = {"Time": time_to_fix}

    # querying the MongoDB collection
    daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"), q_dict)
    daily_mat = daily_mat.loc[daily_mat.Time != 0]
    daily_mat = daily_mat.drop(columns=["Low", "High", "Open"])

    for Crypto in CRYPTO_ASSET:

        ccy_pair_array = []

        for i in PAIR_ARRAY:

            ccy_pair_array.append(Crypto.lower() + i)

        for exchange in EXCHANGES:

            for cp in ccy_pair_array:

                mat = daily_mat.loc[daily_mat["Exchange"] == exchange]
                mat = mat.loc[mat["Pair"] == cp]
                # checking if the matrix is not empty
                if mat.shape[0] > 1:

                    mat["Pair Volume"] = mat["Close Price"] * \
                        mat["Crypto Volume"]

                # put the manipulated data on MongoDB
                try:

                    mongo_upload(mat, "collection_cw_vol_check")

                except TypeError:

                    pass

    return None
Code example #24
def daily_pair_vol_fix(day):

    # defining the query details
    q_dict: Dict[str, int] = {}
    q_dict = {"Time": day}

    # querying on MongoDB collection
    daily_mat = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"), q_dict)

    try:

        daily_mat = daily_mat.loc[daily_mat.Time != 0]
        daily_mat = daily_mat.drop(columns=["Low", "High", "Open"])

        daily_vol_fix = pair_vol_fix(daily_mat)

    except AttributeError:

        daily_vol_fix = []

    return daily_vol_fix
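
The empty list returned on AttributeError acts as a "no data" sentinel; it is the same convention that cw_daily_key_mngm in code example #14 detects with isinstance(volume_checked_df, list). A hypothetical driver tying the two together (the argument values are illustrative only):

day = 1586995200               # hypothetical midnight timestamp
date_tot_str = ["1586995200"]  # hypothetical list of dates as strings
vol_fixed = daily_pair_vol_fix(day)
daily_complete = cw_daily_key_mngm(vol_fixed, day, date_tot_str)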
Code example #25
def cw_hist_operation(start_date=START_DATE):

    date_tot = date_gen(start_date)
    last_day_TS = date_tot[len(date_tot) - 1]

    mongo_indexing()

    # deleting previous MongoDB collections
    mongo_coll_drop("cw_hist_clean")
    mongo_coll_drop("cw_hist_conv")

    # fix and upload the series for the "pair volume" info
    tot_raw_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_cw_raw"))
    cw_vol_fix_data = cw_hist_pair_vol_fix(tot_raw_data)
    mongo_upload(cw_vol_fix_data, "collection_cw_vol_check")

    # clean and upload all the series
    cleaned_df = cw_hist_cleaning(cw_vol_fix_data, start_date)
    mongo_upload(cleaned_df, "collection_cw_clean")

    # compute and upload USDC and USDT rates series
    usdt_rates, usdc_rates = stable_rates_op("coll_cw_clean", None)
    mongo_upload(usdt_rates, "collection_stable_rate")
    mongo_upload(usdc_rates, "collection_stable_rate")

    # convert and upload all the data into USD
    converted_df = cw_hist_conv_op(cleaned_df)
    mongo_upload(converted_df, "collection_cw_converted")

    # logic matrix of crypto-fiat keys
    key_df = key_log_mat(DB_NAME, "coll_cw_conv", last_day_TS, EXCHANGES,
                         CRYPTO_ASSET, PAIR_ARRAY)
    mongo_upload(key_df, "collection_CW_key")
    mongo_upload(key_df, "collection_EXC_key")

    # fill zero-volume data and upload on MongoDB
    final_df = cw_hist_zero_vol_fill_op(converted_df)
    mongo_upload(final_df, "collection_cw_final_data")

    return None
Code example #26
File: web_app_index.py Project: dginst/crypto-index
def update_vol(my_checklist, sel_col):

    df_volume = query_mongo("index", "crypto_volume")

    dff_vol = df_volume.copy()
    dff_date = dff_vol["Date"]
    dff_vol_filtered = dff_vol[my_checklist]
    dff_vol_filtered["Date"] = dff_date

    volume_line = line(data_frame=dff_vol_filtered,
                       x="Date",
                       y=my_checklist,
                       template=sel_col,
                       title='Crypto Volumes',
                       labels={
                           "value": "Volume (USD)",
                           "variable": ""
                       },
                       color_discrete_map={
                           "BTC": "#FEAF16",
                           "ETH": "#511CFB",
                           "XRP": "#F6222E",
                           "LTC": "#E2E2E2",
                           "BCH": "#86CE00",
                           "EOS": "#FBE426",
                           "ETC": "#DA16FF",
                           "ZEC": "#B68100",
                           "ADA": "#00B5F7",
                           "XLM": "#750D86",
                           "XMR": "#A777F1",
                           "BSV": "#F58518"
                       })

    dff_volume = df_volume.copy()
    dff_volume = dff_volume.drop(columns="Time")
    csv_string_volume = dff_volume.to_csv(index=False, encoding='utf-8')
    csv_string_volume = "data:text/csv;charset=utf-8," + \
        quote(csv_string_volume)

    return volume_line, csv_string_volume
Code example #27
File: web_app_index.py Project: dginst/crypto-index
def update_today_val(timer):

    df_index = query_mongo("index", "index_level_1000")

    dff_ind = df_index.copy()
    dff_last_ind = dff_ind.tail(1)
    dff_prev = (dff_ind.tail(2)).head(1)
    today_val = dff_last_ind["Index Value"].values[0]
    yesterday_val = dff_prev["Index Value"].values[0]

    fig_indicator = go.Figure(
        go.Indicator(
            mode="number+delta",
            value=today_val,
            delta={
                'relative': False,
                'reference': yesterday_val,
                'valueformat': ',.2f',
                'position': "right",
            },
            number={
                'suffix': "$",
                'valueformat': ',.2f',
                'font': {
                    'color': 'black',
                    'size': 22
                }
            },
        ))
    fig_indicator.update_traces(delta_font={'size': 20})
    fig_indicator.update_layout(
        height=60,
        width=300,
    )

    return fig_indicator
Code example #28
def exc_daily_key_mngm(daily_mat_00, daily_mat_12, day_to_clean_TS):

    logic_key = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_keys"))

    # adding to the daily matrix the value referred to dead crypto-fiat pair
    daily_mat_with_dead = exc_dead_key_mng(
        logic_key, daily_mat_00, daily_mat_12, day_to_clean_TS)

    # searching for possible new crypto-fiat pair
    new_key_hist = exc_new_key_mng(logic_key, daily_mat_00, day_to_clean_TS)

    if new_key_hist != []:

        collection_dict_upload = mongo_coll()
        # upload the new artificial historical series on MongoDB
        # collection "EXC_cleandata"
        mongo_upload(new_key_hist, collection_dict_upload.get(
            "collection_exc_clean"))

    else:

        pass

    return daily_mat_with_dead
Code example #29
date_tot = [str(single_date) for single_date in date_complete_int]

# #################### setup mongo connection ##################

# creating the empty collections cleandata within the database index
mongo_indexing()

collection_dict_upload = mongo_coll()

# ################# DAILY DATA CONVERSION MAIN PART ##################

# querying the data from mongo
query_data = {"Time": str(y_TS)}
query_rate = {"Date": str(y_TS)}
query_stable = {"Time": str(y_TS)}
matrix_rate = query_mongo(DB_NAME, MONGO_DICT.get("coll_ecb_clean"),
                          query_rate)
matrix_rate = matrix_rate.rename({"Date": "Time"}, axis="columns")
matrix_data = query_mongo(DB_NAME, MONGO_DICT.get("coll_exc_clean"),
                          query_data)
print(matrix_data)
matrix_rate_stable = query_mongo(DB_NAME, MONGO_DICT.get("coll_stable_rate"),
                                 query_stable)

# creating a column containing the fiat currency
matrix_rate["fiat"] = [x[:3].lower() for x in matrix_rate["Currency"]]
matrix_data["fiat"] = [x[3:].lower() for x in matrix_data["Pair"]]
matrix_rate_stable["fiat"] = [
    x[:4].lower() for x in matrix_rate_stable["Currency"]
]

# ############ creating a USD subset which will not be converted #########
Code example #30
import pandas as pd

from cryptoindex.mongo_setup import query_mongo

db = 'index'
cll = 'index_level_1000'

df = query_mongo(db, cll)

df['Time'] = [int(x) for x in df['Time']]

df = df.loc[df['Time'] > 1467331200]

df = df.rename(columns={
    'Date': 'time',
    'Time': 'timestamp',
    'Index Value': 'value'
})

print(df)

df = str(df.to_dict(orient="records"))

value = 'json = '

final = value + df

print(final)

with open('result.json', 'w') as f:
    f.write(final)
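
Note that str(df.to_dict(orient="records")) produces a Python-literal string (single-quoted keys), not strict JSON. If strict JSON output were needed instead, pandas' own serializer could be used, for example:

# alternative sketch: strict JSON output via DataFrame.to_json
records_json = df.to_json(orient="records")  # assumes df is still the DataFrame here
with open('result.json', 'w') as f:
    f.write(value + records_json)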