Example #1
    def __init__(self):
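        """Load GPU counts and, for every alive panel, the cached pool info and
        dual-miner list from Redis; record how long the reads took."""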
        measure_time_start = time.time()
        list_of_alive_panels = redis_store.lrange(
            'sidebar_info:list_of_alive_panels', 0, -1)
        num_of_gpus = redis_store.get('main_dashboard:num_of_gpus')
        if num_of_gpus:
            self.num_of_gpus = int(num_of_gpus)

        num_of_alive_gpus = redis_store.get('main_dashboard:num_of_alive_gpus')
        if num_of_alive_gpus:
            self.num_of_alive_gpus = int(num_of_alive_gpus)
        self.panels_info = {}
        for panel_name in list_of_alive_panels:
            self.panels_info[panel_name] = {}
            list_of_pool_info_packed = redis_store.get(
                "admin_main_dashboard:{}:unique_poll_info_list".format(
                    str(panel_name)))
            if list_of_pool_info_packed:
                self.panels_info[panel_name]["list_of_pool_info"] = json.loads(
                    list_of_pool_info_packed)
            else:
                self.panels_info[panel_name]["list_of_pool_info"] = []

            self.panels_info[panel_name][
                'dual_miners_list'] = redis_store.lrange(
                    "admin_main_dashboard:{}:dual_miners_list".format(
                        str(panel_name)), 0, -1)

        # Measure the full constructor runtime, including the per-panel reads.
        measure_time_end = time.time()
        self.execution_time = round((measure_time_end - measure_time_start), 2)
Example #2
def is_bios_exist(bios_name):
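    """Return True if bios_name is present in the Redis list of known BIOSes."""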
    bioses_list = redis_store.lrange("technical_information:list_of_bioses", 0,
                                     -1)
    return bios_name in bioses_list
Example #3
def technical_information_update():
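    """Rebuild the technical_information Redis keys: BIOS, per-BIOS rig,
    motherboard, drive-name and IP-info data, globally and per alive panel."""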
    dict_of_bioses = get_dict_of_bioses()
    redis_store.delete("technical_information:dict_of_bioses")
    if dict_of_bioses:
        redis_store.hmset("technical_information:dict_of_bioses",
                          dict_of_bioses)

    list_of_bioses = get_list_of_bioses()
    redis_store.delete('technical_information:list_of_bioses')
    if list_of_bioses:
        redis_store.lpush('technical_information:list_of_bioses',
                          *list_of_bioses)

    for bios_name_loop in list_of_bioses:
        dict_of_rigs = get_dict_of_rigs_for_bios(bios_name_loop)
        redis_store.delete(
            "technical_information:{}:dict_of_rigs_bioses".format(
                str(bios_name_loop)))
        if dict_of_rigs:
            json_str = json.dumps(dict_of_rigs)
            redis_store.set(
                "technical_information:{}:dict_of_rigs_bioses".format(
                    str(bios_name_loop)), json_str)

    dict_of_mobo = get_dict_of_mobo()
    redis_store.delete("technical_information:dict_of_mobo")
    if dict_of_mobo:
        redis_store.hmset("technical_information:dict_of_mobo", dict_of_mobo)

    dict_of_drive_names = get_dict_of_drive_names()
    redis_store.delete("technical_information:dict_of_drive_names")
    if dict_of_drive_names:
        redis_store.hmset("technical_information:dict_of_drive_names",
                          dict_of_drive_names)

    ret_dict_all = get_dict_of_ip_info_all()
    redis_store.delete("technical_information:dict_of_ip_info_all")
    if ret_dict_all:
        json_str_all = json.dumps(ret_dict_all)
        redis_store.set("technical_information:dict_of_ip_info_all",
                        json_str_all)

    list_of_alive_panels = redis_store.lrange(
        'sidebar_info:list_of_alive_panels', 0, -1)
    for panel_name in list_of_alive_panels:
        ret_dict = get_dict_of_ip_info(panel_name=panel_name)
        redis_store.delete("technical_information:{}:dict_of_ip_info".format(
            str(panel_name)))
        if ret_dict:
            json_str = json.dumps(ret_dict)
            redis_store.set(
                "technical_information:{}:dict_of_ip_info".format(
                    str(panel_name)), json_str)
Example #4
    def __init__(self):
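        """Load the cached technical information from Redis: BIOS, motherboard
        and drive maps, rig/GPU counts, and IP info (global and per panel)."""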
        measure_time_start = time.time()
        self.dict_of_ip_info_panels = {}
        self.dict_of_bioses = redis_store.hgetall(
            "technical_information:dict_of_bioses")
        self.dict_of_mobo = redis_store.hgetall(
            "technical_information:dict_of_mobo")
        self.dict_of_drive_names = redis_store.hgetall(
            "technical_information:dict_of_drive_names")
        num_of_rigs = redis_store.get('main_dashboard:num_of_rigs')
        if num_of_rigs:
            self.num_of_rigs = int(num_of_rigs)

        num_of_gpus = redis_store.get('main_dashboard:num_of_gpus')
        if num_of_gpus:
            self.num_of_gpus = int(num_of_gpus)

        num_of_alive_gpus = redis_store.get('main_dashboard:num_of_alive_gpus')
        if num_of_alive_gpus:
            self.num_of_alive_gpus = int(num_of_alive_gpus)

        dict_of_ip_info_all_packed = redis_store.get(
            "technical_information:dict_of_ip_info_all")
        if dict_of_ip_info_all_packed:
            self.dict_of_ip_info_all = json.loads(dict_of_ip_info_all_packed)
        else:
            self.dict_of_ip_info_all = {}

        list_of_alive_panels = redis_store.lrange(
            'sidebar_info:list_of_alive_panels', 0, -1)
        for panel_name in list_of_alive_panels:

            dict_of_ip_info_packed = redis_store.get(
                "technical_information:{}:dict_of_ip_info".format(
                    str(panel_name)))
            if dict_of_ip_info_packed:
                self.dict_of_ip_info_panels[panel_name] = json.loads(
                    dict_of_ip_info_packed)
            else:
                self.dict_of_ip_info_panels[panel_name] = {}

        measure_time_end = time.time()
        self.execution_time = round((measure_time_end - measure_time_start), 2)
Example #5
def admin_main_dashboard_update():
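    """Rebuild the per-panel admin_main_dashboard Redis keys: the unique pool
    info list and the dual-miners list for every alive panel."""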
    list_of_alive_panels = redis_store.lrange(
        'sidebar_info:list_of_alive_panels', 0, -1)
    for panel_name in list_of_alive_panels:
        ret_list = get_unique_poll_info_list(panel_name=panel_name)
        redis_store.delete(
            "admin_main_dashboard:{}:unique_poll_info_list".format(
                str(panel_name)))
        if ret_list:
            json_str = json.dumps(ret_list)
            redis_store.set(
                "admin_main_dashboard:{}:unique_poll_info_list".format(
                    str(panel_name)), json_str)

        dual_miners_list = get_dual_miners_list(panel_name)
        redis_store.delete("admin_main_dashboard:{}:dual_miners_list".format(
            str(panel_name)))
        if dual_miners_list:
            redis_store.lpush(
                "admin_main_dashboard:{}:dual_miners_list".format(
                    str(panel_name)), *dual_miners_list)
Example #6
def set_list_of_nanopool_wallets():
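    """Collect every Nanopool wallet referenced by the alive panels' pool info
    and store both the wallet list and the full pool-info dicts in Redis."""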
    list_of_nanopool_wallets_dict = []
    list_of_nanopool_wallets = []
    list_of_alive_panels = redis_store.lrange(
        'sidebar_info:list_of_alive_panels', 0, -1)
    panels_info = {}
    for panel_name in list_of_alive_panels:
        panels_info[panel_name] = {}
        list_of_pool_info_packed = redis_store.get(
            "admin_main_dashboard:{}:unique_poll_info_list".format(
                str(panel_name)))
        if list_of_pool_info_packed:
            panels_info[panel_name]["list_of_pool_info"] = json.loads(
                list_of_pool_info_packed)
        else:
            panels_info[panel_name]["list_of_pool_info"] = []

    # Keep only pools whose primary proxy pool points at Nanopool.
    for panel_name in panels_info:
        for pool_info in panels_info[panel_name]["list_of_pool_info"]:
            if pool_info.get("proxypool1") and "nanopool" in pool_info["proxypool1"]:
                list_of_nanopool_wallets_dict.append(pool_info)
                if pool_info.get("proxywallet"):
                    list_of_nanopool_wallets.append(pool_info["proxywallet"])
    redis_store.delete("nanopool_dash:list_of_nanopool_wallets_dict")
    if list_of_nanopool_wallets_dict:
        json_str = json.dumps(list_of_nanopool_wallets_dict)
        redis_store.set("nanopool_dash:list_of_nanopool_wallets_dict",
                        json_str)

    redis_store.delete("nanopool_dash:list_of_nanopool_wallets")
    if list_of_nanopool_wallets:
        redis_store.lpush("nanopool_dash:list_of_nanopool_wallets",
                          *list_of_nanopool_wallets)
Example #7
def nanopool_report_update():
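    """Build a per-day Nanopool report (payment totals, average hashrate and
    share counts) for each wallet and cache it in Redis as JSON."""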
    # FOR Ethereum
    nanopool_eth_url = "https://api.nanopool.org/v1/eth/"
    list_of_nanopool_wallets = redis_store.lrange(
        'nanopool_dash:list_of_nanopool_wallets', 0, -1)
    for nanopool_wallet in list_of_nanopool_wallets:
        try:
            response_pay = requests.get(
                nanopool_eth_url + "payments/{}".format(str(nanopool_wallet)),
                timeout=20)
            json_dict_payments = response_pay.json()
            if os.path.exists("payments/{}.txt".format(str(nanopool_wallet))):
                os.remove("payments/{}.txt".format(str(nanopool_wallet)))
            with open("payments/{}.txt".format(str(nanopool_wallet)),
                      "w") as payments_file:
                payments_file.write(json.dumps(json_dict_payments))
        except Exception as e:
            print("Failed to get payments from Nanopool:", e)
            # Fall back to the last cached response, if one exists.
            json_dict_payments = {}
            cache_path = "payments/{}.txt".format(str(nanopool_wallet))
            if os.path.exists(cache_path):
                with open(cache_path, "r") as payments_information:
                    json_dict_payments = json.loads(payments_information.read())

        if json_dict_payments and json_dict_payments.get("status"):
            ret_dict = {}
            payments_list = copy.deepcopy(json_dict_payments["data"])
            for payment in payments_list:
                # payment['date'] = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(payment['date']))
                payment['date'] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', time.localtime(payment['date']))

            df = pd.DataFrame(json_dict_payments["data"])
            # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True)
            # Localized time
            df['date'] = pd.to_datetime(df['date'], unit='s')
            df['date'] = df['date'].dt.tz_localize('UTC')
            df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
            df['date'] = df['date'].dt.date

            data_frame_obj = df.groupby(df.date)['amount'].sum()
            dict_of_total = dict_with_datetime_keys_to_str(
                data_frame_obj.to_dict())

            response_pay = requests.get(
                nanopool_eth_url + "history/{}".format(str(nanopool_wallet)),
                timeout=20)
            json_dict_hashrate_chart = response_pay.json()

            if json_dict_hashrate_chart and json_dict_hashrate_chart.get(
                    "status"):

                df = pd.DataFrame(json_dict_hashrate_chart["data"])
                # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True)
                # Localized time
                df['date'] = pd.to_datetime(df['date'], unit='s')
                df['date'] = df['date'].dt.tz_localize('UTC')
                df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
                df['date'] = df['date'].dt.date

                data_frame_obj = df.groupby(df.date)['hashrate'].mean()
                dict_of_average_hashrate = dict_with_datetime_keys_to_str(
                    data_frame_obj.to_dict())

                for key, value in dict_of_average_hashrate.items():
                    if not ret_dict.get(key):
                        ret_dict[key] = {}
                    if value != 0.0:
                        ret_dict[key]["average_hashrate"] = round(
                            (float(value) / 1000.0), 3)
                    else:
                        ret_dict[key]["average_hashrate"] = value

                for key, value in dict_of_total.items():
                    if ret_dict.get(key):
                        ret_dict[key]["total"] = value

                response_shares = requests.get(
                    nanopool_eth_url +
                    "shareratehistory/{}".format(str(nanopool_wallet)),
                    timeout=20)

                dict_shares = response_shares.json()
                if dict_shares and dict_shares.get("status"):
                    df = pd.DataFrame(dict_shares["data"])
                    # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True)
                    # Localized time
                    df['date'] = pd.to_datetime(df['date'], unit='s')
                    df['date'] = df['date'].dt.tz_localize('UTC')
                    df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
                    df['date'] = df['date'].dt.date
                    data_frame_obj = df.groupby(df.date).sum()
                    dict_of_dates = data_frame_obj.to_dict()
                    shares_table_dict_d = dict_of_dates.get("shares")
                    shares_table_dict = {}
                    for key in shares_table_dict_d.keys():
                        if not isinstance(key, str):
                            try:
                                shares_table_dict[str(key.strftime(
                                    "%Y-%m-%d"))] = shares_table_dict_d[key]
                            except Exception as e:
                                print(e)
                                print("nanopool_report_update Exception")

                    for key, value in shares_table_dict.items():
                        if ret_dict.get(key):
                            ret_dict[key]["shares"] = value
            if ret_dict:
                json_str = json.dumps(ret_dict)
                redis_store.set(
                    "nanopool_wallet_info:{}:report_by_days".format(
                        str(nanopool_wallet)), json_str)
Example #8
def nanopool_payments_history_update():
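    """Group each wallet's Nanopool payments by day (total amount, transaction
    count and the individual transactions) and cache the result in Redis."""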
    # FOR Ethereum
    nanopool_eth_url = "https://api.nanopool.org/v1/eth/"
    list_of_nanopool_wallets = redis_store.lrange(
        'nanopool_dash:list_of_nanopool_wallets', 0, -1)
    for nanopool_wallet in list_of_nanopool_wallets:
        try:
            response_pay = requests.get(
                nanopool_eth_url + "payments/{}".format(str(nanopool_wallet)),
                timeout=20)
            json_dict_payments = response_pay.json()
            if os.path.exists("payments/{}.txt".format(str(nanopool_wallet))):
                os.remove("payments/{}.txt".format(str(nanopool_wallet)))
            with open("payments/{}.txt".format(str(nanopool_wallet)),
                      "w") as payments_file:
                payments_file.write(json.dumps(json_dict_payments))
        except Exception as e:
            print("Failed to get payments from Nanopool:", e)
            # Fall back to the last cached response, if one exists.
            json_dict_payments = {}
            cache_path = "payments/{}.txt".format(str(nanopool_wallet))
            if os.path.exists(cache_path):
                with open(cache_path, "r") as payments_information:
                    json_dict_payments = json.loads(payments_information.read())

        if json_dict_payments and json_dict_payments.get("status"):
            ret_dict = {}
            payments_list = copy.deepcopy(json_dict_payments["data"])
            for payment in payments_list:
                # payment['date'] = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(payment['date']))
                payment['date'] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', time.localtime(payment['date']))

            df = pd.DataFrame(json_dict_payments["data"])
            # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True)
            # Localized time
            df['date'] = pd.to_datetime(df['date'], unit='s')
            df['date'] = df['date'].dt.tz_localize('UTC')
            df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
            df['date'] = df['date'].dt.date

            data_frame_obj = df.groupby(df.date)['amount'].sum()
            dict_of_total = dict_with_datetime_keys_to_str(
                data_frame_obj.to_dict())

            data_frame_obj = df.groupby(df.date)['amount'].count()
            dict_of_amount_tx = dict_with_datetime_keys_to_str(
                data_frame_obj.to_dict())

            for key, value in dict_of_total.items():
                if not ret_dict.get(key):
                    ret_dict[key] = {}
                ret_dict[key]["total"] = value
            for key, value in dict_of_amount_tx.items():
                if not ret_dict.get(key):
                    ret_dict[key] = {}
                ret_dict[key]["amount"] = value

            # Attach the individual transactions to the day they belong to.
            for key in ret_dict:
                for tx in payments_list:
                    if key in tx['date']:
                        ret_dict[key].setdefault('tx_list', []).append(tx)

            if ret_dict:
                json_str = json.dumps(ret_dict)
                redis_store.set(
                    "nanopool_wallet_info:{}:all_payments".format(
                        str(nanopool_wallet)), json_str)
Example #9
def nanopool_info_update():
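    """Refresh per-wallet Nanopool stats in Redis: balance and hashrate,
    last-24h payments, the amount paid today and daily share counts."""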
    # FOR Ethereum
    nanopool_eth_url = "https://api.nanopool.org/v1/eth/"
    list_of_nanopool_wallets = redis_store.lrange(
        'nanopool_dash:list_of_nanopool_wallets', 0, -1)
    for nanopool_wallet in list_of_nanopool_wallets:

        response_hb = requests.get(
            nanopool_eth_url +
            "balance_hashrate/{}".format(str(nanopool_wallet)),
            timeout=20)
        json_dict_balance_hashrate = response_hb.json()

        if json_dict_balance_hashrate and json_dict_balance_hashrate.get("status") \
                and json_dict_balance_hashrate.get("data"):
            if json_dict_balance_hashrate["data"].get(
                    "hashrate") and json_dict_balance_hashrate["data"].get(
                        "balance"):
                redis_store.set(
                    "nanopool_wallet_info:{}:balance".format(
                        str(nanopool_wallet)),
                    json_dict_balance_hashrate["data"].get("balance"))
                redis_store.set(
                    "nanopool_wallet_info:{}:hashrate".format(
                        str(nanopool_wallet)),
                    json_dict_balance_hashrate["data"].get("hashrate"))

        response_24p = requests.get(
            nanopool_eth_url + "paymentsday/{}".format(str(nanopool_wallet)),
            timeout=20)
        json_dict_24_pay = response_24p.json()

        if json_dict_24_pay and json_dict_24_pay.get("status"):
            payments_24_table = []
            all_paid = 0.0
            for payment in json_dict_24_pay['data']:
                # payment["date"] = time.strftime('%Y-%m-%d', time.gmtime(payment["date"])) # UTC TIME
                payment["date"] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', time.localtime(payment["date"]))
                payments_24_table.append(payment)
                # utc_now = datetime.datetime.utcnow() #UTC NOW
                now = datetime.datetime.now()
                today = now.strftime('%Y-%m-%d')
                if today in payment['date']:
                    if payment.get("confirmed"):
                        all_paid += payment.get("amount")

            json_str_payments_24_table = json.dumps(payments_24_table)
            redis_store.set(
                "nanopool_wallet_info:{}:payments_24_table".format(
                    str(nanopool_wallet)), json_str_payments_24_table)

            redis_store.set(
                "nanopool_wallet_info:{}:all_paid_today".format(
                    str(nanopool_wallet)), all_paid)

        response_shares = requests.get(
            nanopool_eth_url +
            "shareratehistory/{}".format(str(nanopool_wallet)),
            timeout=20)

        dict_shares = response_shares.json()
        if dict_shares and dict_shares.get("status"):
            df = pd.DataFrame(dict_shares["data"])
            # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True)
            # Localized time
            df['date'] = pd.to_datetime(df['date'], unit='s')
            df['date'] = df['date'].dt.tz_localize('UTC')
            df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
            df['date'] = df['date'].dt.date
            data_frame_obj = df.groupby(df.date).sum()
            dict_of_dates = data_frame_obj.to_dict()
            shares_table_dict_d = dict_of_dates.get("shares")
            shares_table_dict = {}
            for key in shares_table_dict_d.keys():
                if not isinstance(key, str):
                    try:
                        shares_table_dict[str(key.strftime(
                            "%Y-%m-%d"))] = shares_table_dict_d[key]
                    except Exception as e:
                        print(e)
                        print("nanopool_info_update Exception")
            shares_table_json_str = json.dumps(shares_table_dict)
            redis_store.set(
                "nanopool_wallet_info:{}:shares_table".format(
                    str(nanopool_wallet)), shares_table_json_str)