def create_celery(app, config_name):
    """Create and configure the Celery application bound to the Flask app.

    Wires the broker and result backend from the Flask config, configures
    the celery_once Redis backend, and wraps every task so it executes
    inside the Flask application context.

    :param app: the Flask application instance.
    :param config_name: key into the ``config`` mapping selecting the
        active configuration object.
    :return: the configured :class:`Celery` application.
    """
    celery_app = Celery(app.import_name,
                        backend=app.config['CELERY_RESULT_BACKEND'],
                        broker=app.config['CELERY_BROKER_URL'])
    celery_app.conf.update(app.config)
    # celery_once lock settings: duplicate-task locks auto-expire after
    # 35 minutes.
    celery_app.conf.ONCE = {
        'backend': 'celery_once.backends.Redis',
        'settings': {
            'url': config[config_name].CELERY_ONCE_BROKER_DB_URL,
            'default_timeout': 60 * 35
        }
    }

    TaskBase = celery_app.Task

    class ContextTask(TaskBase):
        # Every task runs inside the Flask app context so extensions
        # (redis_store, db, ...) resolve correctly.
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery_app.Task = ContextTask
    # Reset the "system is calling" flag on startup so a stale lock from a
    # crashed previous run cannot block Twilio notifications forever.
    redis_store.set("is_system_already_calling", 0)
    # BUG FIX: the old message referenced a key name ("is_server_calling_you")
    # that is never used anywhere; log the key that is actually reset.
    print("Set 'is_system_already_calling' value to 0")
    return celery_app
def admin_main_dashboard_update():
    """Rebuild the per-panel admin-dashboard caches in Redis.

    For every alive panel this refreshes two keys: the JSON-encoded list of
    unique pool info entries, and the list of dual miners.
    """
    alive_panels = redis_store.lrange(
        'sidebar_info:list_of_alive_panels', 0, -1)
    for panel in alive_panels:
        # Unique pool info: stored as one JSON string per panel.
        poll_info = get_unique_poll_info_list(panel_name=panel)
        poll_key = "admin_main_dashboard:{}:unique_poll_info_list".format(
            str(panel))
        redis_store.delete(poll_key)
        if poll_info:
            redis_store.set(poll_key, json.dumps(poll_info))

        # Dual miners: stored as a Redis list per panel.
        dual_miners = get_dual_miners_list(panel)
        dual_key = "admin_main_dashboard:{}:dual_miners_list".format(
            str(panel))
        redis_store.delete(dual_key)
        if dual_miners:
            redis_store.lpush(dual_key, *dual_miners)
def set_list_of_nanopool_wallets():
    """Scan every alive panel's cached pool info for nanopool pools and
    cache the matching pool-info dicts and wallet addresses in Redis.
    """
    nanopool_pool_dicts = []
    nanopool_wallets = []
    alive_panels = redis_store.lrange(
        'sidebar_info:list_of_alive_panels', 0, -1)

    # Load each panel's cached pool-info list (empty list when missing).
    panels_info = {}
    for panel in alive_panels:
        packed = redis_store.get(
            "admin_main_dashboard:{}:unique_poll_info_list".format(
                str(panel)))
        panels_info[panel] = {
            "list_of_pool_info": json.loads(packed) if packed else []
        }

    # Collect pool entries whose primary proxy pool points at nanopool.
    for panel, info in panels_info.items():
        for pool_info in info["list_of_pool_info"]:
            proxypool1 = pool_info.get("proxypool1")
            if proxypool1 and "nanopool" in proxypool1:
                nanopool_pool_dicts.append(pool_info)
                if pool_info.get("proxywallet"):
                    nanopool_wallets.append(pool_info.get("proxywallet"))

    redis_store.delete("nanopool_dash:list_of_nanopool_wallets_dict")
    if nanopool_pool_dicts:
        redis_store.set("nanopool_dash:list_of_nanopool_wallets_dict",
                        json.dumps(nanopool_pool_dicts))
    redis_store.delete("nanopool_dash:list_of_nanopool_wallets")
    if nanopool_wallets:
        redis_store.lpush("nanopool_dash:list_of_nanopool_wallets",
                          *nanopool_wallets)
def technical_information_update():
    """Refresh the technical-information caches in Redis.

    Updates: bios hash, per-bios rig dicts, motherboard hash, drive-name
    hash, the all-panels IP-info blob, and per-panel IP-info blobs.

    Improvements over the original: removed the dead ``dict_of_bioses = {}``
    and ``dict_of_rigs = {}`` pre-assignments that were immediately
    overwritten, merged the duplicated ``if list_of_bioses:`` guard, and
    hoisted repeated key-format expressions into locals.
    """
    dict_of_bioses = get_dict_of_bioses()
    redis_store.delete("technical_information:dict_of_bioses")
    if dict_of_bioses:
        redis_store.hmset("technical_information:dict_of_bioses",
                          dict_of_bioses)

    list_of_bioses = get_list_of_bioses()
    redis_store.delete('technical_information:list_of_bioses')
    if list_of_bioses:
        redis_store.lpush('technical_information:list_of_bioses',
                          *list_of_bioses)
        # Per-bios rig mapping, stored as one JSON string per bios.
        for bios_name_loop in list_of_bioses:
            rigs_key = "technical_information:{}:dict_of_rigs_bioses".format(
                str(bios_name_loop))
            dict_of_rigs = get_dict_of_rigs_for_bios(bios_name_loop)
            redis_store.delete(rigs_key)
            if dict_of_rigs:
                redis_store.set(rigs_key, json.dumps(dict_of_rigs))

    dict_of_mobo = get_dict_of_mobo()
    redis_store.delete("technical_information:dict_of_mobo")
    if dict_of_mobo:
        redis_store.hmset("technical_information:dict_of_mobo", dict_of_mobo)

    dict_of_drive_names = get_dict_of_drive_names()
    redis_store.delete("technical_information:dict_of_drive_names")
    if dict_of_drive_names:
        redis_store.hmset("technical_information:dict_of_drive_names",
                          dict_of_drive_names)

    ret_dict_all = get_dict_of_ip_info_all()
    redis_store.delete("technical_information:dict_of_ip_info_all")
    if ret_dict_all:
        redis_store.set("technical_information:dict_of_ip_info_all",
                        json.dumps(ret_dict_all))

    # Per-panel IP info, one JSON string per alive panel.
    list_of_alive_panels = redis_store.lrange(
        'sidebar_info:list_of_alive_panels', 0, -1)
    for panel_name in list_of_alive_panels:
        ip_key = "technical_information:{}:dict_of_ip_info".format(
            str(panel_name))
        ret_dict = get_dict_of_ip_info(panel_name=panel_name)
        redis_store.delete(ip_key)
        if ret_dict:
            redis_store.set(ip_key, json.dumps(ret_dict))
def _try_notify_number(client_twilio, phone_number, url_with_attack_response):
    """Place one Twilio voice call to *phone_number* and report the outcome.

    Returns True when the callee counts as notified (final call status is
    "busy", "completed" or "in-progress"), False otherwise.
    """
    call = client_twilio.calls.create(
        to=str(phone_number),
        from_=str(Config.TWILIO_PHONE_NUMBER_SERVER),
        url=url_with_attack_response)
    # Give the callee time to answer before polling the call status.
    time.sleep(40)
    call = client_twilio.calls(call.sid).fetch()
    if not (call.status == "busy" or call.status == "failed"):
        # Hang up a still-active call so the notifier does not wait forever.
        call = client_twilio.calls(call.sid).update(status="completed")
    call = client_twilio.calls(call.sid).fetch()
    save_simple_action("Call to {} ended with status {}".format(
        call.to, call.status))
    if call.status in ("busy", "completed", "in-progress"):
        save_simple_action(
            "{} has been notified about crashed gpus.".format(call.to))
        return True
    save_simple_action(
        "Failed to notify {} about crashed gpus.".format(call.to))
    return False


def call_crashed_gpus():
    """Call the configured phone numbers in priority order when any GPUs
    have crashed, stopping at the first person successfully notified.

    The Redis flag "is_system_already_calling" acts as a crude lock so only
    one notification round runs at a time; it is always reset to 0 on exit,
    including on errors.

    Improvements over the original: the five nearly-identical copy-pasted
    call blocks are collapsed into one helper (which also fixes the bug in
    the 5th branch, where the call object itself — not its ``.status`` —
    was compared against "failed"), and the notification messages are made
    consistent (no stray leading space).
    """
    is_system_calling = redis_store.get("is_system_already_calling")
    if is_system_calling is None:
        redis_store.set("is_system_already_calling", 0)
    is_system_calling = bool(int(redis_store.get("is_system_already_calling")))

    num_of_crashed_gpus = redis_store.get("main_dashboard:num_of_crashed_gpus")
    if num_of_crashed_gpus is None:
        num_of_crashed_gpus = 0
    else:
        num_of_crashed_gpus = int(num_of_crashed_gpus)

    try:
        client_twilio = Client(str(Config.TWILIO_ACCOUNT_SID),
                               str(Config.TWILIO_AUTH_TOKEN))
        is_notified = False
        try_num = 1
        url_with_attack_response = \
            "https://powmining.com/twilio_responses/crashed_gpus"
        # Numbers are tried in priority order; unset numbers are skipped.
        phone_numbers = [
            Config.TWILIO_PHONE_NUMBER_1,
            Config.TWILIO_PHONE_NUMBER_2,
            Config.TWILIO_PHONE_NUMBER_3,
            Config.TWILIO_PHONE_NUMBER_4,
            Config.TWILIO_PHONE_NUMBER_5,
        ]
        if num_of_crashed_gpus > 0 and is_system_calling is False:
            while try_num <= 10 and bool(
                    int(redis_store.get("is_system_already_calling"))
            ) is False and is_notified is False:
                # Take the lock so a concurrent run does not double-call.
                redis_store.set("is_system_already_calling", 1)
                save_simple_action(
                    "Attempt to notify about crashed gpus. \nTry № {} \nGPUs crashed {}"
                    .format(try_num, num_of_crashed_gpus))
                try_num += 1
                for phone_number in phone_numbers:
                    if phone_number and not is_notified:
                        is_notified = _try_notify_number(
                            client_twilio, phone_number,
                            url_with_attack_response)
            # Release the lock once the round is over.
            redis_store.set("is_system_already_calling", 0)
        else:
            print("No GPUs crashed.all OK.")
    except Exception as e:
        # Best-effort notifier: always release the lock and log the failure.
        redis_store.set("is_system_already_calling", 0)
        print(e)
        print("Exception occurred while trying to notify about crashed gpus.")
def nanopool_report_update():
    """Build a per-day report (payment totals, average hashrate, shares)
    for every cached Nanopool ETH wallet and store it in Redis as JSON.

    All epoch timestamps are localized from UTC to 'Asia/Istanbul' before
    grouping by calendar day.
    """
    # FOR Ethereum
    nanopool_eth_url = "https://api.nanopool.org/v1/eth/"
    list_of_nanopool_wallets = redis_store.lrange(
        'nanopool_dash:list_of_nanopool_wallets', 0, -1)
    for nanopool_wallet in list_of_nanopool_wallets:
        try:
            # Fetch the full payments history and mirror it to disk so a
            # later failed request can fall back to the last good copy.
            response_pay = requests.get(
                nanopool_eth_url + "payments/{}".format(str(nanopool_wallet)),
                timeout=20)
            json_dict_payments = response_pay.json()
            if os.path.exists("payments/{}.txt".format(str(nanopool_wallet))):
                os.remove("payments/{}.txt".format(str(nanopool_wallet)))
            with open("payments/{}.txt".format(str(nanopool_wallet)),
                      "w") as payments_file:
                payments_file.write(json.dumps(json_dict_payments))
        except Exception as e:
            # Fall back to the on-disk copy of the last successful fetch.
            # NOTE(review): if the cache file never existed this open()
            # raises and aborts the whole update — confirm this is intended.
            print("Failed to get payments from Nanopool")
            with open("payments/{}.txt".format(str(nanopool_wallet)),
                      "r+") as payments_information:
                content = payments_information.read()
                json_dict_payments = json.loads(content)
            pass
        if json_dict_payments and json_dict_payments.get("status") == True:
            ret_dict = {}
            # Human-readable (server-local-time) dates on the raw entries.
            payments_list = copy.deepcopy(json_dict_payments["data"])
            for payment in payments_list:
                # payment['date'] = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(payment['date']))
                payment['date'] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', time.localtime(payment['date']))
            df = pd.DataFrame(json_dict_payments["data"])
            # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True) # Localized time
            df['date'] = pd.to_datetime(df['date'], unit='s')
            df['date'] = df['date'].dt.tz_localize('UTC')
            df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
            df['date'] = df['date'].dt.date
            # Sum of payment amounts per Istanbul-local calendar day.
            data_frame_obj = df.groupby(df.date)['amount'].sum()
            dict_of_total = dict_with_datetime_keys_to_str(
                data_frame_obj.to_dict())
            # Hashrate history for the same wallet.
            response_pay = requests.get(
                nanopool_eth_url + "history/{}".format(str(nanopool_wallet)),
                timeout=20)
            json_dict_hashrate_chart = response_pay.json()
            if json_dict_hashrate_chart and json_dict_hashrate_chart.get(
                    "status"):
                df = pd.DataFrame(json_dict_hashrate_chart["data"])
                # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True) # Localized time
                df['date'] = pd.to_datetime(df['date'], unit='s')
                df['date'] = df['date'].dt.tz_localize('UTC')
                df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
                df['date'] = df['date'].dt.date
                data_frame_obj = df.groupby(df.date)['hashrate'].mean()
                dict_of_average_hashrate = dict_with_datetime_keys_to_str(
                    data_frame_obj.to_dict())
                for key, value in dict_of_average_hashrate.items():
                    if not ret_dict.get(key):
                        ret_dict[key] = {}
                    if value != 0.0:
                        # Scale by 1/1000 to 3 decimals.
                        # NOTE(review): unit conversion assumed — confirm
                        # against the Nanopool API's hashrate units.
                        ret_dict[key]["average_hashrate"] = round(
                            (float(value) / 1000.0), 3)
                    else:
                        ret_dict[key]["average_hashrate"] = value
                # Only attach totals for days that already have a hashrate.
                for key, value in dict_of_total.items():
                    if ret_dict.get(key):
                        ret_dict[key]["total"] = value
                # Share-rate history, grouped per local calendar day.
                response_shares = requests.get(
                    nanopool_eth_url +
                    "/shareratehistory/{}".format(str(nanopool_wallet)),
                    timeout=20)
                dict_shares = response_shares.json()
                if dict_shares and dict_shares.get("status") == True:
                    df = pd.DataFrame(dict_shares["data"])
                    # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True) # Localized time
                    df['date'] = pd.to_datetime(df['date'], unit='s')
                    df['date'] = df['date'].dt.tz_localize('UTC')
                    df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
                    df['date'] = df['date'].dt.date
                    data_frame_obj = df.groupby(df.date).sum()
                    dict_of_dates = data_frame_obj.to_dict()
                    shares_table_dict_d = dict_of_dates.get("shares")
                    shares_table_dict = {}
                    # Normalize datetime.date keys to "YYYY-MM-DD" strings.
                    for key in shares_table_dict_d.keys():
                        if type(key) is not str:
                            try:
                                shares_table_dict[str(key.strftime(
                                    "%Y-%m-%d"))] = shares_table_dict_d[key]
                            except Exception as e:
                                print(e)
                                print("nanopool_info_update Exception")
                                pass
                    for key, value in shares_table_dict.items():
                        if ret_dict.get(key):
                            ret_dict[key]["shares"] = value
            if ret_dict:
                json_srt = json.dumps(ret_dict)
                redis_store.set(
                    "nanopool_wallet_info:{}:report_by_days".format(
                        str(nanopool_wallet)), json_srt)
def nanopool_payments_history_update():
    """Group every cached Nanopool ETH wallet's payments by calendar day
    and store per-day totals, transaction counts and transaction lists in
    Redis as JSON.
    """
    # FOR Ethereum
    nanopool_eth_url = "https://api.nanopool.org/v1/eth/"
    list_of_nanopool_wallets = redis_store.lrange(
        'nanopool_dash:list_of_nanopool_wallets', 0, -1)
    for nanopool_wallet in list_of_nanopool_wallets:
        try:
            # Fetch the payments history and mirror it to disk so a later
            # failed request can fall back to the last good copy.
            response_pay = requests.get(
                nanopool_eth_url + "payments/{}".format(str(nanopool_wallet)),
                timeout=20)
            json_dict_payments = response_pay.json()
            if os.path.exists("payments/{}.txt".format(str(nanopool_wallet))):
                os.remove("payments/{}.txt".format(str(nanopool_wallet)))
            with open("payments/{}.txt".format(str(nanopool_wallet)),
                      "w") as payments_file:
                payments_file.write(json.dumps(json_dict_payments))
        except Exception as e:
            # Fall back to the on-disk copy of the last successful fetch.
            # NOTE(review): raises if the cache file never existed — confirm.
            print("Failed to get payments from Nanopool")
            with open("payments/{}.txt".format(str(nanopool_wallet)),
                      "r+") as payments_information:
                content = payments_information.read()
                json_dict_payments = json.loads(content)
            pass
        if json_dict_payments and json_dict_payments.get("status") == True:
            ret_dict = {}
            # Human-readable (server-local-time) dates on the raw entries.
            payments_list = copy.deepcopy(json_dict_payments["data"])
            for payment in payments_list:
                # payment['date'] = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(payment['date']))
                payment['date'] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', time.localtime(payment['date']))
            df = pd.DataFrame(json_dict_payments["data"])
            # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True) # Localized time
            df['date'] = pd.to_datetime(df['date'], unit='s')
            df['date'] = df['date'].dt.tz_localize('UTC')
            df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
            df['date'] = df['date'].dt.date
            # Per-day sum and count of payment amounts.
            data_frame_obj = df.groupby(df.date)['amount'].sum()
            dict_of_total = dict_with_datetime_keys_to_str(
                data_frame_obj.to_dict())
            data_frame_obj = df.groupby(df.date)['amount'].count()
            dict_of_amount_tx = dict_with_datetime_keys_to_str(
                data_frame_obj.to_dict())
            for key, value in dict_of_total.items():
                if not ret_dict.get(key):
                    ret_dict[key] = {}
                ret_dict[key]["total"] = value
            for key, value in dict_of_amount_tx.items():
                if not ret_dict.get(key):
                    ret_dict[key] = {}
                ret_dict[key]["amount"] = value
            # Attach the raw transactions whose (local-time) date string
            # contains each day key.
            for key, value in ret_dict.items():
                if not ret_dict.get(key):
                    ret_dict[key] = {}
                for tx in payments_list:
                    if key in tx['date']:
                        if not ret_dict[key].get('tx_list'):
                            ret_dict[key]['tx_list'] = []
                        ret_dict[key]['tx_list'].append(tx)
            if ret_dict:
                json_srt = json.dumps(ret_dict)
                redis_store.set(
                    "nanopool_wallet_info:{}:all_payments".format(
                        str(nanopool_wallet)), json_srt)
def nanopool_info_update():
    """Refresh per-wallet Nanopool ETH caches in Redis: balance, hashrate,
    last-24h payments table, amount confirmed-paid today, and a per-day
    shares table.
    """
    # FOR Ethereum
    nanopool_eth_url = "https://api.nanopool.org/v1/eth/"
    list_of_nanopool_wallets = redis_store.lrange(
        'nanopool_dash:list_of_nanopool_wallets', 0, -1)
    for nanopool_wallet in list_of_nanopool_wallets:
        response_hb = requests.get(
            nanopool_eth_url +
            "balance_hashrate/{}".format(str(nanopool_wallet)),
            timeout=20)
        json_dict_balance_hashrate = response_hb.json()
        # Cache balance and hashrate only when both fields are present.
        if json_dict_balance_hashrate and json_dict_balance_hashrate.get("status") == True \
                and json_dict_balance_hashrate.get("data"):
            if json_dict_balance_hashrate["data"].get(
                    "hashrate") and json_dict_balance_hashrate["data"].get(
                        "balance"):
                redis_store.set(
                    "nanopool_wallet_info:{}:balance".format(
                        str(nanopool_wallet)),
                    json_dict_balance_hashrate["data"].get("balance"))
                redis_store.set(
                    "nanopool_wallet_info:{}:hashrate".format(
                        str(nanopool_wallet)),
                    json_dict_balance_hashrate["data"].get("hashrate"))
        # Last-24h payments: build a display table and sum today's
        # confirmed payouts.
        response_24p = requests.get(
            nanopool_eth_url +
            "paymentsday/{}".format(str(nanopool_wallet)),
            timeout=20)
        json_dict_24_pay = response_24p.json()
        if json_dict_24_pay and json_dict_24_pay.get("status") == True:
            payments_24_table = []
            all_paid = 0.0
            for payment in json_dict_24_pay['data']:
                # payment["date"] = time.strftime('%Y-%m-%d', time.gmtime(payment["date"])) # UTC TIME
                payment["date"] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', time.localtime(payment["date"]))
                payments_24_table.append(payment)
                # utc_now = datetime.datetime.utcnow() #UTC NOW
                now = datetime.datetime.now()
                today = now.strftime('%Y-%m-%d')
                # Count only confirmed payments dated today (server local
                # time).
                if today in payment['date']:
                    if payment.get("confirmed"):
                        all_paid += payment.get("amount")
            json_str_payments_24_table = json.dumps(payments_24_table)
            redis_store.set(
                "nanopool_wallet_info:{}:payments_24_table".format(
                    str(nanopool_wallet)), json_str_payments_24_table)
            redis_store.set(
                "nanopool_wallet_info:{}:all_paid_today".format(
                    str(nanopool_wallet)), all_paid)
        # Share-rate history grouped per Istanbul-local calendar day.
        response_shares = requests.get(
            nanopool_eth_url +
            "/shareratehistory/{}".format(str(nanopool_wallet)),
            timeout=20)
        dict_shares = response_shares.json()
        if dict_shares and dict_shares.get("status") == True:
            df = pd.DataFrame(dict_shares["data"])
            # df['date'] = pd.to_datetime(df['date'], unit='s',utc=True) # Localized time
            df['date'] = pd.to_datetime(df['date'], unit='s')
            df['date'] = df['date'].dt.tz_localize('UTC')
            df['date'] = df['date'].dt.tz_convert('Asia/Istanbul')
            df['date'] = df['date'].dt.date
            data_frame_obj = df.groupby(df.date).sum()
            dict_of_dates = data_frame_obj.to_dict()
            shares_table_dict_d = dict_of_dates.get("shares")
            shares_table_dict = {}
            # Normalize datetime.date keys to "YYYY-MM-DD" strings.
            for key in shares_table_dict_d.keys():
                if type(key) is not str:
                    try:
                        shares_table_dict[str(key.strftime(
                            "%Y-%m-%d"))] = shares_table_dict_d[key]
                    except Exception as e:
                        print(e)
                        print("nanopool_info_update Exception")
                        pass
            shares_table_json_srt = json.dumps(shares_table_dict)
            redis_store.set(
                "nanopool_wallet_info:{}:shares_table".format(
                    str(nanopool_wallet)), shares_table_json_srt)
def main_dashboard_update():
    """Refresh every cached main-dashboard metric in Redis, plus the list
    and count of alive panels.
    """
    # Scalar metrics: one Redis key per helper result.
    metrics = {
        'main_dashboard:num_of_gpus': get_num_of_gpus(),
        'main_dashboard:num_of_alive_gpus': get_num_of_alive_gpus(),
        'main_dashboard:total_hashrate': get_total_hashrate(),
        'main_dashboard:num_of_rigs': get_num_of_all_rigs(),
        'main_dashboard:num_of_alive_rigs': get_num_of_alive_rigs(),
        'main_dashboard:average_gpu_temperature':
            get_average_gpu_temperature(),
        'main_dashboard:num_of_rigs_under_attack':
            get_num_of_rigs_under_attack(),
        'main_dashboard:num_of_crashed_gpus': get_num_of_crashed_gpus(),
    }
    for redis_key, metric_value in metrics.items():
        redis_store.set(redis_key, metric_value)

    num_of_alive_panels, list_of_alive_panels = \
        get_num_and_list_of_alive_panels()
    # The old list must be dropped before re-pushing or entries accumulate.
    redis_store.delete('main_dashboard:list_of_alive_panels')  # IMPORTANT!!!!!!!
    if list_of_alive_panels:
        redis_store.lpush('main_dashboard:list_of_alive_panels',
                          *list_of_alive_panels)
    redis_store.set('main_dashboard:num_of_alive_panels', num_of_alive_panels)