import json
import os
import time

# `r` (a Redis client), `get_stats_data` (returns a `(json_body, status_code)`
# tuple) and the Django models referenced below (Node, Nodev2, Requestors,
# ProvidersComputing, Network, requestor_scraper_check) are assumed to be
# imported elsewhere in this module.


def network_versions_to_redis():
    now = round(time.time())
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query_range?query=count_values("version"%2C%20yagna_version_major%7Bjob%3D"community.1"%7D*100%2Byagna_version_minor%7Bjob%3D"community.1"%7D*10%2Byagna_version_patch%7Bjob%3D"community.1"%7D)&start={now}&end={now}&step=5'
    content = get_stats_data(domain)
    if content[1] == 200:
        versions_nonsorted = []
        versions = []
        data = content[0]['data']['result']
        # Collect (version, count) pairs so they can be sorted.
        for obj in data:
            versions_nonsorted.append(
                {"version": int(obj['metric']['version']), "count": obj['values'][0][1]})
        versions_nonsorted.sort(key=lambda x: x['version'])
        for obj in versions_nonsorted:
            version = str(obj['version'])
            count = obj['count']
            # The query encodes a version as major*100 + minor*10 + patch,
            # so two digits decode to 0.X.Y and three digits to 0.XY.Z.
            if len(version) == 2:
                concatenated = "0." + version[0] + "." + version[1]
            elif len(version) == 3:
                concatenated = "0." + version[0] + \
                    version[1] + "." + version[2]
            else:
                continue
            versions.append({
                "version": concatenated,
                "count": count,
            })
        serialized = json.dumps(versions)
        r.set("network_versions", serialized)
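

# A hedged helper sketch (not part of the original module) that makes the
# decoding above explicit; note the encoding is only unambiguous while the
# minor and patch components stay single-digit:
def _decode_version(encoded: int) -> str:
    digits = str(encoded)
    if len(digits) == 2:    # e.g. 64  -> "0.6.4"
        return f"0.{digits[0]}.{digits[1]}"
    if len(digits) == 3:    # e.g. 123 -> "0.12.3"
        return f"0.{digits[0]}{digits[1]}.{digits[2]}"
    raise ValueError(f"unexpected encoded version: {encoded}")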


def requestor_scraper():
    checker, checkcreated = requestor_scraper_check.objects.get_or_create(
        id=1)
    if checkcreated:
        # No requestors indexed before: loop back over the last 90 days,
        # one hour per query, to initialize the table with data.
        checker.indexed_before = True
        checker.save()
        now = round(time.time())
        ninetydaysago = now - 7776000  # 90 days in seconds
        hour = 3600
        while ninetydaysago < now:
            domain = os.environ.get(
                'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=increase(market_agreements_requestor_approved%7Bjob%3D"community.1"%7D%5B{hour}s%5D)&time={ninetydaysago + hour}'
            data = get_stats_data(domain)
            ninetydaysago += hour
            if data[1] == 200 and data[0]['data']['result']:
                for node in data[0]['data']['result']:
                    stats_tasks_requested = float(node['value'][1])
                    if stats_tasks_requested > 1:
                        obj, created = Requestors.objects.get_or_create(
                            node_id=node['metric']['instance'])
                        if created:
                            obj.tasks_requested = stats_tasks_requested
                        else:
                            obj.tasks_requested += stats_tasks_requested
                        obj.save()
    else:
        # Already indexed: only check the last 10 seconds.
        now = round(time.time())
        domain = os.environ.get(
            'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=increase(market_agreements_requestor_approved%7Bjob%3D"community.1"%7D%5B10s%5D)&time={now}'
        data = get_stats_data(domain)
        if data[1] == 200 and data[0]['data']['result']:
            for node in data[0]['data']['result']:
                stats_tasks_requested = float(node['value'][1])
                if stats_tasks_requested > 1:
                    obj, created = Requestors.objects.get_or_create(
                        node_id=node['metric']['instance'])
                    if created:
                        obj.tasks_requested = stats_tasks_requested
                    else:
                        obj.tasks_requested += stats_tasks_requested
                    obj.save()


def market_agreement_termination_reasons():
    end = round(time.time())
    content = {}
    # Success (note the 1h window; the other reasons use 6h)
    domain_success = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(market_agreements_provider_terminated_reason%7Bjob%3D"community.1"%2C%20reason%3D"Success"%7D%5B1h%5D))&time={end}'
    data_success = get_stats_data(domain_success)
    if data_success[1] == 200:
        if data_success[0]['data']['result']:
            content['market_agreements_success'] = round(float(
                data_success[0]['data']['result'][0]['value'][1]))
    # Cancelled
    domain_cancelled = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(market_agreements_provider_terminated_reason%7Bjob%3D"community.1"%2C%20reason%3D"Cancelled"%7D%5B6h%5D))&time={end}'
    data_cancelled = get_stats_data(domain_cancelled)
    if data_cancelled[1] == 200:
        if data_cancelled[0]['data']['result']:
            content['market_agreements_cancelled'] = round(float(
                data_cancelled[0]['data']['result'][0]['value'][1]))
    # Expired
    domain_expired = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(market_agreements_provider_terminated_reason%7Bjob%3D"community.1"%2C%20reason%3D"Expired"%7D%5B6h%5D))&time={end}'
    data_expired = get_stats_data(domain_expired)
    if data_expired[1] == 200:
        if data_expired[0]['data']['result']:
            content['market_agreements_expired'] = round(float(
                data_expired[0]['data']['result'][0]['value'][1]))
    # RequestorUnreachable
    domain_unreachable = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(market_agreements_provider_terminated_reason%7Bjob%3D"community.1"%2C%20reason%3D"RequestorUnreachable"%7D%5B6h%5D))&time={end}'
    data_unreachable = get_stats_data(domain_unreachable)
    if data_unreachable[1] == 200:
        if data_unreachable[0]['data']['result']:
            content['market_agreements_requestorUnreachable'] = round(float(
                data_unreachable[0]['data']['result'][0]['value'][1]))
    # DebitNotesDeadline
    domain_debitdeadline = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(market_agreements_provider_terminated_reason%7Bjob%3D"community.1"%2C%20reason%3D"DebitNotesDeadline"%7D%5B6h%5D))&time={end}'
    data_debitdeadline = get_stats_data(domain_debitdeadline)
    if data_debitdeadline[1] == 200:
        if data_debitdeadline[0]['data']['result']:
            content['market_agreements_debitnoteDeadline'] = round(float(
                data_debitdeadline[0]['data']['result'][0]['value'][1]))
    serialized = json.dumps(content)
    r.set("market_agreement_termination_reasons", serialized)
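

# The five reason blocks above differ only in the PromQL `reason` label, the
# lookback window, and the output key. A hedged, behavior-equivalent sketch
# of the same logic as a loop (the function name is illustrative and not part
# of the original module):
def _termination_reasons_compact():
    end = round(time.time())
    reasons = [
        ("Success", "1h", "market_agreements_success"),
        ("Cancelled", "6h", "market_agreements_cancelled"),
        ("Expired", "6h", "market_agreements_expired"),
        ("RequestorUnreachable", "6h", "market_agreements_requestorUnreachable"),
        ("DebitNotesDeadline", "6h", "market_agreements_debitnoteDeadline"),
    ]
    content = {}
    for reason, window, key in reasons:
        domain = os.environ.get('STATS_URL') + (
            'api/datasources/proxy/40/api/v1/query?query=sum(increase('
            'market_agreements_provider_terminated_reason%7Bjob%3D"community.1"'
            f'%2C%20reason%3D"{reason}"%7D%5B{window}%5D))&time={end}')
        data = get_stats_data(domain)
        if data[1] == 200 and data[0]['data']['result']:
            content[key] = round(
                float(data[0]['data']['result'][0]['value'][1]))
    r.set("market_agreement_termination_reasons", json.dumps(content))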


def node_earnings_total(node_version):
    if node_version == "v1":
        providers = Node.objects.filter(online=True)
    elif node_version == "v2":
        providers = Nodev2.objects.filter(online=True)
    else:
        return
    for user in providers:
        now = round(time.time())
        # Earnings over the last 10 minutes, per payment platform.
        domain = os.environ.get(
            'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bhostname%3D~"{user.node_id}"%2C%20platform%3D"zksync-mainnet-glm"%7D%5B10m%5D)%2F10%5E9)&time={now}'
        data = get_stats_data(domain)
        domain2 = os.environ.get(
            'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bhostname%3D~"{user.node_id}"%2C%20platform%3D"erc20-mainnet-glm"%7D%5B10m%5D)%2F10%5E9)&time={now}'
        data2 = get_stats_data(domain2)
        domain3 = os.environ.get(
            'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bhostname%3D~"{user.node_id}"%2C%20platform%3D"erc20-polygon-glm"%7D%5B10m%5D)%2F10%5E9)&time={now}'
        data3 = get_stats_data(domain3)
        domain4 = os.environ.get(
            'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bhostname%3D~"{user.node_id}"%2C%20platform%3D"polygon-polygon-glm"%7D%5B10m%5D)%2F10%5E9)&time={now}'
        data4 = get_stats_data(domain4)
        if data[0]['data']['result']:
            zksync_mainnet_glm = round(
                float(data[0]['data']['result'][0]['value'][1]), 2)
        else:
            zksync_mainnet_glm = 0.0
        if data2[0]['data']['result']:
            erc20_mainnet_glm = round(
                float(data2[0]['data']['result'][0]['value'][1]), 2)
        else:
            erc20_mainnet_glm = 0.0
        if data3[0]['data']['result']:
            erc20_polygon_glm = round(
                float(data3[0]['data']['result'][0]['value'][1]), 2)
        else:
            erc20_polygon_glm = 0.0
        if data4[0]['data']['result']:
            polygon_polygon_glm = round(
                float(data4[0]['data']['result'][0]['value'][1]), 2)
        else:
            polygon_polygon_glm = 0.0
        earned = (zksync_mainnet_glm + erc20_mainnet_glm
                  + erc20_polygon_glm + polygon_polygon_glm)
        if user.earnings_total:
            user.earnings_total += earned
        else:
            user.earnings_total = earned
        user.save(update_fields=['earnings_total'])


def network_earnings_6h_to_redis():
    end = round(time.time())
    # Default to 0.0 so a failed request cannot leave a variable unbound.
    zksync_mainnet_glm = 0.0
    erc20_mainnet_glm = 0.0
    polygon_polygon_glm = 0.0
    erc20_polygon_glm = 0.0
    # ZKSYNC MAINNET GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"zksync-mainnet-glm"%7D%5B6h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        zksync_mainnet_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 2)
    # ERC20 MAINNET GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"erc20-mainnet-glm"%7D%5B6h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        erc20_mainnet_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 2)
    # POLYGON POLYGON GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"polygon-polygon-glm"%7D%5B6h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        polygon_polygon_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 2)
    # ERC20 POLYGON GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"erc20-polygon-glm"%7D%5B6h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        erc20_polygon_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 2)
    content = {'total_earnings': round(float(
        zksync_mainnet_glm + erc20_mainnet_glm
        + erc20_polygon_glm + polygon_polygon_glm), 2)}
    serialized = json.dumps(content)
    r.set("network_earnings_6h", serialized)


def providers_average_earnings_to_redis():
    end = round(time.time())
    # Default to 0.0 so a failed request cannot leave a variable unbound.
    zksync_mainnet_glm = 0.0
    erc20_mainnet_glm = 0.0
    erc20_polygon_glm = 0.0
    polygon_polygon_glm = 0.0
    # ZKSYNC MAINNET GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=avg(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"zksync-mainnet-glm"%7D%5B24h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        zksync_mainnet_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 4)
    # ERC20 MAINNET GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=avg(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"erc20-mainnet-glm"%7D%5B24h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        erc20_mainnet_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 4)
    # ERC20 POLYGON GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=avg(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"erc20-polygon-glm"%7D%5B24h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        erc20_polygon_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 4)
    # POLYGON POLYGON GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=avg(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"polygon-polygon-glm"%7D%5B24h%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200 and data[0]['data']['result']:
        polygon_polygon_glm = round(
            float(data[0]['data']['result'][0]['value'][1]), 4)
    content = {'average_earnings': zksync_mainnet_glm + erc20_mainnet_glm
               + erc20_polygon_glm + polygon_polygon_glm}
    serialized = json.dumps(content)
    r.set("provider_average_earnings", serialized)


def network_utilization_to_redis():
    end = round(time.time())
    start = end - 21600  # last 6 hours
    domain = os.environ.get(
        'STATS_URL') + f"api/datasources/proxy/40/api/v1/query_range?query=sum(activity_provider_created%7Bjob%3D~%22community.1%22%7D%20-%20activity_provider_destroyed%7Bjob%3D~%22community.1%22%7D)&start={start}&end={end}&step=30"
    content = get_stats_data(domain)
    if content[1] == 200:
        serialized = json.dumps(content[0])
        r.set("network_utilization", serialized)


def provider_accepted_invoices_1h():
    end = round(time.time())
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_invoices_provider_accepted%7Bjob%3D~"community.1"%7D%5B1h%5D))%2Fsum(increase(payment_invoices_provider_sent%7Bjob%3D~"community.1"%7D%5B1h%5D))&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200:
        if data[0]['data']['result']:
            content = {'percentage_invoice_accepted': float(
                data[0]['data']['result'][0]['value'][1]) * 100}
            serialized = json.dumps(content)
            r.set("provider_accepted_invoice_percentage", serialized)


def computing_now_to_redis():
    end = round(time.time())
    start = end - 10
    domain = os.environ.get(
        'STATS_URL') + f"api/datasources/proxy/40/api/v1/query_range?query=sum(activity_provider_created%7Bjob%3D~%22community.1%22%7D%20-%20activity_provider_destroyed%7Bjob%3D~%22community.1%22%7D)&start={start}&end={end}&step=1"
    data = get_stats_data(domain)
    if data[1] == 200:
        if data[0]['data']['result']:
            # Latest sample of created-minus-destroyed activities.
            computing_now = data[0]['data']['result'][0]['values'][-1][1]
            content = {'computing_now': computing_now}
            ProvidersComputing.objects.create(total=computing_now)
            serialized = json.dumps(content)
            r.set("computing_now", serialized)
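

# Hedged usage sketch: reading the cached snapshot back. The Redis key
# matches the `r.set` call above; the function itself is illustrative and
# not part of the original module:
def read_computing_now():
    cached = r.get("computing_now")
    return json.loads(cached) if cached else None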


def online_nodes_computing():
    end = round(time.time())
    providers = Node.objects.filter(online=True)
    for node in providers:
        # Activities created minus destroyed over the last ~30 minutes.
        domain = os.environ.get(
            'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=round(increase(activity_provider_created%7Bhostname%3D~%22{node.node_id}%22%2C%20job%3D~%22community.1%22%7D%5B1795s%5D%20offset%2010s)%20-%20increase(activity_provider_destroyed%7Bhostname%3D~%22{node.node_id}%22%2C%20job%3D~%22community.1%22%7D%5B1795s%5D%20offset%205s))&time={end}'
        data = get_stats_data(domain)
        if data[1] == 200:
            if data[0]['data']['result']:
                try:
                    node.computing_now = int(
                        data[0]['data']['result'][0]['value'][1]) >= 1
                    node.save()
                except (IndexError, KeyError, ValueError):
                    continue


def network_node_versions():
    now = round(time.time())
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=yagna_version_major%7Bjob%3D"community.1"%7D*100%2Byagna_version_minor%7Bjob%3D"community.1"%7D*10%2Byagna_version_patch%7Bjob%3D"community.1"%7D&time={now}'
    data = get_stats_data(domain)
    if data[1] != 200:
        return
    nodes = data[0]['data']['result']
    for obj in nodes:
        try:
            node = obj['metric']['instance']
            encoded = obj['value'][1]
            # Same major*100 + minor*10 + patch encoding as in
            # network_versions_to_redis above.
            if len(encoded) == 2:
                version = "0" + encoded
                concatenated = version[0] + "." + \
                    version[1] + "." + version[2]
            elif len(encoded) == 3:
                concatenated = "0." + encoded[0] + \
                    encoded[1] + "." + encoded[2]
            else:
                continue
            Node.objects.filter(node_id=node).update(version=concatenated)
            Nodev2.objects.filter(node_id=node).update(version=concatenated)
        except (IndexError, KeyError):
            continue


def network_total_earnings():
    end = round(time.time())
    # ZKSYNC MAINNET GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"zksync-mainnet-glm"%7D%5B1m%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200:
        if data[0]['data']['result']:
            zksync_mainnet_glm = round(
                float(data[0]['data']['result'][0]['value'][1]), 2)
            if zksync_mainnet_glm > 0:
                db, created = Network.objects.get_or_create(id=1)
                db.total_earnings = db.total_earnings + zksync_mainnet_glm
                db.save()
                content = {'total_earnings': db.total_earnings}
                serialized = json.dumps(content)
                r.set("network_earnings_90d", serialized)
    # ERC20 MAINNET GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"erc20-mainnet-glm"%7D%5B1m%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200:
        if data[0]['data']['result']:
            erc20_mainnet_glm = round(
                float(data[0]['data']['result'][0]['value'][1]), 2)
            if erc20_mainnet_glm > 0:
                db, created = Network.objects.get_or_create(id=1)
                db.total_earnings = db.total_earnings + erc20_mainnet_glm
                db.save()
                content = {'total_earnings': db.total_earnings}
                serialized = json.dumps(content)
                r.set("network_earnings_90d", serialized)
    # POLYGON POLYGON GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"polygon-polygon-glm"%7D%5B1m%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200:
        if data[0]['data']['result']:
            polygon_polygon_glm = round(
                float(data[0]['data']['result'][0]['value'][1]), 2)
            if polygon_polygon_glm > 0:
                db, created = Network.objects.get_or_create(id=1)
                db.total_earnings = db.total_earnings + polygon_polygon_glm
                db.save()
                content = {'total_earnings': db.total_earnings}
                serialized = json.dumps(content)
                r.set("network_earnings_90d", serialized)
    # ERC20 POLYGON GLM
    domain = os.environ.get(
        'STATS_URL') + f'api/datasources/proxy/40/api/v1/query?query=sum(increase(payment_amount_received%7Bjob%3D~"community.1"%2C%20platform%3D"erc20-polygon-glm"%7D%5B1m%5D)%2F10%5E9)&time={end}'
    data = get_stats_data(domain)
    if data[1] == 200:
        if data[0]['data']['result']:
            erc20_polygon_glm = round(
                float(data[0]['data']['result'][0]['value'][1]), 2)
            if erc20_polygon_glm > 0:
                db, created = Network.objects.get_or_create(id=1)
                db.total_earnings = db.total_earnings + erc20_polygon_glm
                db.save()
                content = {'total_earnings': db.total_earnings}
                serialized = json.dumps(content)
                r.set("network_earnings_90d", serialized)