def scan(self, nodes: RpcNodeList, remove_invalid=False):
    """
    Start threads checking known nodes to see if they're alive.

    :param nodes: nodes to check
    :param remove_invalid: only return valid nodes when set to True
    :return: valid nodes
    """
    if len(nodes) == 0:
        return nodes

    # drop banned addresses before spending time scanning them
    if len(self.ban_list) > 0:
        filtered_nodes = RpcNodeList()
        for node in nodes:
            if node.address in self.ban_list:
                log_msg('Ban %s' % node.address)
            else:
                filtered_nodes.append(node)
        nodes = filtered_nodes

    now = datetime.now()
    log_msg('Scanning %d node(s) on port %d. This can take several minutes. Let it run.' % (
        len(nodes), self._m_rpc_port))

    # context manager guarantees the worker pool is cleaned up even if
    # map() raises (original close()/join() leaked workers on error)
    with Pool(processes=CONFIG['concurrent_scans']) as pool:
        nodes = RpcNodeList.from_list(
            pool.map(partial(RpcNode.is_valid, self._blockchain_height), nodes))

    log_msg('Scanning %d node(s) done after %d seconds, found %d valid' % (
        len(nodes), (datetime.now() - now).total_seconds(), len(nodes.valid(valid=True))))

    if remove_invalid:
        nodes = nodes.valid(valid=True)
    return nodes
def get_records(self):
    """Fetch the existing Cloudflare A record(s) for the managed subdomain.

    Retries failed requests up to ``max_retries`` times, sleeping one
    second between attempts.

    :return: RpcNodeList of nodes found in DNS, or None when all retries failed
    """
    max_retries = 5
    nodes = RpcNodeList()
    log_msg('Fetching existing record(s) (%s.%s)' % (self.subdomain_name, self.domain_name))
    retries = 0
    while True:
        try:
            result = make_json_request(
                '%s/%s/dns_records/?type=A&name=%s.%s' % (
                    self.api_base, self.zone_id, self.subdomain_name, self.domain_name),
                headers=self.headers)
            records = result.get('result')

            # filter on A records / subdomain
            for record in records:
                if record.get('type') != 'A' or record.get('name') != self.fulldomain_name:
                    continue
                node = RpcNode(address=record.get('content'), uid=record.get('id'))
                nodes.append(node)
                log_msg('> A %s %s' % (record.get('name'), record.get('content')))
            return nodes
        except Exception as ex:
            log_err("Cloudflare record fetching failed: %s" % (str(ex)))
            retries += 1
            # give up before sleeping once the retry budget is exhausted
            # (the original slept one extra second before returning None)
            if retries > max_retries:
                return None
            time.sleep(1)
def get_records(self):
    """Fetch the existing Cloudflare A record(s) for the managed subdomain.

    :return: RpcNodeList of nodes found in DNS
    """
    nodes = RpcNodeList()
    log_msg('Fetching existing record(s) (%s.%s)' % (self.subdomain_name, self.domain_name))
    url = '%s/%s/dns_records/?type=A&name=%s.%s' % (
        self.api_base, self.zone_id, self.subdomain_name, self.domain_name)
    result = make_json_request(url, headers=self.headers)

    # keep only A records that belong to our full domain name
    for record in result.get('result'):
        if record.get('type') != 'A':
            continue
        if record.get('name') != self.fulldomain_name:
            continue
        nodes.append(RpcNode(address=record.get('content'), uid=record.get('id')))
        log_msg('> A %s %s' % (record.get('name'), record.get('content')))
    return nodes
def monerod_get_peers(self):
    """Gets the last known peers from monerod.

    Parses the output of the ``print_pl`` daemon command; lines look like
    ``<gray|white> <peer id> <ip>:<port>``.

    :return: RpcNodeList of white-listed peer addresses (gray peers skipped)
    """
    nodes = RpcNodeList()
    output = self._daemon_command("print_pl")
    if not output:
        return nodes

    regex = r"(gray|white)\s+(\w+)\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}):(\d{1,5})"
    # original used enumerate() but never read the index
    for match in re.finditer(regex, output):
        # only white (recently seen) peers are useful
        if match.group(1) != 'white':
            continue
        nodes.append(RpcNode(address=match.group(3)))
    log_msg('Got peers from RPC: %d node(s)' % len(nodes))
    return nodes
def get_records(self, all_records=False):
    """Fetch DNS entries for the domain via the TransIP SOAP API.

    :param all_records: when True, return every DNS entry instead of only
        the A record(s) matching our subdomain
    :return: RpcNodeList of matching DNS entries
    """
    nodes = RpcNodeList()
    cookie = self.build_cookie(mode=MODE_RO, method='getInfo', parameters=[self.domain_name])
    self.update_cookie(cookie)
    result = self.soap_client.service.getInfo(self.domain_name)

    for entry in result.dnsEntries:
        # the SOAP response may contain non-DnsEntry objects; skip them
        if entry.__class__.__name__ != 'DnsEntry':
            continue
        # unless all_records is set, restrict to A records for our subdomain
        if not all_records and (entry.type != 'A' or entry.name != self.subdomain_name):
            continue
        nodes.append(RpcNode(address=entry.content, type=entry.type,
                             name=entry.name, expire=entry.expire))
    return nodes