Code example #1
File: main.py Project: zahedaziz/nembex-v3
import json
from datetime import datetime

# config, NodeEndpoint, NetworkCrawler and getResults are defined
# elsewhere in the project's main.py.


def runAsync():
    # Try each configured seed node in turn until one crawl produces
    # results.
    for crawlerSeed in config.nodes:
        sourceEndpoint = NodeEndpoint.from_parameters('http', crawlerSeed, 7890)
        crawler = NetworkCrawler(config.network == 'testnet')

        try:
            yield from crawler.crawl(sourceEndpoint)
        except Exception as e:
            print('MAIN Exception: {0}'.format(e))

        crawler.reset()
        result = getResults(crawler.counter.values())
        end = datetime.utcnow()

        if not result:
            continue

        with open('nodes_dump-' + config.network + '.json', 'w') as output:
            # isoformat() is needed here: datetime objects are not
            # JSON-serializable as-is.
            output.write(json.dumps({
                'nodes_last_time': end.isoformat(),
                'active_nodes': result
            }))
        break
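
For orientation, here is a minimal stand-in for the config module the excerpt reads from. The attribute names come from the code above; the values are placeholders, not the project's real configuration:

# Hypothetical config stub; only the attributes runAsync() touches.
class config:
    network = 'mainnet'  # the crawler treats 'testnet' specially
    nodes = ['seed-1.example.org', 'seed-2.example.org']  # placeholder hosts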
Code example #2
File: main.py Project: NemProject/nembex-v3
def runAsync():
    # Earlier, single-seed variant of the function above.
    sourceEndpoint = NodeEndpoint.from_parameters('http', config.crawlerSeed, 7890)
    crawler = NetworkCrawler(config.network == 'testnet')

    yield from crawler.crawl(sourceEndpoint)
    crawler.reset()
    result = getResults(crawler.counter.values())
    end = datetime.utcnow()
    with open('nodes_dump-' + config.network + '.json', 'w') as output:
        # As above, serialize the timestamp explicitly for json.dumps.
        output.write(json.dumps({'nodes_last_time': end.isoformat(),
                                 'active_nodes': result}))
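
Both variants are generator-based coroutines from the pre-async/await asyncio era, so a driver has to hand them to an event loop. A minimal sketch, assuming the Python 3.4-style asyncio these snippets are written against:

import asyncio

# On old-style asyncio, a plain generator coroutine such as runAsync()
# can be passed to run_until_complete directly.
loop = asyncio.get_event_loop()
loop.run_until_complete(runAsync())
loop.close()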
Code example #3
    def crawl(self, endpoint):
        endpointUrl = endpoint.url()

        if endpointUrl in self.allNodes and endpointUrl in self.allInfo:
            return

        if endpointUrl not in self.allInfo:
            # Mark the node as visited up front so that concurrent crawls
            # do not query it again.
            self.allInfo[endpointUrl] = True
            try:
                response = yield from asyncio.wait_for(aiohttp.request('GET', '{0}/node/extended-info'.format(endpointUrl)), 2)
                info = yield from response.json()
                network = info['node']['metaData']['networkId']
                # networkId -104 marks the testnet; keep only nodes that
                # belong to the network this crawler was configured for.
                if (self.isTest and network == -104) or (not self.isTest and network != -104):
                    self.allInfo[endpointUrl] = info

                    response = yield from asyncio.wait_for(aiohttp.request('GET', '{0}/chain/height'.format(endpointUrl)), 2)
                    info = yield from response.json()
                    self.allInfo[endpointUrl]['node']['metaData']['height'] = info['height']
                    # Re-enter crawl for the same endpoint so its peer list
                    # is processed by the branch below.
                    yield from asyncio.wait([self.crawl(endpoint)], return_when=asyncio.ALL_COMPLETED)

            except aiohttp.errors.ClientOSError as err:
                # aiohttp.errors holds the exception classes of the
                # pre-2.0 aiohttp API this project is written against.
                print('error detected:', str(err))

            except aiohttp.errors.ServerDisconnectedError as err:
                print('error detected:', str(err))

            except Exception:
                # Ignore everything else (timeouts, malformed JSON, ...).
                pass

        if endpointUrl not in self.allNodes:
            self.allNodes[endpointUrl] = True
            try:
                response = yield from asyncio.wait_for(aiohttp.request('GET', '{0}/node/peer-list/active'.format(endpointUrl)), 2)
                peers = yield from response.json()

                # Crawl every advertised peer concurrently and wait for
                # all of them to finish.
                futures = []
                for peer in peers['data']:
                    peer_endpoint = NodeEndpoint.from_json(peer['endpoint'])
                    futures.append(self.crawl(peer_endpoint))

                # asyncio.wait raises ValueError on an empty set, so guard
                # against nodes that advertise no peers.
                if futures:
                    yield from asyncio.wait(futures, return_when=asyncio.ALL_COMPLETED)
            except aiohttp.errors.ClientOSError as err:
                print('error detected:', str(err))

            except aiohttp.errors.ServerDisconnectedError as err:
                print('error detected:', str(err))

            except Exception:
                pass
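
On current Python and aiohttp 3.x, the same two-phase idea (fetch node metadata, then recurse into the active peer list) would look roughly as follows. This is a sketch, not the project's code: fetch_json and peer_urls are hypothetical helpers, and the JSON field names mirror the snippet above.

import asyncio
import aiohttp

# Sketch only: an async/await rendering of the crawl above.
async def fetch_json(session, url, timeout=2):
    async with session.get(url, timeout=aiohttp.ClientTimeout(total=timeout)) as resp:
        return await resp.json()

async def crawl(session, url, seen):
    if url in seen:
        return
    seen.add(url)
    try:
        info = await fetch_json(session, '{0}/node/extended-info'.format(url))
        # info['node']['metaData']['networkId'] could be checked here,
        # as in the original.
        peers = await fetch_json(session, '{0}/node/peer-list/active'.format(url))
    except (aiohttp.ClientError, asyncio.TimeoutError):
        return  # unreachable or misbehaving node: skip it
    # Recurse into all advertised peers concurrently.
    await asyncio.gather(*(crawl(session, peer_url, seen)
                           for peer_url in peer_urls(peers)))

def peer_urls(peers):
    # Hypothetical helper mirroring NodeEndpoint.from_json: rebuild
    # 'protocol://host:port' URLs from the peer-list JSON.
    for peer in peers['data']:
        ep = peer['endpoint']
        yield '{0}://{1}:{2}'.format(ep['protocol'], ep['host'], ep['port'])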
Code example #4
    def getNodes(self, endpoint, attempt):
        endpointUrl = endpoint.url()
        if endpointUrl in self.allNodes:
            return

        self.allNodes[endpointUrl] = True
        try:
            # The timeout budget grows linearly with the attempt number.
            response = yield from asyncio.wait_for(
                aiohttp.request(
                    'GET', '{0}/node/peer-list/all'.format(endpointUrl)),
                10 * attempt)
            peers = yield from response.json()

            # Schedule a crawl for every peer that has not been queued yet.
            futures = []
            for peer in peers['active']:
                peer_endpoint = NodeEndpoint.from_json(peer['endpoint'])
                if peer_endpoint.url() not in self.allFutures:
                    self.allFutures[peer_endpoint.url()] = True
                    futures.append(self.crawl(peer_endpoint))

            if futures:
                yield from asyncio.wait(futures, return_when=asyncio.ALL_COMPLETED)

        except aiohttp.errors.ClientOSError as err:
            logging.warning('peerList, error detected: {0} {1}'.format(
                endpointUrl, str(err)))
        except aiohttp.errors.ServerDisconnectedError as err:
            logging.warning('peerList, error detected: {0} {1}'.format(
                endpointUrl, str(err)))
        except asyncio.TimeoutError as err:
            # Retry up to two times with a growing timeout, then log.
            # The visited marker has to be cleared first, otherwise the
            # retry would hit the early return at the top and do nothing.
            if attempt <= 2:
                del self.allNodes[endpointUrl]
                yield from asyncio.wait([self.getNodes(endpoint, attempt + 1)],
                                        return_when=asyncio.ALL_COMPLETED)
            else:
                logging.warning(
                    'peerList, timeout attempt {2}: {0} {1}'.format(
                        endpointUrl, str(err), attempt))

        except Exception as err:
            logging.warning('peerList, exception: {0} {1}'.format(
                endpointUrl, str(err)))
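
The timeout branch is essentially a retry-with-growing-timeout pattern. Factored out on its own, and in the same pre-async/await style as the examples, it might look like this; retry_on_timeout and its parameters are assumed names, not project code:

import asyncio
import logging

@asyncio.coroutine
def retry_on_timeout(make_request, max_attempts=3, base_timeout=10):
    # make_request is a coroutine factory: a generator coroutine can only
    # be awaited once, so each attempt needs a fresh one.
    for attempt in range(1, max_attempts + 1):
        try:
            return (yield from asyncio.wait_for(make_request(),
                                                base_timeout * attempt))
        except asyncio.TimeoutError:
            logging.warning('timeout on attempt %d', attempt)
    return None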