def extract_tokens(contracts, provider_uri, output, max_workers, values_as_strings=False):
    """Extracts tokens from contracts file."""
    set_max_field_size_limit()
    with smart_open(contracts, 'r') as contracts_file:
        # JSON-lines input is parsed lazily; any other extension is read as CSV.
        if contracts.endswith('.json'):
            source = (json.loads(line) for line in contracts_file)
        else:
            source = csv.DictReader(contracts_file)

        # Optionally render big-integer fields as strings so downstream
        # consumers don't lose precision on large values.
        converters = []
        if values_as_strings:
            converters.append(IntToStringItemConverter(keys=['decimals', 'total_supply']))

        extract_job = ExtractTokensJob(
            contracts_iterable=source,
            web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
            max_workers=max_workers,
            item_exporter=tokens_item_exporter(output, converters),
        )
        extract_job.run()
def get_item_sink(output_file):
    """Yield a ``sink(item)`` callable that writes dict items to *output_file*.

    ``.csv`` outputs get a lazily-created ``csv.DictWriter``: the header is
    derived from the keys of the FIRST item only, and later items' extra keys
    are silently dropped (``extrasaction='ignore'``).  Any other extension is
    written as JSON lines.  The file handle is closed when the context exits.

    NOTE(review): appears intended for use with ``@contextmanager`` — the
    decorator is outside this view; confirm at the definition site.
    """
    fh = get_file_handle(output_file, 'w')

    if output_file.endswith('.csv'):
        set_max_field_size_limit()
        writer = None

        def sink(item):
            nonlocal writer
            if writer is None:
                # Header comes from the first item; py3 dicts preserve
                # insertion order.  (Replaced six.iterkeys() — the function
                # already requires py3 via `nonlocal`, so six was dead weight.)
                fields = list(item.keys())
                writer = csv.DictWriter(fh, fieldnames=fields, extrasaction='ignore')
                writer.writeheader()
            writer.writerow(item)
    else:
        def sink(item):
            fh.write(json.dumps(item) + '\n')

    try:
        yield sink
    finally:
        fh.close()
def get_item_iterable(input_file):
    """Yield an iterable of dict items read from *input_file*.

    ``.csv`` inputs are read with ``csv.DictReader``; anything else is
    treated as JSON lines.  The underlying handle is closed on exit.
    """
    fh = get_file_handle(input_file, 'r')

    if input_file.endswith('.csv'):
        set_max_field_size_limit()
        items = csv.DictReader(fh)
    else:
        # Lazy, line-at-a-time JSON parsing.
        items = (json.loads(line) for line in fh)

    try:
        yield items
    finally:
        fh.close()
def extract_contracts(traces, batch_size, output, max_workers):
    """Extracts contracts from traces file."""
    set_max_field_size_limit()
    with smart_open(traces, 'r') as traces_file:
        # JSON-lines traces are parsed lazily; any other extension is CSV.
        if traces.endswith('.json'):
            trace_rows = (json.loads(line) for line in traces_file)
        else:
            trace_rows = csv.DictReader(traces_file)

        contracts_job = ExtractContractsJob(
            traces_iterable=trace_rows,
            batch_size=batch_size,
            max_workers=max_workers,
            item_exporter=contracts_item_exporter(output),
        )
        contracts_job.run()
def extract_tokens(contracts, provider_uri, output, max_workers):
    """Extracts tokens from contracts file."""
    set_max_field_size_limit()
    with smart_open(contracts, 'r') as contracts_file:
        # JSON-lines input is parsed lazily; any other extension is CSV.
        if contracts.endswith('.json'):
            contract_rows = (json.loads(line) for line in contracts_file)
        else:
            contract_rows = csv.DictReader(contracts_file)

        tokens_job = ExtractTokensJob(
            contracts_iterable=contract_rows,
            # Each worker thread gets its own Web3 instance via the proxy.
            web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
            max_workers=max_workers,
            item_exporter=tokens_item_exporter(output),
        )
        tokens_job.run()