Example #1
def fetch_transactions(args: dict, web3: Web3):
    """Fetches each transaction listed in the input CSV files and writes it as a JSON line."""
    tx_hashes = []
    for filename in args["files"]:
        with smart_open(filename) as fin:
            for tx in csv.DictReader(fin):
                tx_hashes.append(tx["hash"])

    tx_tracer = TransactionTracer(web3)
    done = set()
    with smart_open(args["output"], "w") as fout:
        for i, tx_hash in enumerate(tx_hashes):
            if i % 10 == 0:
                logger.info("progress: %s/%s", i, len(tx_hashes))
            if tx_hash in done:
                continue
            try:
                tx = dict(web3.eth.getTransaction(tx_hash))
                if args["include_receipt"]:
                    tx["receipt"] = web3.eth.getTransactionReceipt(tx_hash)
                if args["include_traces"]:
                    tx["traces"] = tx_tracer.trace_transaction(tx_hash)
                json.dump(tx, fout, cls=EthJSONEncoder)
                print(file=fout)
                done.add(tx_hash)
            except Exception as ex:  # pylint: disable=broad-except
                logger.warning("failed to trace %s: %s", tx_hash, ex)
                continue
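A minimal sketch of how fetch_transactions might be invoked. The args keys mirror the ones the function reads above; the node endpoint and file names are placeholder assumptions, not part of the original code.

from web3 import Web3

web3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # assumed node endpoint
args = {
    "files": ["tx-hashes.csv"],      # CSV files with a "hash" column (assumed name)
    "output": "transactions.jsonl",  # one JSON object per line
    "include_receipt": True,
    "include_traces": False,
}
fetch_transactions(args, web3)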
Example #2
def bulk_fetch_events(args: dict, web3: Web3):
    """Loads fetch tasks from a JSON config file and fetches their events."""
    fetcher = EventFetcher(web3)
    with smart_open(args["config"]) as f:
        raw_tasks = json.load(f)
    tasks = [
        FetchTask.from_dict(raw_task, args["abis"]) for raw_task in raw_tasks
    ]
    fetcher.fetch_all_events(tasks, args["output"])
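Since the config is read with json.load and each entry goes through FetchTask.from_dict (Example #3), a plausible config layout is sketched below; all values are assumptions.

import json

raw_tasks = [
    {
        "name": "my-token",        # hypothetical task name
        "address": "0x...",        # contract address placeholder
        "start_block": 10_000_000,
        "end_block": 10_100_000,
        # "abi" is optional; it defaults to "<name>.json" under args["abis"]
    },
]
with open("tasks.json", "w") as f:
    json.dump(raw_tasks, f, indent=2)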
Example #3
@classmethod
def from_dict(cls, raw_task: dict, abi_paths: str = None):
    """Builds a FetchTask from a raw dict, loading the contract ABI from disk."""
    raw_task = raw_task.copy()
    default_abi_name = raw_task.get("name", raw_task["address"]).lower()
    abi_path = raw_task.pop("abi", default_abi_name + ".json")
    if abi_paths:
        abi_path = path.join(abi_paths, abi_path)
    with smart_open(abi_path) as f:
        raw_task["abi"] = json.load(f)
    keys = ["address", "abi", "start_block", "end_block", "name"]
    return cls(**{k: raw_task.get(k) for k in keys})
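For illustration, a direct call might look like this; the task values and ABI directory are hypothetical, and would resolve the ABI from ./abis/my-token.json.

task = FetchTask.from_dict(
    {"name": "my-token", "address": "0xabc", "start_block": 1, "end_block": 100},
    abi_paths="./abis",
)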
Example #4
def fetch_address_transactions(args: dict, etherscan_key: str):
    """Fetches all transactions of an address from Etherscan and writes them to CSV."""
    fetcher = TransactionsFetcher(etherscan_api_key=etherscan_key)
    internal = args["internal"]
    if internal:
        fields = constants.ETHERSCAN_INTERNAL_TRANSACTION_KEYS
    else:
        fields = constants.ETHERSCAN_TRANSACTION_KEYS
    with smart_open(args["output"], "w") as f:
        writer = csv.DictWriter(f, fieldnames=fields)
        writer.writeheader()
        for transaction in fetcher.fetch_contract_transactions(
                args["address"], internal=internal):
            writer.writerow(transaction)
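A sketch of a possible invocation, assuming the Etherscan key and address are supplied elsewhere; both values below are placeholders.

args = {
    "address": "0x0000000000000000000000000000000000000000",  # placeholder address
    "internal": False,                    # set True to fetch internal transactions
    "output": "address-transactions.csv",
}
fetch_address_transactions(args, etherscan_key="YOUR-ETHERSCAN-API-KEY")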
Example #5
def fetch_blocks(args: dict, web3: Web3):
    """Fetches blocks and stores them in the file given in arguments"""
    block_iterator = BlockIterator(web3,
                                   args["start_block"],
                                   args["end_block"],
                                   log_interval=args["log_interval"])
    fields = args["fields"]
    with smart_open(args["output"], "w") as f:
        writer = csv.DictWriter(f, fieldnames=fields)
        writer.writeheader()
        for block in block_iterator:
            row = {field: getattr(block, field) for field in fields}
            writer.writerow(row)
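A sketch of the args this command expects, with keys taken from the code above; the field names are assumptions about the attributes exposed by BlockIterator's blocks.

args = {
    "start_block": 10_000_000,
    "end_block": 10_000_100,
    "log_interval": 10,
    "fields": ["number", "timestamp", "gas_used"],  # assumed block attributes
    "output": "blocks.csv",
}
fetch_blocks(args, web3)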
Example #6
def fetch_and_persist_events(self, task: FetchTask, output_file: str):
    """Fetches the events for a task and writes them to output_file as JSON lines."""
    with smart_open(output_file, "w") as f:
        for event in self.fetch_events(task):
            print(json.dumps(event, cls=EthJSONEncoder), file=f)
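Given a FetchTask built as in Example #3, persisting its events is a single call; the output path below is a placeholder.

fetcher = EventFetcher(web3)
fetcher.fetch_and_persist_events(task, "events.jsonl")  # task: a FetchTask instance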
Example #7
@contextmanager  # from contextlib; lets the generator below be used in a with-statement
def smart_open_with_stdout(filename, mode="r", **kwargs) -> Iterator[IO]:
    """Like smart_open, but yields sys.stdout when filename is None."""
    if filename is None:
        yield sys.stdout
    else:
        with smart_open(filename, mode, **kwargs) as f:
            yield f
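Assuming the function is decorated with contextlib.contextmanager as above, usage looks like this: a None filename yields sys.stdout, anything else defers to smart_open.

with smart_open_with_stdout(None) as out:
    print("written to stdout", file=out)

with smart_open_with_stdout("report.txt", "w") as out:
    print("written to report.txt", file=out)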