# Example 1
def test_export_blocks_job(tmpdir, start_block, end_block, batch_size,
                           resource_group, web3_provider_type):
    """Export a block range and verify the blocks/transactions CSVs match the fixtures."""
    blocks_path = str(tmpdir.join('actual_blocks.csv'))
    txs_path = str(tmpdir.join('actual_transactions.csv'))

    # Provider is wrapped in a ThreadLocalProxy so each worker thread gets its own instance.
    provider = ThreadLocalProxy(lambda: get_web3_provider(
        web3_provider_type,
        lambda file: read_resource(resource_group, file),
        batch=True))

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        batch_web3_provider=provider,
        max_workers=5,
        item_exporter=blocks_and_transactions_item_exporter(blocks_path, txs_path),
        export_blocks=blocks_path is not None,
        export_transactions=txs_path is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_blocks.csv'),
        read_file(blocks_path))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_transactions.csv'),
        read_file(txs_path))
# Example 2
def test_export_blocks_job(tmpdir, batch_size, resource_group):
    """Export receipts/logs for a fixed set of transaction hashes and compare with fixtures."""
    # NOTE(review): despite the name this exercises ExportReceiptsJob, not
    # ExportBlocksJob — consider renaming to test_export_receipts_job.
    receipts_path = tmpdir.join('actual_receipts.csv')
    logs_path = tmpdir.join('actual_logs.csv')

    tx_hashes = [
        '0x04cbcb236043d8fb7839e07bbc7f5eed692fb2ca55d897f1101eac3e3ad4fab8',
        '0x463d53f0ad57677a3b430a007c1c31d15d62c37fab5eee598551697c297c235c',
        '0x05287a561f218418892ab053adfb3d919860988b19458c570c5c30f51c146f02',
        '0xcea6f89720cc1d2f46cc7a935463ae0b99dd5fad9c91bb7357de5421511cee49',
    ]

    job = ExportReceiptsJob(
        tx_hashes_iterable=tx_hashes,
        batch_size=batch_size,
        ipc_wrapper=ThreadLocalProxy(
            lambda: MockIPCWrapper(
                lambda file: read_resource(resource_group, file))),
        max_workers=5,
        item_exporter=export_receipts_job_item_exporter(receipts_path, logs_path),
        export_receipts=receipts_path is not None,
        export_logs=logs_path is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.csv'),
        read_file(receipts_path))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.csv'),
        read_file(logs_path))
# Example 3
def test_stream(tmpdir, start_block, end_block, batch_size, resource_group, provider_type, chain):
    """Stream a bitcoin block range and compare the emitted block/transaction JSON with fixtures."""
    # Drop sync state left behind by a previous run so the stream starts fresh.
    try:
        os.remove('last_synced_block.txt')
    except OSError:
        pass

    blocks_path = str(tmpdir.join('actual_block.json'))
    txs_path = str(tmpdir.join("actual_transactions.json"))

    rpc = ThreadLocalProxy(
        lambda: get_bitcoin_rpc(
            provider_type,
            read_resource_lambda=lambda file: read_resource(resource_group, file),
            chain=chain))

    stream(
        bitcoin_rpc=rpc,
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        item_exporter=blocks_and_transactions_item_exporter(blocks_path, txs_path)
    )

    # Print each output before comparing to ease debugging of fixture mismatches.
    for expected_resource, actual_path in (('expected_blocks.json', blocks_path),
                                           ('expected_transactions.json', txs_path)):
        print('=====================')
        print(read_file(actual_path))
        compare_lines_ignore_order(
            read_resource(resource_group, expected_resource), read_file(actual_path)
        )
def test_export_blocks_job(tmpdir, start_block, end_block, batch_size,
                           resource_group):
    """Export a block range via the mock IPC wrapper and diff the CSV output against fixtures."""
    blocks_path = tmpdir.join('actual_blocks.csv')
    txs_path = tmpdir.join('actual_transactions.csv')

    wrapper = ThreadLocalProxy(lambda: MockIPCWrapper(
        lambda file: read_resource(resource_group, file)))

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        ipc_wrapper=wrapper,
        max_workers=5,
        item_exporter=export_blocks_job_item_exporter(blocks_path, txs_path),
        export_blocks=blocks_path is not None,
        export_transactions=txs_path is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_blocks.csv'),
        read_file(blocks_path))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_transactions.csv'),
        read_file(txs_path))
# Example 5
def test_export_origin(tmpdir, start_block, end_block, batch_size, output_format, resource_group, web3_provider_type, ipfs_client_type):
    """Export Origin marketplace listings and shop products and compare against fixtures."""
    marketplace_path = str(tmpdir.join('actual_marketplace.' + output_format))
    shop_path = str(tmpdir.join('actual_shop.' + output_format))

    # Use a mock IPFS client backed by fixture files, or a real one, per parametrization.
    if ipfs_client_type == 'mock':
        ipfs_client = MockIpfsClient(lambda file: read_resource(resource_group, file))
    else:
        ipfs_client = get_origin_ipfs_client()

    job = ExportOriginJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=ThreadLocalProxy(
            lambda: Web3(get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file)))
        ),
        ipfs_client=ipfs_client,
        marketplace_listing_exporter=origin_marketplace_listing_item_exporter(marketplace_path),
        shop_product_exporter=origin_shop_product_item_exporter(shop_path),
        max_workers=5)

    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_marketplace.' + output_format), read_file(marketplace_path)
    )
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_shop.' + output_format), read_file(shop_path)
    )
def test_export_blocks_job(tmpdir, start_block, end_block, batch_size,
                           resource_group, provider_type, chain):
    """Export a bitcoin block range and diff the block/transaction JSON against fixtures."""
    blocks_path = str(tmpdir.join('actual_block.json'))
    txs_path = str(tmpdir.join("actual_transactions.json"))

    rpc = ThreadLocalProxy(
        lambda: get_bitcoin_rpc(provider_type,
                                read_resource_lambda=lambda file:
                                read_resource(resource_group, file),
                                chain=chain))

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        bitcoin_rpc=rpc,
        max_workers=5,
        item_exporter=blocks_and_transactions_item_exporter(blocks_path, txs_path),
        chain=chain,
        export_blocks=blocks_path is not None,
        export_transactions=txs_path is not None)
    job.run()

    # Print outputs before comparing to ease debugging of fixture mismatches.
    for expected_resource, actual_path in (('expected_blocks.json', blocks_path),
                                           ('expected_transactions.json', txs_path)):
        print('=====================')
        print(read_file(actual_path))
        compare_lines_ignore_order(
            read_resource(resource_group, expected_resource),
            read_file(actual_path))
def test_export_receipts_job(tmpdir, batch_size, transaction_hashes,
                             output_format, resource_group,
                             web3_provider_type):
    """Export receipts and logs for the given transaction hashes and compare with fixtures."""
    receipts_path = tmpdir.join('actual_receipts.' + output_format)
    logs_path = tmpdir.join('actual_logs.' + output_format)

    provider = ThreadLocalProxy(lambda: get_web3_provider(
        web3_provider_type,
        lambda file: read_resource(resource_group, file),
        batch=True))

    job = ExportReceiptsJob(
        transaction_hashes_iterable=transaction_hashes,
        batch_size=batch_size,
        batch_web3_provider=provider,
        max_workers=5,
        item_exporter=receipts_and_logs_item_exporter(receipts_path, logs_path),
        export_receipts=receipts_path is not None,
        export_logs=logs_path is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.' + output_format),
        read_file(receipts_path))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.' + output_format),
        read_file(logs_path))
# Example 8
def test_export_erc20_tokens_job(tmpdir, token_addresses, resource_group):
    """Export ERC20 token metadata for the given addresses and compare with the fixture CSV."""
    tokens_path = tmpdir.join('erc20_tokens.csv')

    job = ExportErc20TokensJob(
        token_addresses_iterable=token_addresses,
        web3=ThreadLocalProxy(lambda: Web3(MockWeb3Provider(resource_group))),
        item_exporter=erc20_tokens_item_exporter(tokens_path),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_erc20_tokens.csv'),
        read_file(tokens_path))
def test_export_erc20_transfers_job(tmpdir, start_block, end_block, batch_size, resource_group):
    """Export ERC20 transfers for a block range and compare with the fixture CSV."""
    transfers_path = tmpdir.join('erc20_transfers.csv')

    job = ExportErc20TransfersJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=ThreadLocalProxy(lambda: Web3(MockWeb3Provider(resource_group))),
        item_exporter=erc20_transfers_item_exporter(transfers_path),
        max_workers=5,
    )
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_erc20_transfers.csv'), read_file(transfers_path)
    )
def test_export_token_transfers_job(tmpdir, resource_group):
    """Extract token transfers from a fixture logs CSV and compare with the expected CSV."""
    transfers_path = tmpdir.join('token_transfers.csv')

    # Feed the job a DictReader over the fixture so it sees one dict per log row.
    log_rows = csv.DictReader(io.StringIO(read_resource(resource_group, 'logs.csv')))

    job = ExtractTokenTransfersJob(
        logs_iterable=log_rows,
        batch_size=2,
        item_exporter=token_transfers_item_exporter(transfers_path),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_token_transfers.csv'),
        read_file(transfers_path))
def test_extract_traces_job(tmpdir, resource_group):
    """Extract traces from fixture geth trace JSON lines and compare with the expected CSV."""
    traces_path = str(tmpdir.join('actual_traces.csv'))

    # One JSON document per line in the fixture; parse lazily.
    raw_traces = read_resource(resource_group, 'geth_traces.json')
    parsed_traces = (json.loads(line) for line in raw_traces.splitlines())

    job = ExtractGethTracesJob(
        traces_iterable=parsed_traces,
        batch_size=2,
        item_exporter=traces_item_exporter(traces_path),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_traces.csv'),
        read_file(traces_path))
def test_export_transaction_logs_job(tmpdir, start_block, end_block, resource_group, provider_type):
    """Export IoTeX transaction logs for a block range and compare with the fixture JSON."""
    rpc = ThreadLocalProxy(
        lambda: get_iotex_rpc(
            provider_type,
            read_resource_lambda=lambda file: read_resource(resource_group, file)))

    job = ExportTransactionLogsJob(
        start_block=start_block,
        end_block=end_block,
        iotex_rpc=rpc,
        max_workers=5,
        item_exporter=IotexItemExporter(str(tmpdir)),
    )
    job.run()

    # The exporter writes transaction_logs.json into tmpdir.
    actual = read_file(str(tmpdir.join('transaction_logs.json')))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_transaction_logs.json'), actual
    )
def test_export_contracts_job(tmpdir, batch_size, contract_addresses,
                              output_format, resource_group):
    """Export contract metadata for the parametrized addresses and compare with fixtures.

    Fix: the original ignored the ``contract_addresses`` fixture parameter and
    always exported the module constant ``CONTRACT_ADDRESSES_UNDER_TEST``,
    inconsistent with the sibling test that passes the fixture through.
    """
    contracts_output_file = tmpdir.join('actual_contracts.' + output_format)

    job = ExportContractsJob(
        # Use the parametrized addresses, matching the other export-contracts test.
        contract_addresses_iterable=contract_addresses,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(lambda: MockBatchWeb3Provider(
            lambda file: read_resource(resource_group, file))),
        max_workers=5,
        item_exporter=contracts_item_exporter(contracts_output_file))
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_contracts.' + output_format),
        read_file(contracts_output_file))
def test_filter_items(tmpdir):
    """filter_items keeps only the JSON lines matching the predicate.

    Fix: use context managers instead of ``open(...).write(...)``, which left
    file handles open until garbage collection.
    """
    input_file = str(tmpdir.join('input.json'))
    with open(input_file, 'w') as f:
        f.write('''{"field1": "x1", "field2": "y1"}    
{"field1": "x2", "field2": "y2"}    
''')

    output_file = str(tmpdir.join('output.json'))
    filter_items(input_file, output_file, lambda item: item['field1'] == 'x1')

    expected_file = str(tmpdir.join('expected.json'))
    with open(expected_file, 'w') as f:
        f.write('''{"field1": "x1", "field2": "y1"}     
''')

    compare_lines_ignore_order(
        read_file(expected_file), read_file(output_file)
    )
# Example 15
def test_export_tokens_job(tmpdir, token_addresses, resource_group,
                           web3_provider_type):
    """Export token metadata for the given addresses and compare with the fixture CSV."""
    tokens_path = str(tmpdir.join('tokens.csv'))

    web3 = ThreadLocalProxy(lambda: Web3(
        get_web3_provider(web3_provider_type,
                          lambda file: read_resource(resource_group, file))))

    job = ExportTokensJob(
        token_addresses_iterable=token_addresses,
        web3=web3,
        item_exporter=tokens_item_exporter(tokens_path),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_tokens.csv'),
        read_file(tokens_path))
# Example 16
def test_export_traces_job(tmpdir, start_block, end_block, resource_group, web3_provider_type):
    """Export parity-style traces for a block range and compare with the fixture CSV."""
    traces_path = str(tmpdir.join('actual_traces.csv'))

    web3 = ThreadLocalProxy(
        lambda: Web3(get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file)))
    )

    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=1,
        web3=web3,
        max_workers=5,
        item_exporter=traces_item_exporter(traces_path),
    )
    job.run()

    # Print the output before comparing to ease debugging of fixture mismatches.
    print('=====================')
    print(read_file(traces_path))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_traces.csv'), read_file(traces_path)
    )
def test_export_token_transfers_job(tmpdir, start_block, end_block, batch_size,
                                    resource_group, web3_provider_type):
    """Export token transfers for a block range and compare with the fixture CSV."""
    transfers_path = tmpdir.join('token_transfers.csv')

    web3 = ThreadLocalProxy(lambda: Web3(
        get_web3_provider(web3_provider_type,
                          lambda file: read_resource(resource_group, file))))

    job = ExportTokenTransfersJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=web3,
        item_exporter=token_transfers_item_exporter(transfers_path),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_token_transfers.csv'),
        read_file(transfers_path))
def test_export_contracts_job(tmpdir, batch_size, contract_addresses,
                              output_format, resource_group,
                              web3_provider_type):
    """Export contract metadata for the given addresses and compare with fixtures."""
    contracts_path = tmpdir.join('actual_contracts.' + output_format)

    provider = ThreadLocalProxy(lambda: get_web3_provider(
        web3_provider_type,
        lambda file: read_resource(resource_group, file),
        batch=True))

    job = ExportContractsJob(
        contract_addresses_iterable=contract_addresses,
        batch_size=batch_size,
        batch_web3_provider=provider,
        max_workers=5,
        item_exporter=contracts_item_exporter(contracts_path))
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_contracts.' + output_format),
        read_file(contracts_path))
def test_export_geth_traces_job(tmpdir, start_block, end_block, resource_group,
                                web3_provider_type):
    """Export raw geth traces for a block range and compare with the fixture JSON."""
    traces_path = tmpdir.join('actual_geth_traces.json')

    provider = ThreadLocalProxy(lambda: get_web3_provider(
        web3_provider_type,
        lambda file: read_resource(resource_group, file),
        batch=True))

    job = ExportGethTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=1,
        batch_web3_provider=provider,
        max_workers=5,
        item_exporter=geth_traces_item_exporter(traces_path),
    )
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'geth_traces.json'),
        read_file(traces_path))
# Example 20
def test_export_blocks_job(tmpdir, start_block, end_block, resource_group,
                           provider_type):
    """Export IoTeX blocks/actions/logs for a block range and compare each file with fixtures."""
    rpc = ThreadLocalProxy(
        lambda: get_iotex_rpc(provider_type,
                              read_resource_lambda=lambda file:
                              read_resource(resource_group, file)))

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        iotex_rpc=rpc,
        max_workers=5,
        item_exporter=IotexItemExporter(str(tmpdir)),
    )
    job.run()

    # The exporter writes one JSON file per entity into tmpdir.
    for file in ('blocks.json', 'actions.json', 'logs.json'):
        actual = read_file(str(tmpdir.join(file)))
        print(actual)
        compare_lines_ignore_order(
            read_resource(resource_group, f'expected_{file}'), actual)
def test_export_tx_blocks_job(tmpdir, start_block, end_block, resource_group, provider_type):
    """Export Zilliqa tx blocks and related entities and compare each file with fixtures."""
    api = ThreadLocalProxy(
        lambda: get_zilliqa_api(
            provider_type,
            read_resource_lambda=lambda file: read_resource(resource_group, file)))

    job = ExportTxBlocksJob(
        start_block=start_block,
        end_block=end_block,
        zilliqa_api=api,
        max_workers=5,
        item_exporter=ZilliqaItemExporter(str(tmpdir)),
    )
    job.run()

    # One JSON file per exported entity type.
    all_files = ['tx_blocks.json', 'ds_blocks.json', 'transactions.json', 'event_logs.json', 'transitions.json',
                 'exceptions.json']
    for file in all_files:
        actual = read_file(str(tmpdir.join(file)))
        print(actual)
        compare_lines_ignore_order(
            read_resource(resource_group, f'expected_{file}'), actual
        )
# Example 22
def test_stream(tmpdir, start_block, end_block, batch_size, resource_group,
                provider_type, chain):
    """Stream a bitcoin block range through the Streamer and compare outputs with fixtures."""
    # Drop sync state left behind by a previous run so the stream starts fresh.
    try:
        os.remove('last_synced_block.txt')
    except OSError:
        pass

    blocks_path = str(tmpdir.join('actual_block.json'))
    txs_path = str(tmpdir.join("actual_transactions.json"))

    rpc = ThreadLocalProxy(
        lambda: get_bitcoin_rpc(provider_type,
                                read_resource_lambda=lambda file:
                                read_resource(resource_group, file),
                                chain=chain))

    adapter = BtcStreamerAdapter(
        bitcoin_rpc=rpc,
        batch_size=batch_size,
        item_exporter=CompositeItemExporter(
            filename_mapping={
                'block': blocks_path,
                'transaction': txs_path,
            }),
    )
    Streamer(blockchain_streamer_adapter=adapter,
             start_block=start_block,
             end_block=end_block,
             retry_errors=False).stream()

    # Print each output before comparing to ease debugging of fixture mismatches.
    for expected_resource, actual_path in (('expected_blocks.json', blocks_path),
                                           ('expected_transactions.json', txs_path)):
        print('=====================')
        print(read_file(actual_path))
        compare_lines_ignore_order(
            read_resource(resource_group, expected_resource),
            read_file(actual_path))
# Example 23
def test_export_receipts_job(tmpdir, batch_size, tx_hashes, resource_group):
    """Export receipts and logs for the given tx hashes and compare with the fixture CSVs."""
    receipts_path = tmpdir.join('actual_receipts.csv')
    logs_path = tmpdir.join('actual_logs.csv')

    provider = ThreadLocalProxy(
        lambda: MockBatchWeb3Provider(lambda file: read_resource(resource_group, file)))

    job = ExportReceiptsJob(
        tx_hashes_iterable=tx_hashes,
        batch_size=batch_size,
        batch_web3_provider=provider,
        max_workers=5,
        item_exporter=export_receipts_job_item_exporter(receipts_path, logs_path),
        export_receipts=receipts_path is not None,
        export_logs=logs_path is not None
    )
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.csv'), read_file(receipts_path)
    )
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.csv'), read_file(logs_path)
    )
# Example 24
def test_export_job(tmpdir, start_block, end_block, resource_group,
                    provider_type):
    """Export Tezos blocks, balance updates, and all operation kinds; compare with fixtures."""
    rpc = ThreadLocalProxy(
        lambda: get_tezos_rpc(provider_type,
                              read_resource_lambda=lambda file:
                              read_resource(resource_group, file)))

    job = ExportJob(
        start_block=start_block,
        end_block=end_block,
        tezos_rpc=rpc,
        max_workers=5,
        item_exporter=TezosItemExporter(str(tmpdir)),
    )
    job.run()

    # One file per entity plus one per operation kind.
    all_files = ['blocks.json', 'balance_updates.json'] + \
                [f'{operation_kind}_operations.json' for operation_kind in OperationKind.ALL]

    for file in all_files:
        actual = read_file(str(tmpdir.join(file)))
        print('=====================')
        print(actual)
        compare_lines_ignore_order(
            read_resource(resource_group, f'expected_{file}'), actual)
# Example 25
def test_stream(tmpdir, start_block, end_block, batch_size, resource_group,
                entity_types, provider_type):
    """Stream an ETH block range and, for each requested entity type, diff output vs fixture."""
    # Drop sync state left behind by a previous run so the stream starts fresh.
    try:
        os.remove('last_synced_block.txt')
    except OSError:
        pass

    blocks_output_file = str(tmpdir.join('actual_blocks.json'))
    transactions_output_file = str(tmpdir.join('actual_transactions.json'))
    logs_output_file = str(tmpdir.join('actual_logs.json'))
    token_transfers_output_file = str(
        tmpdir.join('actual_token_transfers.json'))
    traces_output_file = str(tmpdir.join('actual_traces.json'))
    contracts_output_file = str(tmpdir.join('actual_contracts.json'))
    tokens_output_file = str(tmpdir.join('actual_tokens.json'))

    provider = ThreadLocalProxy(
        lambda: get_web3_provider(provider_type,
                                  read_resource_lambda=lambda file:
                                  read_resource(resource_group, file),
                                  batch=True))

    adapter = EthStreamerAdapter(
        batch_web3_provider=provider,
        batch_size=batch_size,
        item_exporter=CompositeItemExporter(
            filename_mapping={
                'block': blocks_output_file,
                'transaction': transactions_output_file,
                'log': logs_output_file,
                'token_transfer': token_transfers_output_file,
                'trace': traces_output_file,
                'contract': contracts_output_file,
                'token': tokens_output_file,
            }),
        entity_types=entity_types,
    )
    Streamer(blockchain_streamer_adapter=adapter,
             start_block=start_block,
             end_block=end_block,
             retry_errors=False).stream()

    # (entity type, actual output file, expected fixture) — only entity types
    # requested by the parametrization are checked.
    checks = [
        ('block', blocks_output_file, 'expected_blocks.json'),
        ('transaction', transactions_output_file, 'expected_transactions.json'),
        ('log', logs_output_file, 'expected_logs.json'),
        ('token_transfer', token_transfers_output_file, 'expected_token_transfers.json'),
        ('trace', traces_output_file, 'expected_traces.json'),
        ('contract', contracts_output_file, 'expected_contracts.json'),
        ('token', tokens_output_file, 'expected_tokens.json'),
    ]
    for entity_type, actual_file, expected_resource in checks:
        if entity_type in entity_types:
            print('=====================')
            print(read_file(actual_file))
            compare_lines_ignore_order(
                read_resource(resource_group, expected_resource),
                read_file(actual_file))
# Example 26
def test_stream(tmpdir, start_block, end_block, batch_size, resource_group,
                entity_types, provider_type):
    """Stream an IoTeX block range and, for each requested entity type, diff output vs fixture.

    Fix: the TRANSACTION_LOG branch printed ``logs_output_file`` instead of
    ``transaction_logs_output_file`` (copy-paste bug), so the debug output did
    not show the file actually being compared.
    """
    # Drop sync state left behind by a previous run so the stream starts fresh.
    try:
        os.remove('last_synced_block.txt')
    except OSError:
        pass

    blocks_output_file = str(tmpdir.join('actual_blocks.json'))
    actions_output_file = str(tmpdir.join('actual_actions.json'))
    logs_output_file = str(tmpdir.join('actual_logs.json'))
    transaction_logs_output_file = str(
        tmpdir.join('actual_transaction_logs.json'))

    streamer_adapter = IotexStreamerAdapter(
        iotex_rpc=ThreadLocalProxy(
            lambda: get_iotex_rpc(provider_type,
                                  read_resource_lambda=lambda file:
                                  read_resource(resource_group, file))),
        batch_size=batch_size,
        item_exporter=CompositeItemExporter(
            filename_mapping={
                'block': blocks_output_file,
                'action': actions_output_file,
                'log': logs_output_file,
                'transaction_log': transaction_logs_output_file,
            }),
        entity_types=entity_types,
    )
    streamer = Streamer(blockchain_streamer_adapter=streamer_adapter,
                        start_block=start_block,
                        end_block=end_block,
                        retry_errors=False)
    streamer.stream()

    if EntityType.BLOCK in entity_types:
        print('=====================')
        print(read_file(blocks_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_blocks.json'),
            read_file(blocks_output_file))

    if EntityType.ACTION in entity_types:
        print('=====================')
        print(read_file(actions_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_actions.json'),
            read_file(actions_output_file))

    if EntityType.LOG in entity_types:
        print('=====================')
        print(read_file(logs_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_logs.json'),
            read_file(logs_output_file))

    if EntityType.TRANSACTION_LOG in entity_types:
        print('=====================')
        # Print the transaction-logs file (the original printed logs_output_file here).
        print(read_file(transaction_logs_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_transaction_logs.json'),
            read_file(transaction_logs_output_file))