Example #1
def migrate_genesis_txn(base_dir):
    for suffix in ('sandbox', 'live', 'local'):
        old_domain_genesis = os.path.join(
            base_dir, 'transactions_{}'.format(suffix))
        old_pool_genesis = os.path.join(
            base_dir, 'pool_transactions_{}'.format(suffix))

        new_domain_genesis = os.path.join(
            base_dir, 'domain_transactions_{}_genesis'.format(suffix))
        new_pool_genesis = os.path.join(
            base_dir, 'pool_transactions_{}_genesis'.format(suffix))

        if os.path.exists(new_domain_genesis):
            os.remove(new_domain_genesis)
        if os.path.exists(new_pool_genesis):
            os.remove(new_pool_genesis)

        if os.path.exists(old_domain_genesis):
            old_ser = CompactSerializer(getTxnOrderedFields())
            new_ser = JsonSerializer()
            with open(old_domain_genesis, 'r') as f1:
                with open(new_domain_genesis, 'w') as f2:
                    for line in store_utils.cleanLines(f1):
                        txn = old_ser.deserialize(line)
                        txn = {k: v for k, v in txn.items() if v}
                        txn = new_ser.serialize(txn, toBytes=False)
                        f2.write(txn)
                        f2.write('\n')
            os.remove(old_domain_genesis)
        if os.path.exists(old_pool_genesis):
            os.rename(old_pool_genesis, new_pool_genesis)
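The interesting part of the migration is the per-line rewrite: each compactly serialized domain transaction is deserialized, empty fields are dropped, and the result is written back as one JSON object per line. A minimal standalone sketch of that step, using only the standard library (json stands in for JsonSerializer, which, as the test example further below shows, emits compact, key-sorted JSON; the sample transaction is hypothetical):

import json

def rewrite_txn_line(txn):
    # Drop empty/falsy fields, then emit compact JSON with sorted keys.
    txn = {k: v for k, v in txn.items() if v}
    return json.dumps(txn, sort_keys=True, separators=(',', ':'))

print(rewrite_txn_line({'type': '1', 'dest': 'some-did', 'role': ''}))
# -> {"dest":"some-did","type":"1"}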
Example #3
def explorer():
    # args = read_args()
    config = getConfig()
    result = []
    ledger_data_dir = get_ledger_dir("", "")
    read_copy_ledger_data_dir = None
    try:
        # RocksDB supports a real read-only mode and does not need a ledger copy.
        if config.hashStore['type'].lower() != HS_ROCKSDB:
            config.db_transactions_config = None
            # NOTE: this approach works well only for small ledgers.
            tmp = make_copy_of_ledger(ledger_data_dir)

            # Be paranoid: make sure we remove the copy, never the ledger itself.
            ledger_path = Path(ledger_data_dir)
            ledger_copy_path = Path(tmp)
            assert ledger_path != ledger_copy_path
            assert ledger_copy_path not in ledger_path.parents

            read_copy_ledger_data_dir = tmp
            ledger_data_dir = read_copy_ledger_data_dir
        elif config.db_transactions_config is not None:
            # Avoid creating debug logs on every read_ledger run.
            config.db_transactions_config['db_log_dir'] = '/dev/null'

        storage = get_storage("domain", ledger_data_dir)

        # Read the transactions while the (possibly copied) ledger still exists.
        serializer = JsonSerializer()
        for seqNo, txn in storage.iterator(start=0, end=100):
            txn = ledger_txn_serializer.deserialize(txn)
            x = serializer.serialize(txn, toBytes=False)
            result.append(x)
        return result
    finally:
        # Always clean up the temporary ledger copy, if one was made.
        if read_copy_ledger_data_dir:
            shutil.rmtree(read_copy_ledger_data_dir)
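For context, the copy-then-read-then-clean-up pattern above can be sketched with the standard library alone. getConfig, make_copy_of_ledger, get_storage and ledger_txn_serializer are assumed to come from the surrounding indy/plenum tooling; the helper below is only an illustration of the pattern, not part of that project:

import shutil
import tempfile
from pathlib import Path

def read_from_copy(data_dir):
    # Work on a throwaway copy so the original data is never touched,
    # and always remove that copy when done.
    tmp = tempfile.mkdtemp(prefix='ledger_copy_')
    copy_dir = Path(tmp) / 'data'
    shutil.copytree(data_dir, copy_dir)
    try:
        # ... open read-only structures against copy_dir and collect results ...
        return sorted(p.name for p in copy_dir.iterdir())
    finally:
        shutil.rmtree(tmp)  # removes the copy, never the original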
Example #4
def testJsonSerializer():
    sz = JsonSerializer()
    m1 = {
        'integer': 36,
        'name': 'Foo',
        'surname': 'Bar',
        'float': 14.8639,
        'index': 1,
        'index_start_at': 56,
        'email': '*****@*****.**',
        'fullname': 'Foo Bar',
        'bool': False
    }
    m1s = '{"bool":false,"email":"*****@*****.**","float":14.8639,"fullname":"Foo Bar",' \
          '"index":1,"index_start_at":56,"integer":36,"name":"Foo","surname":"Bar"}'

    m2 = {
        'latitude': 31.351883,
        'longitude': -97.466179,
        'tags': ['foo', 'bar', 'baz', 'alice', 'bob', 'carol', 'dave']
    }
    m2s = '{"latitude":31.351883,"longitude":-97.466179,' \
          '"tags":["foo","bar","baz","alice","bob","carol","dave"]}'

    m3 = {
        'name':
        'Alice Bob',
        'website':
        'example.com',
        'friends': [{
            'id': 0,
            'name': 'Dave'
        }, {
            'id': 1,
            'name': 'Carol'
        }, {
            'id': 2,
            'name': 'Dave'
        }]
    }
    m3s = '{"friends":[{"id":0,"name":"Dave"},{"id":1,"name":"Carol"},' \
          '{"id":2,"name":"Dave"}],' \
          '"name":"Alice Bob","website":"example.com"}'

    assert sz.serialize(m1) == m1s.encode()
    assert sz.serialize(m1, toBytes=False) == m1s
    assert sz.serialize(m2) == m2s.encode()
    assert sz.serialize(m2, toBytes=False) == m2s
    assert sz.serialize(m3) == m3s.encode()
    assert sz.serialize(m3, toBytes=False) == m3s

    assert sz.deserialize(m1s) == m1
    assert sz.deserialize(m1s.encode()) == m1
    assert sz.deserialize(m2s) == m2
    assert sz.deserialize(m2s.encode()) == m2
    assert sz.deserialize(m3s) == m3
    assert sz.deserialize(m3s.encode()) == m3
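The expected strings in the test pin down the serializer's observable behavior: keys sorted alphabetically, no whitespace, and bytes output by default. Assuming that is all JsonSerializer does for these inputs, the same output can be reproduced with the standard library (a sketch, not the project's actual implementation):

import json

def compact_sorted_json(obj, to_bytes=True):
    # Compact separators and sorted keys reproduce the strings asserted above.
    s = json.dumps(obj, sort_keys=True, separators=(',', ':'))
    return s.encode() if to_bytes else s

m2 = {'latitude': 31.351883, 'longitude': -97.466179,
      'tags': ['foo', 'bar', 'baz', 'alice', 'bob', 'carol', 'dave']}
assert compact_sorted_json(m2, to_bytes=False) == \
    '{"latitude":31.351883,"longitude":-97.466179,' \
    '"tags":["foo","bar","baz","alice","bob","carol","dave"]}'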
def _print_debug_data(found_data):
    # Debug helper: deserialize a raw ledger transaction and log it as JSON.
    serializer = JsonSerializer()

    txn = ledger_txn_serializer.deserialize(found_data)
    txn = serializer.serialize(txn, toBytes=False)
    logger.debug("txn: {}".format(txn))