Example #1
0
def create_histories(history, hashX_count=100):
    '''Creates a bunch of random transaction histories, and write them
    to disk in a series of small flushes.

    Returns a dict mapping each hashX to an array.array('Q') of the tx
    numbers recorded for it (entries are kept even for hashXs dropped
    partway through the run).
    '''
    hashXs = [urandom(HASHX_LEN) for _ in range(hashX_count)]
    # 'Q' holds the full tx number; the unflushed entries use the
    # 5-byte little-endian truncation of the same number.
    histories = {hashX: array.array('Q') for hashX in hashXs}
    unflushed = history.unflushed
    tx_num = 0
    while hashXs:
        tx_numb = pack_le_uint64(tx_num)[:5]
        # Pick 1..4 distinct hashXs to receive this tx
        hash_indexes = set(
            random.randrange(len(hashXs))
            for _ in range(1 + random.randrange(4)))
        for index in hash_indexes:
            histories[hashXs[index]].append(tx_num)
            unflushed[hashXs[index]].extend(tx_numb)

        tx_num += 1
        # Occasionally flush and drop a random hashX if non-empty
        if random.random() < 0.1:
            history.flush()
            index = random.randrange(len(hashXs))
            if histories[hashXs[index]]:
                del hashXs[index]

    return histories
Example #2
0
 def add_unflushed(self, hashXs_by_tx, first_tx_num):
     '''Record the hashXs touched by a batch of consecutive txs in the
     unflushed in-memory history, starting at tx number first_tx_num.

     Each unique hashX of a tx gets the 5-byte little-endian tx number
     appended to its unflushed entry; self.unflushed_count is bumped by
     the total number of (hashX, tx) pairs added.
     '''
     unflushed = self.unflushed
     added = 0
     for tx_num, tx_hashXs in enumerate(hashXs_by_tx, start=first_tx_num):
         tx_numb = pack_le_uint64(tx_num)[:5]
         # De-duplicate: a hashX appearing twice in one tx is counted once
         unique_hashXs = set(tx_hashXs)
         added += len(unique_hashXs)
         for hashX in unique_hashXs:
             unflushed[hashX].extend(tx_numb)
     self.unflushed_count += added
Example #3
0
 def fs_update_header_offsets(self, offset_start, height_start, headers):
     '''Update the on-disk header offsets file for a run of headers
     beginning at height_start, whose first byte offset is offset_start.

     A no-op for coins with fixed-size block headers.
     '''
     if self.coin.STATIC_BLOCK_HEADERS:
         return
     running_offset = offset_start
     packed_offsets = []
     for header in headers:
         running_offset += len(header)
         packed_offsets.append(pack_le_uint64(running_offset))
     # For each header we get the offset of the next header, hence we
     # start writing from the next height
     self.headers_offsets_file.write((height_start + 1) * 8,
                                     b''.join(packed_offsets))
Example #4
0
def check_hashX_compaction(history):
    '''Exercise History._compact_hashX and sanity-check its output.

    Builds a 100-entry history for one hashX split unevenly across three
    flushes, compacts it, and asserts the compacted rows, deletions and
    write size.  Then checks that re-compaction is a no-op, and that
    appending one entry rewrites only the final row.
    '''
    history.max_hist_row_entries = 40
    # Each history entry is a 5-byte little-endian tx number
    row_size = history.max_hist_row_entries * 5
    full_hist = b''.join(pack_le_uint64(tx_num)[:5] for tx_num in range(100))
    hashX = urandom(HASHX_LEN)
    # (flush_count, entry_count) pairs: 20 + 50 + 30 = 100 entries
    pairs = ((1, 20), (26, 50), (56, 30))

    cum = 0
    hist_list = []
    hist_map = {}
    for flush_count, count in pairs:
        key = hashX + pack_be_uint16(flush_count)
        hist = full_hist[cum * 5:(cum + count) * 5]
        hist_map[key] = hist
        hist_list.append(hist)
        cum += count

    write_items = []
    keys_to_delete = set()
    write_size = history._compact_hashX(hashX, hist_map, hist_list,
                                        write_items, keys_to_delete)
    # Check results for sanity
    assert write_size == len(full_hist)
    assert len(write_items) == 3
    assert len(keys_to_delete) == 3
    assert len(hist_map) == len(pairs)
    # Compacted rows are renumbered 0, 1, 2 and hold full rows in order
    for n, item in enumerate(write_items):
        assert item == (hashX + pack_be_uint16(n),
                        full_hist[n * row_size:(n + 1) * row_size])
    # Every original (uncompacted) key is scheduled for deletion
    for flush_count, count in pairs:
        assert hashX + pack_be_uint16(flush_count) in keys_to_delete

    # Check re-compaction is null
    hist_map = {key: value for key, value in write_items}
    hist_list = [value for key, value in write_items]
    write_items.clear()
    keys_to_delete.clear()
    write_size = history._compact_hashX(hashX, hist_map, hist_list,
                                        write_items, keys_to_delete)
    assert write_size == 0
    assert len(write_items) == 0
    assert len(keys_to_delete) == 0
    assert len(hist_map) == len(pairs)

    # Check re-compaction adding a single tx writes the one row.
    # BUGFIX: the appended entry must use the same 5-byte encoding as
    # every other entry in this test (array.array('I', ...) produced a
    # misaligned 4-byte entry).
    hist_list[-1] += pack_le_uint64(100)[:5]
    write_size = history._compact_hashX(hashX, hist_map, hist_list,
                                        write_items, keys_to_delete)
    assert write_size == len(hist_list[-1])
    assert write_items == [(hashX + pack_be_uint16(2), hist_list[-1])]
    assert len(keys_to_delete) == 1
    assert write_items[0][0] in keys_to_delete
    assert len(hist_map) == len(pairs)
Example #5
0
 def serialize_unsigned(self, payload_version):
     '''Serialize the unsigned portion of the payload as bytes.

     The DID field is only included for payload_version > 0.
     '''
     parts = [
         pack_varbytes(self.code),
         self.cid,
         self.did if payload_version > 0 else b'',
         pack_varbytes(self.nickname),
         pack_varbytes(self.url),
         pack_le_uint64(self.location),
     ]
     return b''.join(parts)
Example #6
0
 def serialize(self, payload_version):
     '''Serialize the full payload, including the signature, as bytes.

     payload_version is accepted for interface uniformity but does not
     affect the fields written here.
     '''
     parts = [
         pack_varbytes(self.ownerpb),
         pack_varbytes(self.nodepb),
         pack_varbytes(self.nickname),
         pack_varbytes(self.url),
         pack_le_uint64(self.location),
         pack_varbytes(self.netaddress),
         pack_varbytes(self.signature),
     ]
     return b''.join(parts)