def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex:
    """
    Return the beacon proposer index at the current slot.
    """
    epoch = get_current_epoch(state)
    seed = hash(get_seed(state, epoch, DOMAIN_BEACON_PROPOSER) + int_to_bytes(state.slot, length=8))
    indices = get_active_validator_indices(state, epoch)
    return compute_proposer_index(state, indices, seed)
def merkleize(chunks):
    # Merkleize a list of 32-byte chunks: pad with zero chunks up to the next
    # power of two, then hash pairwise bottom-up and return the root.
    tree = chunks[::]
    while not is_power_of_two(len(tree)):
        tree.append(ZERO_CHUNK)
    tree = [ZERO_CHUNK] * len(tree) + tree
    for i in range(len(tree) // 2 - 1, 0, -1):
        tree[i] = hash(tree[i * 2] + tree[i * 2 + 1])
    return tree[1]
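# Hedged usage sketch for merkleize above. It assumes `hash` is SHA-256 over the
# 64-byte concatenation of two chunks, ZERO_CHUNK is 32 zero bytes, and
# is_power_of_two(n) is true for 1, 2, 4, 8, ...; minimal stand-ins are defined
# here so the example runs on its own.
from hashlib import sha256

def hash(x):
    return sha256(x).digest()

ZERO_CHUNK = b'\x00' * 32

def is_power_of_two(n):
    return n > 0 and n & (n - 1) == 0

chunks = [bytes([i]) * 32 for i in range(1, 4)]   # three 32-byte leaves
# Three leaves are padded to four, so the root is hash(hash(c0+c1) + hash(c2+zero)).
assert merkleize(chunks) == hash(hash(chunks[0] + chunks[1]) + hash(chunks[2] + ZERO_CHUNK))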
def process_randao(state: BeaconState, body: BeaconBlockBody) -> None:
    epoch = get_current_epoch(state)
    # Verify RANDAO reveal
    proposer = state.validators[get_beacon_proposer_index(state)]
    assert bls_verify(proposer.pubkey, hash_tree_root(epoch), body.randao_reveal, get_domain(state, DOMAIN_RANDAO))
    # Mix in RANDAO reveal
    mix = xor(get_randao_mix(state, epoch), hash(body.randao_reveal))
    state.randao_mixes[epoch % EPOCHS_PER_HISTORICAL_VECTOR] = mix
def compute_shuffled_index(index: ValidatorIndex, index_count: uint64, seed: Bytes32) -> ValidatorIndex:
    """
    Return the shuffled validator index corresponding to ``seed`` (and ``index_count``).
    """
    assert index < index_count

    # Swap or not (https://link.springer.com/content/pdf/10.1007%2F978-3-642-32009-5_1.pdf)
    # See the 'generalized domain' algorithm on page 3
    for current_round in range(SHUFFLE_ROUND_COUNT):
        pivot = bytes_to_int(hash(seed + int_to_bytes(current_round, length=1))[0:8]) % index_count
        flip = uint64((pivot + index_count - index) % index_count)
        position = max(index, flip)
        source = hash(seed + int_to_bytes(current_round, length=1) + int_to_bytes(position // 256, length=4))
        byte = source[(position % 256) // 8]
        bit = (byte >> (position % 8)) % 2
        index = flip if bit else index

    return ValidatorIndex(index)
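# Hedged demo for compute_shuffled_index above, assuming the spec helpers it
# uses (SHUFFLE_ROUND_COUNT, hash, bytes_to_int, int_to_bytes) and the uint64 /
# ValidatorIndex types are in scope. The swap-or-not construction is a
# permutation, so shuffling every index of a small set hits each slot exactly once.
seed = b'\x42' * 32
index_count = 16
shuffled = [compute_shuffled_index(i, index_count, seed) for i in range(index_count)]
assert sorted(shuffled) == list(range(index_count))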
def filler(starting_position, chunk_count):
    # Return the zero-subtree hashes needed to pad ``chunk_count`` chunks out to
    # the next power of two. Each entry is keyed by ``starting_position`` plus
    # the first chunk index the zero-subtree covers, and its value is the
    # zero-hash for that subtree's height.
    at, skip, end = chunk_count, 1, next_power_of_two(chunk_count)
    value = ZERO_CHUNK
    o = {}
    while at < end:
        while at % (skip * 2) == 0:
            skip *= 2
            value = hash(value + value)
        o[starting_position + at] = value
        at += skip
    return o
def root(self):
    # Compute the Merkle root from self.objects, a dict of generalized index ->
    # 32-byte node: whenever both children of a node are known, hash them to
    # fill in the parent, until the root (index 1) is reached.
    o = {**self.objects}
    keys = sorted(o.keys())[::-1]
    pos = 0
    while pos < len(keys):
        k = keys[pos]
        if k in o and k ^ 1 in o and k // 2 not in o:
            o[k // 2] = hash(o[k & -2] + o[k | 1])
            keys.append(k // 2)
        pos += 1
    return o[1]
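# Hedged usage sketch for the root method above, assuming it belongs to a class
# that keeps Merkle nodes in self.objects keyed by generalized index (1 is the
# root; node k has children 2k and 2k+1) and that hash is in scope. The class
# name Partial here is illustrative, not from the original code.
class Partial:
    def __init__(self, objects):
        self.objects = objects

Partial.root = root      # attach the function defined above for this sketch

c = [bytes([i]) * 32 for i in range(1, 5)]
leaves = {4 + i: c[i] for i in range(4)}       # a complete bottom layer of 4 leaves
assert Partial(leaves).root() == hash(hash(c[0] + c[1]) + hash(c[2] + c[3]))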
def calc_merkle_tree_from_leaves(values, layer_count=32):
    # Build all layers of a Merkle tree from ``values``, padding each odd-sized
    # layer with the zero-hash of the matching height before hashing pairwise.
    values = list(values)
    tree = [values[::]]
    for h in range(layer_count):
        if len(values) % 2 == 1:
            values.append(zerohashes[h])
        values = [hash(values[i] + values[i + 1]) for i in range(0, len(values), 2)]
        tree.append(values[::])
    return tree
def merkle_branch(chunks, index):
    # Build the full tree the same way as merkleize, but record the sibling
    # hashes along the path from leaf ``index`` to the root, keyed by
    # generalized index.
    tree = chunks[::]
    while not is_power_of_two(len(tree)):
        tree.append(ZERO_CHUNK)
    tree = [ZERO_CHUNK] * len(tree) + tree
    output = {}
    opos = len(tree) // 2 + index
    for i in range(len(tree) // 2 - 1, 0, -1):
        tree[i] = hash(tree[i * 2] + tree[i * 2 + 1])
        if i == opos // 2:
            output[opos ^ 1] = tree[opos ^ 1]
            opos //= 2
    return output
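# Hedged usage sketch: check a branch from merkle_branch above against the root
# from merkleize, assuming both functions and their helpers (hash, ZERO_CHUNK,
# is_power_of_two) are in scope. With four leaves, the leaf for ``index`` sits
# at generalized index 4 + index and the branch holds its siblings on the way up.
chunks = [bytes([i]) * 32 for i in range(1, 5)]
index = 1
branch = merkle_branch(chunks, index)
root = merkleize(chunks)

pos = len(chunks) + index        # generalized index of the leaf (power-of-two leaf count)
node = chunks[index]
while pos > 1:
    sibling = branch[pos ^ 1]
    node = hash(sibling + node) if pos % 2 else hash(node + sibling)
    pos //= 2
assert node == root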
def merge_ssz_branches(*branches):
    # Merge several Merkle branches (dicts of generalized index -> node) into a
    # minimal combined witness: fill in any parent whose two children are known,
    # then drop every node that can be recomputed from its children.
    o = {}
    for branch in branches:
        o = {**o, **branch}
    keys = sorted(o.keys())[::-1]
    pos = 0
    while pos < len(keys):
        k = keys[pos]
        if k in o and k ^ 1 in o and k // 2 not in o:
            o[k // 2] = hash(o[k & -2] + o[k | 1])
            keys.append(k // 2)
        pos += 1
    return {x: o[x] for x in o if not (x * 2 in o and x * 2 + 1 in o)}
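# Hedged usage sketch for merge_ssz_branches above, assuming merkle_branch
# (defined earlier) and hash are in scope. Merging the witnesses for two sibling
# leaves keeps only the nodes that cannot be recomputed from children already
# present: the two leaves (generalized indices 4 and 5) and the right subtree
# hash (index 3).
chunks = [bytes([i]) * 32 for i in range(1, 5)]
merged = merge_ssz_branches(merkle_branch(chunks, 0), merkle_branch(chunks, 1))
assert sorted(merged.keys()) == [3, 4, 5]
assert merged[4] == chunks[0] and merged[5] == chunks[1]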
def compute_proposer_index(state: BeaconState, indices: Sequence[ValidatorIndex], seed: Bytes32) -> ValidatorIndex:
    """
    Return from ``indices`` a random index sampled by effective balance.
    """
    assert len(indices) > 0
    MAX_RANDOM_BYTE = 2**8 - 1
    i = 0
    while True:
        candidate_index = indices[compute_shuffled_index(uint64(i % len(indices)), len(indices), seed)]
        random_byte = hash(seed + int_to_bytes(i // 32, length=8))[i % 32]
        effective_balance = state.validators[candidate_index].effective_balance
        if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE * random_byte:
            return ValidatorIndex(candidate_index)
        i += 1
def merkleize_chunks(chunks, limit=None):
    # If no limit is defined, we are just merkleizing chunks (e.g. SSZ container).
    if limit is None:
        limit = len(chunks)

    count = len(chunks)
    # See if the input is within expected size.
    # If not, a list-limit is set incorrectly, or a value is unexpectedly large.
    assert count <= limit

    if limit == 0:
        return zerohashes[0]

    depth = max(count - 1, 0).bit_length()
    max_depth = (limit - 1).bit_length()
    tmp = [None for _ in range(max_depth + 1)]

    def merge(h, i):
        j = 0
        while True:
            if i & (1 << j) == 0:
                if i == count and j < depth:
                    # keep going if we are complementing the void to the next power of 2
                    h = hash(h + zerohashes[j])
                else:
                    break
            else:
                h = hash(tmp[j] + h)
            j += 1
        tmp[j] = h

    # merge in leaf by leaf.
    for i in range(count):
        merge(chunks[i], i)

    # complement with 0 if empty, or if not the right power of 2
    if 1 << depth != count:
        merge(zerohashes[0], count)

    # the next power of two may be smaller than the ultimate virtual size,
    # complement with zero-hashes at each depth.
    for j in range(depth, max_depth):
        tmp[j + 1] = hash(tmp[j] + zerohashes[j])

    return tmp[max_depth]
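# Hedged usage sketch for merkleize_chunks above, assuming hash is SHA-256 over
# 64 bytes of input and zerohashes[i] is the height-i zero-subtree hash, as
# built elsewhere in these snippets.
c = [bytes([i]) * 32 for i in range(1, 4)]
root4 = hash(hash(c[0] + c[1]) + hash(c[2] + zerohashes[0]))
# Without a limit, three chunks are padded to the next power of two (4 leaves).
assert merkleize_chunks(c) == root4
# With a list limit of 8, the tree is virtually extended with zero-subtrees.
assert merkleize_chunks(c, limit=8) == hash(root4 + zerohashes[2])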
def get_genesis_state(n, seed="hello"):
    block_hash = hash(seed.encode("utf-8"))
    eth1_timestamp = 1578009600
    return specs.initialize_beacon_state_from_eth1(block_hash, eth1_timestamp, get_initial_deposits(n))
def get_seed(state: BeaconState, epoch: Epoch, domain_type: DomainType) -> Bytes32:
    """
    Return the seed at ``epoch``.
    """
    mix = get_randao_mix(state, Epoch(epoch + EPOCHS_PER_HISTORICAL_VECTOR - MIN_SEED_LOOKAHEAD - 1))  # Avoid underflow
    return hash(domain_type + int_to_bytes(epoch, length=8) + mix)
import hash_function

# Hash the value of every "name = value" line in file_to_hash.txt and write the
# results to result.txt.
arr_of_lines = []
with open("file_to_hash.txt", "r") as file_to_hash:
    for line in file_to_hash.readlines():  # readlines() splits the file into a list, one element per line
        temp_arr = line.strip().split('=')  # strip the trailing newline so it is not hashed
        # hash() expects bytes, so encode the value; hex-encode the digest for printing.
        temp_str = temp_arr[0] + " = " + hash_function.hash(temp_arr[1].encode("utf-8")).hex()
        print(temp_str)
        arr_of_lines.append(temp_str)

with open("result.txt", 'w') as writer:
    for line in arr_of_lines:
        writer.write(line + "\n")
def mix_in_length(root, length):
    # SSZ: mix a list's length into its Merkle root by hashing the root together
    # with the length as a 32-byte little-endian value.
    return hash(root + length.to_bytes(32, 'little'))
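# Hedged usage sketch: SSZ derives a list's hash tree root by mixing the element
# count into the root of its limit-padded chunks. merkleize_chunks is assumed to
# be in scope as defined above; the 3-element list with chunk limit 8 is only an
# illustration.
chunks = [bytes([i]) * 32 for i in range(1, 4)]
list_root = mix_in_length(merkleize_chunks(chunks, limit=8), len(chunks))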
def get_genesis_state(n):
    hey = "hello"
    block_hash = hash(hey.encode("utf-8"))
    eth1_timestamp = 1578009600
    return initialize_beacon_state_from_eth1(block_hash, eth1_timestamp, get_initial_deposits(n))
from hash_function import hash
from math import log2

ZERO_BYTES32 = b'\x00' * 32

# zerohashes[i] is the root of a depth-i subtree whose leaves are all zero.
zerohashes = [ZERO_BYTES32]
for layer in range(1, 100):
    zerohashes.append(hash(zerohashes[layer - 1] + zerohashes[layer - 1]))


def calc_merkle_tree_from_leaves(values, layer_count=32):
    values = list(values)
    tree = [values[::]]
    for h in range(layer_count):
        if len(values) % 2 == 1:
            values.append(zerohashes[h])
        values = [hash(values[i] + values[i + 1]) for i in range(0, len(values), 2)]
        tree.append(values[::])
    return tree


def get_merkle_tree(values, pad_to=None):
    # Pick enough layers to cover len(values) (or pad_to), then build the tree.
    layer_count = (len(values) - 1).bit_length() if pad_to is None else (pad_to - 1).bit_length()
    if len(values) == 0:
        return zerohashes[layer_count]
    return calc_merkle_tree_from_leaves(values, layer_count)
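# Hedged usage sketch for get_merkle_tree above: tree[0] is the original leaf
# layer, each higher layer halves in size (padding odd-sized layers with the
# matching zerohash), and tree[-1][0] is the root.
leaves = [bytes([i]) * 32 for i in range(1, 4)]
tree = get_merkle_tree(leaves, pad_to=4)     # two hashing layers above the leaves
assert len(tree) == 3
assert tree[-1][0] == hash(hash(leaves[0] + leaves[1]) + hash(leaves[2] + zerohashes[0]))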