Example #1
    def propose(self):
        # Derive this round's beacon from the secret we are about to reveal.
        self.compute_beacon(self.shared_secret)
        # Share a fresh random secret with the other nodes via PVSS, keeping the
        # encrypted shares and the correctness proof for inclusion in the dataset.
        new_shared_secret, encrypted_shares, proof = pvss.share_random_secret(self.OTHER_PUBLIC_KEYS, T)
        dataset = Dataset(
            round_idx=self.round,
            prev_round_idx=self.last_confirmed_round,
            revealed_secret=self.shared_secret,
            beacon=self.beacon,
            recovered_beacons=self.beacons[self.last_confirmed_round + 1: self.round],
            merkle_root=merkle.compute_root([Hash(bytes(es)) for es in encrypted_shares]),
            encrypted_shares=encrypted_shares,
            proof=proof,
            confirmation_certificate=self.last_confirmation_certificate,
            recovery_certificates=self.recovery_certificates[self.last_confirmed_round + 1: self.round],
        )

        # Sign the dataset header and pre-sign our own ConfirmMessage for it;
        # Signature.create_later takes a callable, so the message is built lazily.
        msg = ProposeMessage(
            sender=self.ID,
            dataset=dataset,
            dataset_header_signature=Signature.create_later(
                lambda: dataset.serialized_header, self.KEYPAIR.secret_key
            ),
            confirmation_certificate_signature=Signature.create_later(
                lambda: ConfirmMessage(self.ID, self.round, dataset.header_digest).serialize(),
                self.KEYPAIR.secret_key
            ),
        )
        self.broadcast_message(msg)
        self.datasets[self.round] = dataset
        self.shared_secret = new_shared_secret
        # Record our own confirmation locally before entering the voting phase.
        self.confirmations.append(Confirmation(self.ID, dataset.header_digest, msg.confirmation_certificate_signature))
        self.confirmation_counter.update([dataset.header_digest])
        self.update_phase(Phase.Vote)
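
Both Signature.create_later calls above take a zero-argument callable rather than the message bytes themselves, which suggests serialization and signing are deferred until the signature is actually needed. A minimal sketch of that pattern, with HMAC-SHA256 standing in for the real signature scheme and all names being illustrative rather than taken from hydrand:

import hashlib
import hmac


class LazySignature:
    """Defer message serialization and signing until the bytes are requested."""

    def __init__(self, get_message, secret_key):
        self._get_message = get_message  # zero-argument callable returning bytes
        self._secret_key = secret_key
        self._cached = None

    @property
    def value(self):
        if self._cached is None:
            # HMAC is only a placeholder for the actual signing primitive.
            self._cached = hmac.new(
                self._secret_key, self._get_message(), hashlib.sha256
            ).digest()
        return self._cached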
Example #2
def test_branch_power_of_two(leaves):
    root = compute_root(leaves)
    for i in range(len(leaves)):
        branch = compute_branch(leaves, i)
        assert leaves[i] in branch
        assert get_leaf(branch, i, len(leaves)) == leaves[i]
        assert verify_branch(branch, root, i, len(leaves))
Example #3
def test_branch_power_all(leaves):
    root = compute_root(leaves)
    for i in range(len(leaves)):
        branch = compute_branch(leaves, i)
        assert leaves[i] in branch
        assert get_leaf(branch, i, len(leaves)) == leaves[i]
        assert len(branch) == math.ceil(math.log2(len(leaves))) + 1
        assert verify_branch(branch, root, i, len(leaves))
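
The merkle module under test is not shown here. Below is a minimal, self-contained sketch of a padded binary Merkle tree that satisfies the properties asserted above (the leaf sits at the front of its branch, the branch has ceil(log2(n)) + 1 entries, and subtree roots compose as in Examples #7 and #8). It is illustrative only, not the hydrand implementation; leaves are plain 32-byte values here.

import hashlib
import math

ZERO_HASH = bytes(32)  # padding leaf


def _hash_pair(left, right):
    # Interior nodes hash the concatenation of their two children.
    return hashlib.sha256(left + right).digest()


def _padded_size(n):
    # Pad the leaf level up to the next power of two.
    return 1 if n == 1 else 2 ** math.ceil(math.log2(n))


def compute_root(leaves):
    level = list(leaves) + [ZERO_HASH] * (_padded_size(len(leaves)) - len(leaves))
    while len(level) > 1:
        level = [_hash_pair(level[i], level[i + 1]) for i in range(0, len(level), 2)]
    return level[0]


def compute_branch(leaves, idx):
    # Branch layout: [leaf, sibling at level 0, sibling at level 1, ...].
    level = list(leaves) + [ZERO_HASH] * (_padded_size(len(leaves)) - len(leaves))
    branch = [level[idx]]
    while len(level) > 1:
        branch.append(level[idx ^ 1])  # sibling of the current node
        level = [_hash_pair(level[i], level[i + 1]) for i in range(0, len(level), 2)]
        idx //= 2
    return branch


def get_leaf(branch, idx, num_leaves):
    # With the layout above the leaf is always the first branch entry;
    # idx and num_leaves are kept only to mirror the tested signature.
    return branch[0]


def verify_branch(branch, root, idx, num_leaves):
    node = branch[0]
    for sibling in branch[1:]:
        node = _hash_pair(node, sibling) if idx % 2 == 0 else _hash_pair(sibling, node)
        idx //= 2
    return node == root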
Example #4
def load_config(n=None):
    if n is None:
        n = N

    from hydrand import merkle
    from hydrand.data import ShareCorrectnessProof, NodeInfo

    CONFIG_DIR = os.path.join(CONFIG_BASE_DIR, f"{n:03}")
    if not os.path.exists(CONFIG_DIR):
        assert NETWORK_CONFIG != "amazon", "never generate config on the fly for amazon tests"
        logging.warning("config does not exist, generating one on the fly")
        return generate_sample_config()

    addresses, ports = load_network_config()
    node_infos = []
    for node_id in range(n):
        # Secret key material is only readable for this node itself
        # (or for every node when running in testing mode).
        if MODE == "testing" or NODE_ID == node_id:
            with open(os.path.join(CONFIG_DIR, f"{node_id:03}.secret_key"),
                      "rb") as f:
                keypair = KeyPair(f.read())
            public_key = keypair.public_key
            with open(os.path.join(CONFIG_DIR, f"{node_id:03}.initial_secret"),
                      "rb") as f:
                initial_secret = Scalar.from_bytes(f.read())
        else:
            keypair = None
            initial_secret = None
            with open(os.path.join(CONFIG_DIR, f"{node_id:03}.public_key"),
                      "rb") as f:
                public_key = Point.from_bytes(f.read())

        with open(
                os.path.join(CONFIG_DIR, f"{node_id:03}.initial_pvss_shares"),
                "rb") as f:
            # One 32-byte encrypted share per receiving node (all nodes except the dealer).
            shares = [Point.from_bytes(f.read(32)) for i in range(n - 1)]
        with open(os.path.join(CONFIG_DIR, f"{node_id:03}.initial_pvss_proof"),
                  "rb") as f:
            proof = ShareCorrectnessProof(
                commitments=[
                    Point.from_bytes(f.read(32)) for i in range(n - 1)
                ],
                challenge=Scalar.from_bytes(f.read(32)),
                responses=[
                    Scalar.from_bytes(f.read(32)) for i in range(n - 1)
                ],
            )

        merkle_root = merkle.compute_root(
            [merkle.Hash(bytes(es)) for es in shares])
        node_infos.append(
            NodeInfo(node_id, addresses[node_id], ports[node_id], keypair,
                     public_key, initial_secret, shares, proof, merkle_root))
    return node_infos
Example #5
def generate_sample_config(n=None, write_to_disk=False):
    if n is None:
        n = N

    from hydrand import merkle, pvss
    from hydrand.data import NodeInfo

    addresses, ports = load_network_config()
    node_infos = []
    t = math.ceil(n / 3)  # PVSS sharing threshold: one third of the nodes, rounded up
    keypairs = [KeyPair.random() for node_id in range(n)]
    for node_id, keypair in enumerate(keypairs):
        receiver_pks = [
            kp.public_key for j, kp in enumerate(keypairs) if j != node_id
        ]
        secret, shares, proof = pvss.share_random_secret(receiver_pks, t)
        merkle_root = merkle.compute_root(
            [merkle.Hash(bytes(es)) for es in shares])
        node_infos.append(
            NodeInfo(node_id, addresses[node_id], ports[node_id], keypair,
                     keypair.public_key, secret, shares, proof, merkle_root))
    if write_to_disk:
        save_config(node_infos)
    return node_infos
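
A quick way to exercise this path without any files on disk (the same fallback load_config takes above when the configuration directory is missing); n=4 is an arbitrary choice:

# Generate an in-memory sample configuration for four nodes; passing
# write_to_disk=True would persist it via save_config as shown above.
node_infos = generate_sample_config(n=4)
assert len(node_infos) == 4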
Example #6
# One random keypair for each of the N - 1 receiving nodes.
keypairs = [KeyPair.random() for i in range(N - 1)]
public_keys = [k.public_key for k in keypairs]
shared_secret, encrypted_shares, proofs = pvss.share_random_secret(
    public_keys, T)
decrypted_shares = [
    pvss.decrypt_share(share, keypair.secret_scalar)
    for share, keypair in zip(encrypted_shares, keypairs)
]

dataset_header = DatasetHeader(
    round_idx=4711,
    prev_round_idx=4710,
    revealed_secret=Scalar.random(),
    beacon=Hash(utils.deterministic_random_bytes(32, "some beacon")),
    recovered_beacons=[],
    merkle_root=merkle.compute_root(
        [Hash(bytes(es)) for es in encrypted_shares]))

decryption_proofs = [
    pvss.prove_share_decryption(decrypted_share, encrypted_share,
                                keypair.secret_scalar, keypair.public_key)
    for decrypted_share, encrypted_share, keypair in zip(
        decrypted_shares, encrypted_shares, keypairs)
]
merkle_branches = [
    merkle.compute_branch([Hash(bytes(s.value))
                           for s in encrypted_shares], share_idx)
    for share_idx, _ in enumerate(encrypted_shares)
]

serialized = dataset_header.serialize()
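
A hypothetical follow-up, not part of the source: assuming bytes(es) and bytes(s.value) encode each encrypted share identically (so the branches are built over the same leaves as the header's Merkle root) and that the header exposes that root as dataset_header.merkle_root, every branch should authenticate its share using the verify_branch helper from the tests above.

# Assumed attribute name (merkle_root) and leaf encoding; see the note above.
for share_idx, branch in enumerate(merkle_branches):
    assert merkle.verify_branch(branch, dataset_header.merkle_root,
                                share_idx, len(encrypted_shares))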
Example #7
def test_compose_uneven():
    L = compute_root(B[0:4])
    R = compute_root(B[4:7])
    assert compute_root([L, R]) == compute_root(B[0:7])
Example #8
def test_compose():
    L = compute_root(B[0:4])
    R = compute_root(B[4:8])
    assert compute_root([L, R]) == compute_root(B[0:8])