Example 1
def _lagrange_coefficient(i, idxs):
    numerator = Scalar(1)
    denominator = Scalar(1)
    for j in idxs:
        if j != i:
            numerator *= j
            denominator *= j - i
    return numerator / denominator
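A quick sanity check of the coefficient above (a sketch, assuming Scalar supports equality comparison): for the index set {1, 2, 3}, the coefficient of index 1 at x = 0 is (2*3) / ((2-1)*(3-1)) = 3.

idxs = [Scalar(1), Scalar(2), Scalar(3)]
assert _lagrange_coefficient(Scalar(1), idxs) == Scalar(3)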
Example 2
def test_dleq_parallel():
    α = [Scalar.random() for _ in range(10)]
    g = [G * Scalar.random() for _ in range(10)]
    x = [g[i] * α[i] for i in range(10)]
    h = [H * Scalar.random() for _ in range(10)]
    y = [h[i] * α[i] for i in range(10)]
    e, z = _DLEQ_prove(g, x, h, y, α)
    assert _DLEQ_verify(g, x, h, y, e, z)
Example 3
def random(degree: int, secret: Optional[Scalar] = None) -> "Polynomial":
    """ Return a polynomial with random coefficients from Zq.
        p(x) = c_0 + c_1*x + ... + c_{degree}*x^{degree}
    """
    if secret is None:
        coeffs = [Scalar.random() for _ in range(degree + 1)]
    else:
        coeffs = [secret] + [Scalar.random() for _ in range(degree)]
    return Polynomial(coeffs)
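A brief usage sketch; it assumes the Polynomial instance exposes its coefficient list as coeffs (as in the __call__ method shown later in Example 9) and that Scalar supports equality.

secret = Scalar.random()
p = Polynomial.random(2, secret)
assert len(p.coeffs) == 3       # degree + 1 coefficients
assert p.coeffs[0] == secret    # the secret is the constant term, so p(0) = secret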
Example 4
def recover(indexed_shares):
    """ Takes EXACTLY t (idx, decrypted_share) tuples and performs Langrange interpolation to recover the secret S.
        The validity of the decrypted shares has to be verified prior to a call of this function.
    """
    idxs = [Scalar(idx) for idx, _ in indexed_shares]
    idx, share = indexed_shares[0]
    rec = share * _lagrange_coefficient(Scalar(idx), idxs)
    for idx, share in indexed_shares[1:]:
        rec += share * _lagrange_coefficient(Scalar(idx), idxs)
    return rec
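A hypothetical round trip tying Examples 1, 3 and 4 together (assuming H is the group element the decrypted shares are based on and that Point equality works as in the tests below): Lagrange interpolation in the exponent recovers S = H * secret from any t decrypted shares.

t = 3
secret = Scalar.random()
poly = Polynomial.random(t - 1, secret)
decrypted_shares = [(i, H * poly(i)) for i in range(1, t + 1)]   # H * p(i) plays the role of node i's decrypted share
assert recover(decrypted_shares) == H * secret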
Example 5
def load_config(n=None):
    if n is None:
        n = N

    from hydrand import merkle
    from hydrand.data import ShareCorrectnessProof, NodeInfo

    CONFIG_DIR = os.path.join(CONFIG_BASE_DIR, f"{n:03}")
    if not os.path.exists(CONFIG_DIR):
        assert NETWORK_CONFIG != "amazon", "never generate config on the fly for amazon tests"
        logging.warning("config does not exist, generating one on the fly")
        return generate_sample_config()

    addresses, ports = load_network_config()
    node_infos = []
    for node_id in range(n):
        if MODE == "testing" or NODE_ID == node_id:
            with open(os.path.join(CONFIG_DIR, f"{node_id:03}.secret_key"),
                      "rb") as f:
                keypair = KeyPair(f.read())
            public_key = keypair.public_key
            with open(os.path.join(CONFIG_DIR, f"{node_id:03}.initial_secret"),
                      "rb") as f:
                initial_secret = Scalar.from_bytes(f.read())
        else:
            keypair = None
            initial_secret = None
            with open(os.path.join(CONFIG_DIR, f"{node_id:03}.public_key"),
                      "rb") as f:
                public_key = Point.from_bytes(f.read())

        with open(
                os.path.join(CONFIG_DIR, f"{node_id:03}.initial_pvss_shares"),
                "rb") as f:
            shares = [Point.from_bytes(f.read(32)) for i in range(n - 1)]
        with open(os.path.join(CONFIG_DIR, f"{node_id:03}.initial_pvss_proof"),
                  "rb") as f:
            proof = ShareCorrectnessProof(
                commitments=[
                    Point.from_bytes(f.read(32)) for i in range(n - 1)
                ],
                challenge=Scalar.from_bytes(f.read(32)),
                responses=[
                    Scalar.from_bytes(f.read(32)) for i in range(n - 1)
                ],
            )

        merkle_root = merkle.compute_root(
            [merkle.Hash(bytes(es)) for es in shares])
        node_infos.append(
            NodeInfo(node_id, addresses[node_id], ports[node_id], keypair,
                     public_key, initial_secret, shares, proof, merkle_root))
    return node_infos
Example 6
def _random_codeword(num_nodes: int, recovery_threshold: int) -> List[Scalar]:
    f = Polynomial.random(num_nodes - recovery_threshold - 1)
    codeword = []
    for i in range(1, num_nodes + 1):
        # vi's could be precomputed given n and t
        vi = Scalar(1)
        for j in range(1, num_nodes + 1):
            if j != i:
                vi *= Scalar((i - j) % GROUP_ORDER)
        vi.invert()
        codeword.append(vi * f(i))
    return codeword
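The defining property of such a codeword, sketched below under the assumptions that Scalar(0) acts as the additive identity and Scalar equality is available: the codeword is orthogonal to every vector of shares produced by a polynomial of degree at most recovery_threshold - 1, which is what the SCRAPE-style share verification exploits.

def _check_codeword_orthogonality(num_nodes: int, recovery_threshold: int):
    # <codeword, (p(1), ..., p(n))> should be zero for any valid sharing polynomial p
    codeword = _random_codeword(num_nodes, recovery_threshold)
    p = Polynomial.random(recovery_threshold - 1)
    acc = Scalar(0)
    for i in range(1, num_nodes + 1):
        acc += codeword[i - 1] * p(i)
    assert acc == Scalar(0)

_check_codeword_orthogonality(10, 4)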
Example 7
def test_share_verification_invalid_challenge():
    commitments, challenge, responses = proof.commitments, proof.challenge, proof.responses
    challenge = challenge + Scalar(1)
    assert not verify_shares(
        encrypted_shares,
        ShareCorrectnessProof(commitments, challenge, responses), public_keys,
        RECOVERY_THRESHOLD)
Example 8
def share_random_secret(
    receiver_public_keys: List[Point],
    recovery_threshold: int,
    secret_scalar: Optional[Scalar] = None
) -> Tuple[Scalar, List[Point], ShareCorrectnessProof]:
    """ generate a fresh random base secret s (or uses the provided one)
        computes share (s_1, ..., s_n) for S = h^s
        encrypts them with the public keys to obtain ŝ_1, ..., ŝ_n
        compute the verification information
        returns
         - the secret s (which can is used to reveal and verify S)
         - the encrypted shares ŝ_1, ..., ŝ_n
         - the share verification information, i.e. PROOF_D, which consists of
            - the commitments v_1, ..., v_n   (v_i = g^{s_i})
            - the (common) challenge e
            - the responses z_1, ..., z_n
    """
    num_receivers = len(receiver_public_keys)

    secret = secret_scalar or Scalar.random()
    poly = Polynomial.random(recovery_threshold - 1, secret)

    shares = [poly(i) for i in range(1, num_receivers + 1)]
    encrypted_shares = [
        pk * share for pk, share in zip(receiver_public_keys, shares)
    ]
    proof = prove_share_correctness(shares, encrypted_shares,
                                    receiver_public_keys)

    return secret, encrypted_shares, proof
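A usage sketch, assuming the keygen helper from Example 24 and the verify_shares function exercised in Example 7 (neither of which is fully shown here); n = 4 receivers and recovery threshold t = 2 are illustrative values.

keypairs = [keygen() for _ in range(4)]
public_keys = [pk for _, pk in keypairs]
secret, encrypted_shares, proof = share_random_secret(public_keys, recovery_threshold=2)
assert verify_shares(encrypted_shares, proof, public_keys, 2)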
Example 9
def __call__(self, arg: int) -> Scalar:
    x = Scalar(arg)
    result = self.coeffs[0] + (self.coeffs[1] * x)
    x_pow = copy(x)
    for i in range(2, len(self.coeffs)):
        x_pow *= x
        result += self.coeffs[i] * x_pow
    return result
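A small sketch checking the evaluation against a manual expansion (it assumes the Polynomial constructor from Example 3 and Scalar equality):

c0, c1, c2 = Scalar(2), Scalar(3), Scalar(5)
p = Polynomial([c0, c1, c2])
x = Scalar(4)
assert p(4) == c0 + c1 * x + c2 * x * x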
Example 10
def _DLEQ_derive_challenge(x, y, a1, a2):
    """ Compute (common) challenge e = H(x_1, y_1, a_11, a_21, ..., x_n, y_n, a_1n, a_2n).
        Compared to the SCRAPE paper, the order of the arguments is changed for a more concise implementation.
    """
    n = len(x)
    hasher = hashlib.sha512()
    for i in range(n):
        hasher.update(bytes(x[i]))
        hasher.update(bytes(y[i]))
        hasher.update(bytes(a1[i]))
        hasher.update(bytes(a2[i]))
    return Scalar.reduce(hasher.digest())
Example 11
def _DLEQ_prove(g, x, h, y, α):
    """ Performs a the DLEQ NIZK protocol for the given values g, x, h, y and the exponent α.
        I.e. the prover shows that he knows α such that x = g^α and y = h^α holds.
        To perform the proving prodedure in parallel (but with a common challenge) g, x, h, y and α might be lists.
    """
    g, x, h, y, α = _listify(g, x, h, y, α)
    assert len(g) == len(x) == len(h) == len(y) == len(α)
    n = len(g)

    w = [Scalar.random() for _ in range(n)]  # w random element from Zq
    a1 = [g[i] * w[i] for i in range(n)]  # a1 = g^w
    a2 = [h[i] * w[i] for i in range(n)]  # a2 = h^w
    e = _DLEQ_derive_challenge(x, y, a1, a2)  # the challenge e
    z = [w[i] - (α[i] * e) for i in range(n)]  # the response(s) z
    return e, z[0] if n == 1 else z
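The matching verifier is not included in this listing. A plausible sketch, reconstructed from the prover above (the actual _DLEQ_verify may differ), recomputes a1 = g^z * x^e and a2 = h^z * y^e and checks that they hash back to the same challenge; it assumes _listify behaves as in _DLEQ_prove.

def _dleq_verify_sketch(g, x, h, y, e, z):
    # since z = w - α*e, it holds that g^z * x^e = g^w = a1 (and likewise for a2)
    g, x, h, y, z = _listify(g, x, h, y, z)
    n = len(g)
    a1 = [g[i] * z[i] + x[i] * e for i in range(n)]
    a2 = [h[i] * z[i] + y[i] * e for i in range(n)]
    return e == _DLEQ_derive_challenge(x, y, a1, a2)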
Example 12
def test_dleq():
    α = Scalar.random()
    e, z = _DLEQ_prove(G, G * α, H, H * α, α)
    assert _DLEQ_verify(G, G * α, H, H * α, e, z)
Example 13
def test_verify_invalid_secret():
    commitments = proof.commitments
    assert not verify_secret(secret + Scalar(1), commitments,
                             RECOVERY_THRESHOLD)
Example 14
def deterministic_random_scalar(purpose: Optional[str] = None, counter: Optional[int] = None):
    purpose = "__scalar__ || " + (purpose or "")
    return Scalar.reduce(deterministic_random_bytes(64, purpose, counter))
Example 15
def test_double_eq_times_2():
    assert B + B == B * Scalar(2)
Example 16
assert N == 128, "set N = 128 in config.py before running this file"

keypairs = [KeyPair.random() for i in range(N - 1)]
public_keys = [k.public_key for k in keypairs]
shared_secret, encrypted_shares, proofs = pvss.share_random_secret(
    public_keys, T)
decrypted_shares = [
    pvss.decrypt_share(share, keypair.secret_scalar)
    for share, keypair in zip(encrypted_shares, keypairs)
]

dataset_header = DatasetHeader(
    round_idx=4711,
    prev_round_idx=4710,
    revealed_secret=Scalar.random(),
    beacon=Hash(utils.deterministic_random_bytes(32, "some beacon")),
    recovered_beacons=[],
    merkle_root=merkle.compute_root(
        [Hash(bytes(es)) for es in encrypted_shares]))

decryption_proofs = [
    pvss.prove_share_decryption(decrypted_share, encrypted_share,
                                keypair.secret_scalar, keypair.public_key)
    for decrypted_share, encrypted_share, keypair in zip(
        decrypted_shares, encrypted_shares, keypairs)
]
merkle_branches = [
    merkle.compute_branch([Hash(bytes(s.value))
                           for s in encrypted_shares], share_idx)
    for share_idx, _ in enumerate(encrypted_shares)
]
Example 17
def test_base_times_zero_fails():
    with pytest.raises(ValueError):
        Point.base_times(Scalar(0))
Example 18
def test_multiply_inplace():
    X = copy(B)
    X *= Scalar(3)
    assert X == B + B + B
Example 19
def test_multiply_by_zero_fails():
    with pytest.raises(ValueError):
        B * Scalar(0)
Example 20
def test_multiply_flipped_order():
    assert B * Scalar(17) == Scalar(17) * B
Example 21
def test_dleq_invalid_challenge():
    α = Scalar.random()
    e, z = _DLEQ_prove(G, G * α, H, H * α, α)
    e += Scalar(1)
    assert not _DLEQ_verify(G, G * α, H, H * α, e, z)
Example 22
def test_double_inplace():
    B2 = copy(B)
    B2 += B2
    assert B2 == B * Scalar(2)
Example 23
def test_dleq_non_equal():
    a = Scalar.random()
    b = Scalar.random()
    e, z = _DLEQ_prove(G, G * a, H, H * b, a)
    assert not _DLEQ_verify(G, G * a, H, H * b, e, z)
Example 24
def keygen():
    """ generates a fresh ed25519 keypair (sk, pk = h^sk) for a participant in the PVSS
    """
    secret_key = Scalar.random()
    public_key = H * secret_key
    return secret_key, public_key
Example 25
def test_base_multiply_by_one():
    assert B == Point.base_times(Scalar(1))
Example 26
def get(obj_type, num_elements=None, **kwargs):
    mapping = {
        Scalar: lambda: Scalar.random(),
        Point: lambda: Point.base_times(Scalar.random()),
        Hash: lambda: Hash(secrets.token_bytes(32)),
        Signature: lambda: Signature(secrets.token_bytes(64)),
        RecoveryCertificate:
            lambda: RecoveryCertificate(
                signers=random.sample(list(range(N)), F + 1),
                signatures=get(Signature, F + 1),
        ),
        ConfirmationCertificate:
            lambda: ConfirmationCertificate(
                dataset_header_digest=get(Hash),
                signers=random.sample(list(range(N)), F + 1),
                signatures=get(Signature, F + 1),
        ),
        ShareCorrectnessProof:
            lambda: ShareCorrectnessProof(
                commitments=get(Point, N - 1),
                challenge=get(Scalar),
                responses=get(Scalar, N - 1),
        ),
        ShareDecryptionProof:
            lambda: ShareDecryptionProof(
                challenge=get(Scalar),
                response=get(Scalar),
        ),
        RecoveredShare:
            lambda: RecoveredShare(
                share=get(Point),
                proof=get(ShareDecryptionProof),
                merkle_branch=get(Hash, merkle.branch_length(N - 1)),
        ),
        DatasetHeader:
            lambda **kws: DatasetHeader(
                round_idx=kws['round_idx'],
                prev_round_idx=kws['prev_round_idx'],
                revealed_secret=get(Scalar),
                beacon=get(Hash),
                recovered_beacons=get(Hash, kws['round_idx'] - kws['prev_round_idx'] - 1),
                merkle_root=get(Hash),
        ),
        Dataset:
            lambda **kws: Dataset(
                round_idx=kws['round_idx'],
                prev_round_idx=kws['prev_round_idx'],
                revealed_secret=get(Scalar),
                beacon=get(Hash),
                recovered_beacons=get(Hash, kws['round_idx'] - kws['prev_round_idx'] - 1),
                merkle_root=get(Hash),
                encrypted_shares=get(Point, N - 1),
                proof=get(ShareCorrectnessProof),
                confirmation_certificate=None if kws['prev_round_idx'] == 0 else get(ConfirmationCertificate),
                recovery_certificates=get(RecoveryCertificate, kws['round_idx'] - kws['prev_round_idx'] - 1),
        ),
        ProposeMessage:
            lambda **kws: ProposeMessage(
                sender=random.randint(0, N - 1),
                dataset=get(Dataset, **kws),
                dataset_header_signature=get(Signature),
                confirmation_certificate_signature=get(Signature),
        ),
        AcknowledgeMessage:
            lambda **kws: AcknowledgeMessage(
                sender=random.randint(0, N - 1),
                dataset_header=get(DatasetHeader, **kws),
                dataset_header_signature=get(Signature),
        ),
        ConfirmMessage:
            lambda **kws: ConfirmMessage(
                sender=random.randint(0, N - 1),
                round_idx=kws.get('round_idx', random.randint(0, 1000_000)),
                dataset_header_digest=get(Hash),
        ),
        RecoverMessage:
            lambda **kws: RecoverMessage(
                sender=random.randint(0, N - 1),
                round_idx=kws.get('round_idx', random.randint(0, 1000_000)),
                recovery_certificate_signature=get(Signature),
                recovered_share=get(RecoveredShare) if kws.get('add_recovered_share', True) else None
        ),
        SignedMessage:
            lambda **kws: SignedMessage(
                message=get(kws['msg_type'], **kws),
                signature=get(Signature),
        )
    }
    if num_elements is None:
        return mapping[obj_type](**kwargs)
    return [mapping[obj_type](**kwargs) for _ in range(num_elements)]
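A short usage sketch for the factory above (the parameter values are illustrative):

s = get(Scalar)                                             # a single random Scalar
points = get(Point, num_elements=5)                         # a list of five random Points
header = get(DatasetHeader, round_idx=7, prev_round_idx=5)  # header with one recovered beacon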