def to_affine(self) -> AffinePoint:
    """
    Convert this Jacobian-coordinate point to affine coordinates.

    The point at infinity is returned as an AffinePoint at (0, 0) with
    the infinity flag preserved; otherwise the affine coordinates are
    x / z^2 and y / z^3.
    """
    q = self.ec.q
    if self.infinity:
        return AffinePoint(Fq.zero(q), Fq.zero(q), self.infinity, self.ec)
    z_sq = self.z ** 2
    z_cu = self.z ** 3
    return AffinePoint(self.x / z_sq, self.y / z_cu, self.infinity, self.ec)
def to_affine(self):
    """
    Convert this Jacobian-coordinate point to affine coordinates.

    Returns the point at infinity as (0, 0) with the infinity flag set;
    otherwise divides x by z^2 and y by z^3.
    """
    if self.infinity:
        return AffinePoint(Fq.zero(self.ec.q), Fq.zero(self.ec.q),
                           self.infinity, self.ec)
    # Use the ** operator instead of pow() for consistency with the
    # sibling to_affine implementation elsewhere in this codebase.
    new_x = self.x / (self.z ** 2)
    new_y = self.y / (self.z ** 3)
    return AffinePoint(new_x, new_y, self.infinity, self.ec)
def verify(self):
    """
    Verify this aggregate signature.

    The public keys are first grouped by the message hash they signed.
    Within each group, duplicate keys are removed and every remaining key
    is scaled by the exponent recorded for it in the AggregationInfo tree
    (these exponents are what make the aggregation secure); the scaled
    keys are summed so that each group collapses to a single public key
    bound to a unique message hash. The distinct-message verification
    procedure (a multi-pairing check) is then applied to those pairs.
    Returns False if any (hash, key) pair has no recorded exponent.
    """
    hashes = self.aggregation_info.message_hashes
    pub_keys = self.aggregation_info.public_keys
    assert (len(hashes) == len(pub_keys))

    # Group public keys by the message hash they correspond to.
    groups = {}
    for msg_hash, pk in zip(hashes, pub_keys):
        groups.setdefault(msg_hash, []).append(pk)

    ec = pub_keys[0].value.ec
    final_hashes = []
    final_keys = []
    for msg_hash, keys in groups.items():
        # Fresh identity (point at infinity) accumulator for this group.
        combined = JacobianPoint(Fq.one(ec.q), Fq.one(ec.q),
                                 Fq.zero(ec.q), True, ec)
        for pk in set(keys):
            try:
                exponent = self.aggregation_info.tree[(msg_hash, pk)]
                combined += (pk.value * exponent)
            except KeyError:
                return False
        final_hashes.append(msg_hash)
        final_keys.append(combined.to_affine())

    mapped_hashes = [hash_to_point_prehashed_Fq2(mh) for mh in final_hashes]
    # -g1 pairs with the signature itself in the product-of-pairings check.
    neg_g1 = Fq(default_ec.n, -1) * generator_Fq()
    Ps = [neg_g1] + final_keys
    Qs = [self.value.to_affine()] + mapped_hashes
    res = ate_pairing_multi(Ps, Qs, default_ec)
    return res == Fq12.one(default_ec.q)
def aggregate(public_keys, secure):
    """
    Aggregates public keys together.

    Args:
        public_keys: non-empty list of PublicKey objects to combine.
        secure: when truthy, each key is scaled by its hash-derived
            exponent (BLS.hash_pks) before summing, as required by the
            secure aggregation scheme.

    Returns:
        A PublicKey wrapping the sum of the (possibly exponentiated) keys.

    Raises:
        Exception: if no public keys are supplied.
    """
    if not public_keys:
        raise Exception("Invalid number of keys")
    # Sort into a new list instead of sorting in place, so the caller's
    # list is not mutated as a side effect.
    sorted_keys = sorted(public_keys)
    computed_Ts = BLS.hash_pks(len(sorted_keys), sorted_keys)

    ec = sorted_keys[0].value.ec
    # Start from the identity (point at infinity) in Jacobian coordinates.
    sum_keys = JacobianPoint(Fq.one(ec.q), Fq.one(ec.q), Fq.zero(ec.q),
                             True, ec)
    for public_key, exponent in zip(sorted_keys, computed_Ts):
        addend = public_key.value
        if secure:
            addend *= exponent
        sum_keys += addend
    return PublicKey.from_g1(sum_keys)
# # point serialization / deserialization # using the "enhanced ZCash" format proposed in # https://github.com/pairingwg/bls_standard/issues/16 # (C) 2019 Riad S. Wahby <*****@*****.**> # # see the comment at the top of ../sage-impl/serdes.sage for more information import struct from consts import p from curve_ops import from_jacobian, point_eq from fields import Fq, Fq2, sgn0, sqrt_F2 F1_one = Fq.one(p) F1_zero = Fq.zero(p) F2_one = Fq2.one(p) F2_zero = Fq2.zero(p) class DeserError(Exception): pass class SerError(Exception): pass def serialize(P, compressed=True): if isinstance(P[0], Fq): return _serialize_ell1(P, compressed)
# Tail of the Jacobian-coordinate on-curve test (the enclosing
# `def _on_curve(P, b)` header precedes this chunk; x, y, z are P's
# Jacobian coordinates and b is the curve constant).
    ySq = y**2
    xSq = x**2
    xCu = x * xSq
    z2 = z**2
    z4 = z2**2
    z6 = z4 * z2
    # The point at infinity is encoded as x == 0, z == 0 with y != 0.
    infty = x == 0 and y != 0 and z == 0
    # Curve equation y^2 = x^3 + b with denominators cleared:
    # Y^2 == X^3 + b * Z^6 in Jacobian coordinates.
    match = ySq == xCu + b * z6
    return infty or match

# Specialize the on-curve test with each group's curve constant (b = 4
# for G1 and b = 4 + 4i for G2).
on_curve_g1 = lambda P: _on_curve(P, Fq(p, 4))
on_curve_g2 = lambda P: _on_curve(P, Fq2(p, 4, 4))


def _subgroup_check(P, on_curve_fn, id_pt):
    """
    Return True iff P is on the curve and q_chain(P) equals the identity
    point id_pt (presumably q_chain multiplies by the subgroup order —
    confirm against its definition).
    """
    if not on_curve_fn(P):
        return False
    Q = q_chain(P)
    return point_eq(Q, id_pt)

# Identity points (point at infinity, Jacobian form (0, 1, 0)) for each
# group, and the per-group subgroup membership checks.
id_g1 = (Fq.zero(p), Fq.one(p), Fq.zero(p))
subgroup_check_g1 = lambda P: _subgroup_check(P, on_curve_g1, id_g1)

id_g2 = (Fq2.zero(p), Fq2.one(p), Fq2.zero(p))
subgroup_check_g2 = lambda P: _subgroup_check(P, on_curve_g2, id_g2)