def aggregate_sigs_secure(signatures, public_keys, message_hashes):
    """
    Aggregate signatures using the secure method: each signature is
    raised to an exponent derived from the set of public keys before
    the results are multiplied together. This resists rogue-public-key
    attacks, at the cost of being slower than simple aggregation.

    Raises:
        Exception: if the three input lists do not all have equal length.
    """
    if len(signatures) != len(public_keys) or \
            len(public_keys) != len(message_hashes):
        raise Exception("Invalid number of keys")

    # Pair each (message hash, public key, signature) and order the
    # triples by (message hash, public key).
    triples = sorted(zip(message_hashes, public_keys, signatures))

    # Exponents t_i derived by hashing the public keys.
    # NOTE(review): the exponents are computed from public_keys in its
    # original order, while the triples above were re-sorted — confirm
    # that hash_pks / the verifier agree on this ordering.
    exponents = hash_pks(len(public_keys), public_keys)

    curve = public_keys[0].ec
    # Identity element of G2 (point at infinity) as the running product.
    agg = JacobianPoint(Fq2.one(curve.q), Fq2.one(curve.q),
                        Fq2.zero(curve.q), True, curve)
    for t, (_, _, sig) in zip(exponents, triples):
        agg += sig * t
    return Signature.from_g2(agg)
def sign_with_coefficient(sk, m, player, players):
    """
    As the given player out of a list of player indices, return a
    signature share for message *m*, pre-multiplied by this player's
    Lagrange coefficient at zero so the shares can be combined directly.
    """
    assert player in players
    coeffs = Threshold.lagrange_coeffs_at_zero(players)
    my_coeff = coeffs[players.index(player)]
    point = hash_to_point_Fq2(m).to_jacobian()
    return Signature.from_g2(sk.value * (point * my_coeff))
def aggregate_unit_sigs(signatures: List[Signature], players: List[int],
                        T: int, ec=default_ec) -> Signature:
    """
    Combine threshold signature shares into one signature by weighting
    each share with its Lagrange coefficient at zero and multiplying
    the results together.

    Note: *T* (the threshold) is accepted for interface compatibility
    but is not consulted by this implementation.
    """
    coeffs = Threshold.lagrange_coeffs_at_zero(players, ec)
    # Start from the group identity (zero affine point, infinity flag set).
    total = AffinePoint(Fq2.zero(ec.q), Fq2.zero(ec.q),
                        True, ec).to_jacobian()
    for sig, lamb in zip(signatures, coeffs):
        total += sig.value * lamb
    return Signature.from_g2(total)
def aggregate_sigs_simple(signatures):
    """
    Aggregate signatures by multiplying them together. This is NOT
    secure against rogue public key attacks, so do not use this
    for signatures on the same message.
    """
    field_order = default_ec.q
    # Group identity: the zero affine point with the infinity flag set.
    total = AffinePoint(Fq2.zero(field_order), Fq2.zero(field_order),
                        True, default_ec).to_jacobian()
    for signature in signatures:
        total = total + signature.value
    return Signature.from_g2(total)
def sign_prehashed(self, h):
    """
    Sign an already-hashed message *h* with this private key, attaching
    aggregation info built from the corresponding public key and hash.
    """
    info = AggregationInfo.from_msg_hash(self.get_public_key(), h)
    point = hash_to_point_prehashed_Fq2(h).to_jacobian()
    return Signature.from_g2(self.value * point, info)
def sign(self, m):
    """
    Sign message *m* with this private key, attaching aggregation info
    built from the corresponding public key and the message.
    """
    info = AggregationInfo.from_msg(self.get_public_key(), m)
    point = hash_to_point_Fq2(m).to_jacobian()
    return Signature.from_g2(self.value * point, info)
def aggregate_sigs(signatures):
    """
    Aggregates many (aggregate) signatures, using a combination of simple
    and secure aggregation. Signatures are grouped based on which ones
    share common messages, and these are all merged securely.

    Raises:
        Exception: if any input signature lacks aggregation info.
    """
    public_keys = []  # List of lists
    message_hashes = []  # List of lists

    # Every signature must carry aggregation info so its keys/messages
    # can be inspected for collisions.
    for signature in signatures:
        if signature.aggregation_info.empty():
            raise Exception(
                "Each signature must have a valid aggregation " + "info")
        public_keys.append(signature.aggregation_info.public_keys)
        message_hashes.append(signature.aggregation_info.message_hashes)

    # Find colliding vectors, save colliding messages.
    # A message "collides" when it appears in more than one signature's
    # vector; duplicates within a single vector do not count, which is
    # what messages_set_local guards against.
    messages_set = set()
    colliding_messages_set = set()

    for msg_vector in message_hashes:
        messages_set_local = set()
        for msg in msg_vector:
            if msg in messages_set and msg not in messages_set_local:
                colliding_messages_set.add(msg)
            messages_set.add(msg)
            messages_set_local.add(msg)

    if len(colliding_messages_set) == 0:
        # There are no colliding messages between the groups, so we
        # will just aggregate them all simply. Note that we assume
        # that every group is a valid aggregate signature. If an invalid
        # or insecure signature is given, and invalid signature will
        # be created. We don't verify for performance reasons.
        final_sig = BLS.aggregate_sigs_simple(signatures)
        aggregation_infos = [sig.aggregation_info for sig in signatures]
        final_agg_info = AggregationInfo.merge_infos(aggregation_infos)
        final_sig.set_aggregation_info(final_agg_info)
        return final_sig

    # There are groups that share messages, therefore we need
    # to use a secure form of aggregation. First we find which
    # groups collide, and securely aggregate these. Then, we
    # use simple aggregation at the end.
    colliding_sigs = []
    non_colliding_sigs = []
    colliding_message_hashes = []  # List of lists
    colliding_public_keys = []  # List of lists

    # Partition the signatures: a group goes into the colliding pile as
    # soon as any one of its messages is in the colliding set.
    for i in range(len(signatures)):
        group_collides = False
        for msg in message_hashes[i]:
            if msg in colliding_messages_set:
                group_collides = True
                colliding_sigs.append(signatures[i])
                colliding_message_hashes.append(message_hashes[i])
                colliding_public_keys.append(public_keys[i])
                break
        if not group_collides:
            non_colliding_sigs.append(signatures[i])

    # Arrange all signatures, sorted by their aggregation info
    colliding_sigs.sort(key=lambda s: s.aggregation_info)

    # Arrange all public keys in sorted order, by (m, pk)
    sort_keys_sorted = []
    for i in range(len(colliding_public_keys)):
        for j in range(len(colliding_public_keys[i])):
            sort_keys_sorted.append((colliding_message_hashes[i][j],
                                     colliding_public_keys[i][j]))
    sort_keys_sorted.sort()
    sorted_public_keys = [pk for (mh, pk) in sort_keys_sorted]

    # One exponent per colliding signature, derived from the sorted keys.
    computed_Ts = hash_pks(len(colliding_sigs), sorted_public_keys)

    # Raise each sig to a power of each t,
    # and multiply all together into agg_sig
    ec = sorted_public_keys[0].value.ec
    # Start from the G2 identity (point at infinity).
    agg_sig = JacobianPoint(Fq2.one(ec.q), Fq2.one(ec.q), Fq2.zero(ec.q),
                            True, ec)

    # Colliding groups are weighted by their exponent (secure aggregation);
    # non-colliding groups are simply multiplied in.
    for i, signature in enumerate(colliding_sigs):
        agg_sig += signature.value * computed_Ts[i]
    for signature in non_colliding_sigs:
        agg_sig += signature.value

    final_sig = Signature.from_g2(agg_sig)
    aggregation_infos = [sig.aggregation_info for sig in signatures]
    final_agg_info = AggregationInfo.merge_infos(aggregation_infos)
    final_sig.set_aggregation_info(final_agg_info)
    return final_sig