def check_proof_combined(setup, public_key_serialized, custody_value, proof):
    """Verify a custody proof (D, E, Pi) against a serialized public key and
    custody value using a single combined pairing-product check.

    The separate equations of check_proof_simple are folded into one product
    weighted by a Fiat-Shamir challenge ``r`` derived from the proof and the
    public inputs.  Returns True iff the product is one after final
    exponentiation.
    """
    D_serialized, E_serialized, Pi_serialized = proof
    D = blst.P1(D_serialized)
    E = blst.P2(E_serialized)
    Pi = blst.P1(Pi_serialized)
    # Fiat-Shamir challenge: bind r to the proof and all public inputs.
    r = int.from_bytes(
        hash(list(proof) + [public_key_serialized, custody_value]),
        "little") % MODULUS
    # NOTE(review): r2 is computed but never used below — possibly a leftover
    # from a version that weighted a third equation; confirm before removing.
    r2 = r * r % MODULUS
    public_key = blst.P1(public_key_serialized)
    # b evaluated on the domain: b_i = c_i * custody_value + d_i.
    b_values = [
        C_CONSTANTS[i] * custody_value + D_CONSTANTS[i] for i in range(N)
    ]
    B = lincomb_naive(setup["g1_lagrange"], b_values)
    # Commitment C = pk + B.
    C = public_key.dup().add(B)
    # Combined product (PT takes a G2 affine point first, then G1):
    # e(-D, r*G2 + E) * e(r*G1, E) * e(C, G2) * e(Pi, g2_zero) == 1.
    # D.dup() is required: .neg() mutates its receiver in place.
    pairing = blst.PT(blst.G2().mult(r).add(E).to_affine(),
                      D.dup().neg().to_affine())
    pairing.mul(blst.PT(E.to_affine(), blst.G1().mult(r).to_affine()))
    pairing.mul(blst.PT(blst.G2().to_affine(), C.to_affine()))
    pairing.mul(blst.PT(setup["g2_zero"].to_affine(), Pi.to_affine()))
    return pairing.final_exp().is_one()
def insert_verkle_node(root, key, value):
    """
    Insert node without updating hashes/commitments (useful for building a full trie)

    Walks inner nodes along the verkle indices of ``key``.  An empty slot
    receives a new leaf directly; a colliding leaf with a different key is
    replaced by a fresh inner node and both keys are re-inserted, pushing
    them one level deeper until their index paths diverge.
    """
    current_node = root
    indices = iter(get_verkle_indices(key))
    index = None
    while current_node["node_type"] == "inner":
        # Remember the parent so a colliding leaf can be replaced below.
        previous_node = current_node
        index = next(indices)
        if index in current_node:
            current_node = current_node[index]
        else:
            # Empty slot: place the new leaf directly.
            current_node[index] = {
                "node_type": "leaf",
                "key": key,
                "value": value
            }
            return
    # current_node is a leaf here.
    if current_node["key"] == key:
        current_node["value"] = value
    else:
        # Collision with a different key: split by replacing the leaf with a
        # new inner node (identity commitment), then re-insert both keys.
        previous_node[index] = {
            "node_type": "inner",
            "commitment": blst.G1().mult(0)
        }
        insert_verkle_node(root, key, value)
        insert_verkle_node(root, current_node["key"], current_node["value"])
def check_proof_simple(setup, public_key_serialized, custody_value, proof):
    """Verify a custody proof (D, E, Pi) with two separate pairing checks
    (the unbatched counterpart of check_proof_combined).

    Returns True iff both pairing products equal one.
    """
    D_serialized, E_serialized, Pi_serialized = proof
    D = blst.P1(D_serialized)
    E = blst.P2(E_serialized)
    Pi = blst.P1(Pi_serialized)
    public_key = blst.P1(public_key_serialized)
    # b evaluated on the domain: b_i = c_i * custody_value + d_i.
    b_values = [
        C_CONSTANTS[i] * custody_value + D_CONSTANTS[i] for i in range(N)
    ]
    B = lincomb_naive(setup["g1_lagrange"], b_values)
    # Commitment C = pk + B.
    C = public_key.dup().add(B)
    # Check 1: e(D, G2) * e(-G1, E) == 1, i.e. D (in G1) and E (in G2)
    # commit to the same scalar.  (blst.PT takes the G2 point first.)
    pairing = blst.PT(blst.G2().to_affine(), D.to_affine())
    pairing.mul(blst.PT(E.to_affine(), blst.G1().neg().to_affine()))
    if not pairing.final_exp().is_one():
        return False
    # Check 2: e(-D, E) * e(Pi, g2_zero) * e(C, G2) == 1.
    # D.dup() is required: .neg() mutates its receiver in place.
    pairing = blst.PT(E.to_affine(), D.dup().neg().to_affine())
    pairing.mul(blst.PT(setup["g2_zero"].to_affine(), Pi.to_affine()))
    pairing.mul(blst.PT(blst.G2().to_affine(), C.to_affine()))
    if not pairing.final_exp().is_one():
        return False
    return True
def generate_setup(size, secret):
    """
    Generates a setup in the G1 group and G2 group, as well as the Lagrange
    polynomials in G1 (via FFT)
    """
    # Powers of the secret are shared between both groups, so compute once.
    secret_powers = [pow(secret, exponent, MODULUS) for exponent in range(size)]
    points_g1 = [blst.G1().mult(s) for s in secret_powers]
    points_g2 = [blst.G2().mult(s) for s in secret_powers]
    # Inverse FFT converts the monomial-basis G1 points to the Lagrange basis.
    lagrange_g1 = fft(points_g1, MODULUS, ROOT_OF_UNITY, inv=True)
    return {
        "g1": points_g1,
        "g2": points_g2,
        "g1_lagrange": lagrange_g1,
    }
def lincomb_naive(group_elements, factors, start_value=None):
    """
    Direct linear combination: start_value + sum_i factors[i] * group_elements[i].

    ``start_value`` defaults to the G1 identity.  Using a ``None`` sentinel
    (instead of a mutable default evaluated at def time) means the default
    group element is freshly created per call, avoiding a module-level shared
    object.  Inputs are never mutated: every element is ``dup()``-ed before
    ``mult``/``add``.
    """
    assert len(group_elements) == len(factors)
    if start_value is None:
        start_value = blst.G1().mult(0)
    result = start_value.dup()
    for g, f in zip(group_elements, factors):
        result.add(g.dup().mult(f))
    return result
def check_kzg_proof(self, C, z, y, pi):
    """
    Check the KZG proof e(C - [y], [1]) = e(pi, [s - z])
    which is equivalent to
    e(C - [y], [1]) * e(-pi, [s - z]) == 1
    """
    # First factor: e(C - y*G1, G2).  C.dup() protects the caller's
    # commitment; .neg() mutates the freshly built y*G1 in place.
    pairing = blst.PT(blst.G2().to_affine(),
                      C.dup().add(blst.G1().mult(y).neg()).to_affine())
    # Second factor: e(-pi, [s - z]); SETUP["g2"][1] is [s] in G2.
    pairing.mul(
        blst.PT(
            self.SETUP["g2"][1].dup().add(
                blst.G2().mult(z).neg()).to_affine(),
            pi.dup().neg().to_affine()))
    # Valid iff the product is the identity after final exponentiation.
    return pairing.final_exp().is_one()
def _fft(vals, modulus, roots_of_unity):
    """
    Recursive Cooley-Tukey FFT working over either blst.P1 group elements or
    plain integers mod ``modulus`` (the element type is detected from the
    first entry and assumed uniform).

    Returns a new list; input lists are not mutated.
    """
    if len(vals) == 1:
        return vals
    # Even/odd split; both halves use the squared roots (every other root).
    L = _fft(vals[::2], modulus, roots_of_unity[::2])
    R = _fft(vals[1::2], modulus, roots_of_unity[::2])
    # Hoisted type test: the original evaluated isinstance three times per
    # butterfly inside conditional expressions.
    if isinstance(vals[0], blst.P1):
        o = [blst.G1().mult(0) for i in vals]
        for i, (x, y) in enumerate(zip(L, R)):
            y_times_root = y.dup().mult(roots_of_unity[i])
            o[i] = x.dup().add(y_times_root)
            # .neg() mutates y_times_root in place; it is dead afterwards.
            o[i + len(L)] = x.dup().add(y_times_root.neg())
    else:
        o = [0 for i in vals]
        for i, (x, y) in enumerate(zip(L, R)):
            y_times_root = y * roots_of_unity[i]
            o[i] = (x + y_times_root) % modulus
            o[i + len(L)] = (x - y_times_root) % modulus
    return o
def generate_setup(N, secret):
    """
    Generates a setup in the G1 group and G2 group, as well as the Lagrange
    polynomials in G1 (via FFT)
    """
    # Shared powers of the secret: N+1 of them so g2_zero (degree-N zero
    # polynomial) can be committed as well.
    secret_powers = [pow(secret, exponent, MODULUS) for exponent in range(N + 1)]
    g1_setup = [blst.G1().mult(s) for s in secret_powers]
    g2_setup = [blst.G2().mult(s) for s in secret_powers]
    lagrange_polys = primefield.lagrange_polys(list(range(N)))
    # Lagrange bases in both groups (only the first N powers are needed).
    g1_lagrange = [lincomb_naive(g1_setup[:N], poly) for poly in lagrange_polys]
    g2_lagrange = [
        lincomb_naive(g2_setup[:N], poly, start_value=blst.G2().mult(0))
        for poly in lagrange_polys
    ]
    # Commitment to the vanishing polynomial of the domain, in G2.
    g2_zero = lincomb_naive(g2_setup,
                            primefield.zero_poly(list(range(N))),
                            start_value=blst.G2().mult(0))
    # Sum of all G2 Lagrange commitments.
    g2_one = lincomb_naive(g2_lagrange, [1] * N,
                           start_value=blst.G2().mult(0))
    return {
        "g1": g1_setup,
        "g2": g2_setup,
        "g1_lagrange": g1_lagrange,
        "g2_zero": g2_zero,
        "g2_one": g2_one
    }
def get_proof_size(proof):
    """Total serialized size of a proof, in bytes."""
    return sum(map(len, proof))


if __name__ == "__main__":
    # Trusted setup.
    time_a = time()
    setup = generate_setup(N, 8927347823478352432985)
    time_b = time()
    print("Computed setup in {0:.3f} ms".format(1000 * (time_b - time_a)),
          file=sys.stderr)

    secret_key = pow(523487, 253478, MODULUS) + 1
    public_key = blst.G1().mult(secret_key).compress()

    # Grind for a valid custody value by linear search from a fixed start.
    time_a = time()
    custody_value = 876354679
    values_tried = 1
    while not is_valid_custody_value(secret_key, custody_value):
        custody_value += 1
        values_tried += 1
    time_b = time()
    print("Found custody value in {0:.3f} ms after {1} tries".format(
        1000 * (time_b - time_a), values_tried),
          file=sys.stderr)

    time_a = time()
    proof = compute_proof(setup, secret_key, custody_value)
# Evaluation domain: the WIDTH-th roots of unity in the field.
WIDTH = 2**WIDTH_BITS
ROOT_OF_UNITY = pow(PRIMITIVE_ROOT, (MODULUS - 1) // WIDTH, MODULUS)
DOMAIN = [pow(ROOT_OF_UNITY, i, MODULUS) for i in range(WIDTH)]

# Benchmark sizes from the command line.
# NOTE(review): argv[1] is skipped here — presumably consumed elsewhere
# (e.g. for WIDTH_BITS); confirm against the rest of the file.
NUMBER_INITIAL_KEYS = int(sys.argv[2])
NUMBER_KEYS_PROOF = int(sys.argv[3])
NUMBER_DELETED_KEYS = 0
NUMBER_ADDED_KEYS = 0

SETUP = generate_setup(WIDTH, 8927347823478352432985)
kzg_utils = KzgUtils(MODULUS, WIDTH, DOMAIN, SETUP, primefield)

# Build a random verkle trie
root = {"node_type": "inner", "commitment": blst.G1().mult(0)}
values = {}
for i in range(NUMBER_INITIAL_KEYS):
    key = randint(0, 2**256 - 1).to_bytes(32, "little")
    value = randint(0, 2**256 - 1).to_bytes(32, "little")
    insert_verkle_node(root, key, value)
    # Keep a flat key->value map alongside the trie for later verification.
    values[key] = value

average_depth = get_average_depth(root)
print("Inserted {0} elements for an average depth of {1:.3f}".format(
    NUMBER_INITIAL_KEYS, average_depth),
      file=sys.stderr)