def parallel_all_neighbors(algo, db, start, tag):
    """Explore all 48 single-step neighbor classes of `start` in parallel.

    Builds one SAT model per step index i (0..47) and submits each as an
    asynchronous job via `compute`; as jobs finish, satisfying results are
    turned into new collision rows, appended to `cols`, and the step index
    is re-queued to search for further neighbors.

    Returns the list of discovered collision rows (including `start` first);
    all rows except the original are inserted into `db` under `tag`.
    """
    cols = [start.copy()]
    # Record the seed row under a distinguishable tag.
    hash_framework.attacks.collision.insert_db_multiple(algo, db, cols, tag + "-original")
    wq = []  # work queue of step indices still to be tried
    for i in range(0, 48):
        wq.append((i))
    jq = []  # in-flight jobs: (step index, job handle)
    while len(wq) > 0:
        print(wq)
        i = wq.pop(0)
        # Build the SAT model for neighbor step i against all known cols.
        m = hash_framework.models()
        m.start(tag+"-r" + str(i), False)
        hash_framework.models.vars.write_header()
        hash_framework.models.generate(algo, ['h1', 'h2'])
        hash_framework.attacks.collision.connected.loose.constraints_new_neighbor(algo, cols, i)
        hash_framework.attacks.collision.write_constraints(algo)
        hash_framework.attacks.collision.write_optional_differential(algo)
        hash_framework.models.vars.write_values(algo.default_state_bits, 'h1s', "01-h1-state.txt")
        hash_framework.models.vars.write_values(algo.default_state_bits, 'h2s', "01-h2-state.txt")
        hash_framework.models.vars.write_assign(['ccollision', 'cblocks', 'cdifferentials'])
        m.collapse()
        m.build()
        # Submit asynchronously; jqj is the job descriptor.
        jqj = compute.perform_sat("problem.cnf", "problem.out", count=1, no_wait=True)
        jq.append((i, jqj))
        # Block while no worker is free, or drain remaining jobs once the
        # work queue is empty.
        while compute.assign_work() is None or (len(wq) == 0 and len(jq) > 0):
            print("Waiting for work...")
            fj = compute.wait_job_hosts(loop_until_found=True)
            fj_status = fj[0]  # SAT/UNSAT outcome
            fj_job = fj[1]     # finished job descriptor
            for j in range(0, len(jq)):
                jqe = jq[j]
                jqr = jqe[0]  # step index of this queued job
                jqj = jqe[1]  # its job descriptor
                if jqj[0] == fj_job[0]:
                    print("Found finished job:")
                    print((fj, jqe))
                    if fj_status:
                        # SAT: load results and grow the collision set, then
                        # re-queue this step index for another pass.
                        m = hash_framework.models()
                        m.start(tag + "-r" + str(jqr), False)
                        rs = m.results(algo)
                        ncols = hash_framework.attacks.collision.build_col_rows(algo, db, rs, tag)
                        cols.extend(ncols)
                        wq.append(jqr)
                    jq.remove(jq[j])
                    break
    # Persist everything except the original seed row.
    hash_framework.attacks.collision.insert_db_multiple(algo, db, cols[1:], tag)
    return cols
def block_extender(w, rounds, margin):
    """Count fixed points of Keccak-f (width w, given rounds) where all state
    bits at index >= margin are forced to F on input and required to be equal
    on input and output.  Prints the number of satisfying assignments found.
    """
    algo = hf.algorithms.sha3(w=w, rounds=rounds)
    tag = "keccak-attacks-fixed-point"
    m = hf.models()
    m.start(tag, recreate=True)
    hf.models.vars.write_header()
    hf.models.generate(algo, ['h1'], rounds=rounds, bypass=True)
    hf.models.vars.write_assign(['cfixed', 'cinput'])

    # Pin every state bit past the margin to F on input.
    cinput = tuple(['and'] + [('equal', 'h1in' + str(bit), 'F')
                              for bit in range(margin, 25 * w)])
    hf.models.vars.write_clause('cinput', cinput, '50-input.txt')

    # Require output == input on those same bits (the fixed-point condition).
    cfixed = tuple(['and'] + [('equal', 'h1in' + str(bit), 'h1out' + str(bit))
                              for bit in range(margin, 25 * w)])
    hf.models.vars.write_clause('cfixed', cfixed, '50-fixed.txt')

    m.collapse()
    m.build()
    m.run(count=10000000)

    # Tally the solutions the solver produced.
    total = 0
    for _solution in m.results_generator(algo, prefixes=["h1"]):
        total += 1
    print(total)
def rtheta_inverse(w, s):
    """Invert a single theta round of Keccak (lane width w).

    Pins the round output to the target state `s` (a sequence of 'T'/'F'
    bits of length 25*w), solves, and returns the first satisfying input
    state, or None when the solver finds nothing.
    """
    algo = hf.algorithms.sha3(w=w, rounds=['t'])
    tag = "keccak-attacks-theta-inverse"
    m = hf.models()
    m.start(tag, recreate=True)
    hf.models.vars.write_header()
    hf.models.generate(algo, ['h1'], rounds=['t'], bypass=True)
    hf.models.vars.write_assign(['cinput', 'coutput'])

    # Constrain every output bit to match the requested state.
    coutput = tuple(['and'] + [('equal', 'h1out' + str(bit), s[bit])
                               for bit in range(0, 25 * w)])
    hf.models.vars.write_clause('coutput', coutput, '50-output.txt')

    m.collapse()
    m.build()
    m.run(count=10000)

    # Return the first preimage found, if any.
    for solution in m.results_generator(algo, prefixes=["h1"]):
        print(solution['h1in'])
        return solution['h1in']
    return None
def run():
    """Sweep SipHash key-leakage experiments over final rounds (fr),
    block rounds (br), block counts (nb) and unknown key bits (kub).

    For each configuration a SAT model is built via
    `_sh.build_test_key_leakage`, solved locally (count=2), and the solver
    output is post-processed with `load_results`.
    """
    import hash_framework.algorithms._siphash as _sh
    import hash_framework as hf

    # NOTE(review): the original also assigned key_unknown_bits,
    # number_of_blocks, block_len, block_rounds and final_rounds here;
    # all were dead (immediately shadowed by the loop variables below)
    # and have been removed.
    for fr in [0, 1, 2, 3]:
        for br in [1, 2]:
            for nb in range(1, 17):
                for kub in range(0, 65, 2):
                    print((kub, nb, br, fr))
                    mdir = ("kub" + str(kub) + "-nb" + str(nb)
                            + "-br" + str(br) + "-fr" + str(fr))
                    m = hf.models()
                    m.start(mdir, False)
                    base_dir = m.model_dir + "/" + mdir
                    _sh.build_test_key_leakage(
                        dir=base_dir,
                        key_unknown_bits=kub,
                        number_of_blocks=nb,
                        block_rounds=br,
                        final_rounds=fr,
                    )
                    m.collapse()
                    m.build()
                    m.remote = False
                    m.run(count=2)
                    load_results(base_dir + "/problem.out",
                                 base_dir + "/problem.cnf")
def all_neighbors(algo, db, start, tag):
    """Sequentially explore all 48 single-step neighbor classes of `start`.

    For each step index i, repeatedly solves a SAT model constraining a new
    neighbor of the known collision set until the model becomes UNSAT; each
    satisfying result is appended to `cols` and stored in `db` under `tag`.
    Returns the accumulated list of collision rows (seed first).
    """
    cols = [start.copy()]
    hash_framework.attacks.collision.insert_db_multiple(algo, db, cols, tag + "-original")
    for i in range(0, 48):
        j = 0  # iteration count within this step index
        while True:
            m = hash_framework.models()
            m.start(tag+"-r" + str(i), False)
            hash_framework.models.vars.write_header()
            # Only generate the base h1/h2 model once per step index; later
            # iterations reuse the model directory and just rewrite constraints.
            if j == 0:
                hash_framework.models.generate(algo, ['h1', 'h2'])
            hash_framework.attacks.collision.connected.loose.constraints_new_neighbor(algo, cols, i)
            hash_framework.attacks.collision.write_constraints(algo)
            hash_framework.attacks.collision.write_optional_differential(algo)
            hash_framework.models.vars.write_values(algo.default_state_bits, 'h1s', "01-h1-state.txt")
            hash_framework.models.vars.write_values(algo.default_state_bits, 'h2s', "01-h2-state.txt")
            hash_framework.models.vars.write_assign(['ccollision', 'cblocks', 'cdifferentials'])
            m.collapse()
            m.build()
            sat = m.run(count=1)
            if not sat:
                # No further neighbors along step i; move to the next index.
                break
            rs = m.results(algo)
            ncols = hash_framework.attacks.collision.build_col_rows(algo, db, rs, tag)
            cols.extend(ncols)
            # NOTE(review): this inserts the raw results `rs`, not `ncols`,
            # unlike parallel_all_neighbors — confirm this is intended.
            hash_framework.attacks.collision.insert_db_multiple(algo, db, rs, tag)
            j += 1
    #attacks.collision.insert_db_multiple(algo, db, cols[1:], tag)
    return cols
def exists_unit_step(algo, start, target, tag="md4-wangs-sasakis-connected"):
    """Test every candidate single-step delta from `start` toward `target`.

    For each candidate produced by the loose delta metric, a SAT model is
    built and solved; satisfying results are collected and returned as one
    flat list (empty when no candidate step is satisfiable).
    """
    pos = hash_framework.attacks.collision.metric.loose.delta(algo, start, target)
    results = []
    for step, candidate in enumerate(pos):
        m = hash_framework.models()
        m.start(tag, False)
        hash_framework.models.vars.write_header()
        # The base h1/h2 model only needs generating once; later candidates
        # reuse the same model directory.
        if step == 0:
            hash_framework.models.generate(algo, ["h1", "h2"])
        hash_framework.attacks.collision.connected.loose.constraints(algo, start, candidate)
        hash_framework.attacks.collision.write_constraints(algo)
        hash_framework.attacks.collision.write_optional_differential(algo)
        hash_framework.models.vars.write_values(
            algo.default_state_bits, "h1s", "01-h1-state.txt"
        )
        hash_framework.models.vars.write_values(
            algo.default_state_bits, "h2s", "01-h2-state.txt"
        )
        hash_framework.models.vars.write_assign(
            ["ccollision", "cblocks", "cdifferentials"]
        )
        m.collapse()
        m.build()
        if not m.run(count=1):
            continue
        results.extend(m.results(algo))
    # attacks.collision.insert_db_multiple(algo, db, rs, tag)
    return results
def run():
    """Run SipHash key-leakage experiments for large block counts.

    Fixed configuration: 1 final round, 1 block round, 16 output bits,
    64 unknown key bits; sweeps the number of blocks over 32/48/64.
    """
    import hash_framework.algorithms._siphash as _sh
    import hash_framework as hf

    fr = 1
    br = 1
    ob = 16
    kub = 64
    for nb in [32, 48, 64]:
        print((ob, kub, nb, br, fr))
        mdir = f"ob{ob}-kub{kub}-nb{nb}-br{br}-fr{fr}"
        m = hf.models()
        m.start(mdir, False)
        base_dir = m.model_dir + "/" + mdir
        _sh.build_test_key_leakage(
            dir=base_dir,
            key_unknown_bits=kub,
            number_of_blocks=nb,
            block_rounds=br,
            final_rounds=fr,
            output_bits=ob,
        )
        m.collapse()
        m.build()
        m.remote = False
        m.run(count=2)
        load_results(base_dir + "/problem.out", base_dir + "/problem.cnf")
def __main__():
    """For each (w, rounds) configuration, re-open the stored collision
    model and dump every result as a Python-literal dict — raw h1/h2
    inputs, per-round states, and their bitwise deltas — one per line,
    to a scratch text file.
    """
    for w in [2]:
        for rounds in range(1, 2):
            algo = hf.algorithms.lookup("sha3")(w=w, rounds=rounds)
            m = hf.models()
            m.remote = False
            # Re-open the existing model directory (recreate=False).
            m.start(
                "scratch-sha3-b" + str(25 * w) + "-r" + str(rounds) + "-collision",
                False,
            )
            # NOTE(review): dead locals from the original (n_inputs, count,
            # double_expansions, subset_expansions, not_expansion, factor and
            # the margin computations) were never used and have been removed.
            out_path = (
                "/home/cipherboy/GitHub/hash_framework/scratch/collisions-w"
                + str(w)
                + "-r"
                + str(rounds)
                + ".txt"
            )
            # Context manager so the handle is closed even if iteration
            # raises (the original leaked it on error).
            with open(out_path, "w") as f:
                print(f)
                f.flush()
                for rs in m.results_generator(algo):
                    obj = {}
                    obj["w"] = w
                    obj["rounds"] = rounds
                    obj["h1input"] = rs["h1i"]
                    obj["h2input"] = rs["h2i"]
                    # NOTE(review): "dinput" and "rinput" were computed
                    # identically in the original; preserved as-is.
                    obj["dinput"] = hf.models.vars.compute_ddelta(
                        rs["h1i"], rs["h2i"])
                    obj["rinput"] = hf.models.vars.compute_ddelta(
                        rs["h1i"], rs["h2i"])
                    for r in range(0, rounds):
                        obj["h1r" + str(r)] = rs["h1r" + str(r) + "i"]
                        obj["h2r" + str(r)] = rs["h2r" + str(r) + "i"]
                        obj["dr" + str(r)] = hf.models.vars.compute_ddelta(
                            rs["h1r" + str(r) + "i"], rs["h2r" + str(r) + "i"])
                        obj["rr" + str(r)] = hf.models.vars.compute_ddelta(
                            rs["h1r" + str(r) + "i"], rs["h2r" + str(r) + "i"])
                    f.write(str(obj) + "\n")
                    f.flush()
def __main__():
    """Build and solve a SHA-3 partial-collision model, then print each
    result's per-round differential deltas.

    NOTE(review): relies on module-level names `w`, `rounds`, `factor`,
    `count` and the `random` module being defined/imported elsewhere in the
    original script — confirm before reuse.
    """
    m = hf.models()
    m.remote = False
    m.start("scratch-sha3-b" + str(w * 25) + "-r" + str(rounds) + "-collision", True)
    m.cms_args = [""]
    hf.models.vars.write_header()
    # First output_margin bits must collide; the trailing state bits beyond
    # input_margin are zeroed on both inputs.
    output_margin = 512 // factor
    input_margin = 25 * w - 2 * output_margin
    algo = hf.algorithms.lookup("sha3")(w=w, rounds=rounds)
    algo.generate()
    # ccollision: h1 and h2 agree on the first output_margin output bits.
    ccollision = ["and"]
    for i in range(0, output_margin):
        ccollision.append(("equal", "h1out" + str(i), "h2out" + str(i)))
    ccollision = tuple(ccollision)
    hf.models.vars.write_clause("ccollision", ccollision, "50-collision.txt")
    # cinput: the two inputs differ in at least one free input bit.
    cinput = ["or"]
    for i in range(0, input_margin):
        cinput.append(("not", ("equal", "h1in" + str(i), "h2in" + str(i))))
    cinput = tuple(cinput)
    hf.models.vars.write_clause("cinput", cinput, "52-input.txt")
    # czero: capacity-like tail bits fixed to F on both inputs.
    czero = ["and"]
    for i in range(input_margin, 25 * w):
        czero.append(("equal", "h1in" + str(i), "F"))
        czero.append(("equal", "h2in" + str(i), "F"))
    czero = tuple(czero)
    hf.models.vars.write_clause("czero", czero, "54-zero.txt")
    hf.models.vars.write_assign(["ccollision", "cinput", "czero"])
    m.collapse()
    m.build()
    # Randomized solver seed; `count` solutions requested.
    r = m.run(count=count, random=random.randint(2, 1000000000))
    for rs in m.results_generator(algo):
        obj = {}
        obj["w"] = w
        obj["rounds"] = rounds
        obj["input"] = hf.models.vars.compute_ddelta(rs["h1i"], rs["h2i"])[
            0:input_margin
        ]
        for r in range(0, rounds):
            obj["r" + str(r)] = hf.models.vars.compute_ddelta(
                rs["h1r" + str(r) + "i"], rs["h2r" + str(r) + "i"]
            )
        print(obj)
def __main__():
    """Single-quote variant of the SHA-3 partial-collision driver: build and
    solve the model, then print per-round differential deltas per result.

    NOTE(review): relies on module-level names `w`, `rounds`, `factor`,
    `count` and the `random` module being defined/imported elsewhere in the
    original script — confirm before reuse.
    """
    m = hf.models()
    m.remote = False
    m.start("scratch-sha3-b" + str(w * 25) + "-r" + str(rounds) + "-collision", True)
    m.cms_args = ['']
    hf.models.vars.write_header()
    output_margin = 512 // factor
    input_margin = (25 * w - 2 * output_margin)
    algo = hf.algorithms.lookup('sha3')(w=w, rounds=rounds)
    algo.generate()
    # ccollision: agreement on the first output_margin output bits.
    ccollision = ['and']
    for i in range(0, output_margin):
        ccollision.append(('equal', 'h1out' + str(i), 'h2out' + str(i)))
    ccollision = tuple(ccollision)
    hf.models.vars.write_clause('ccollision', ccollision, '50-collision.txt')
    # cinput: at least one differing free input bit.
    cinput = ['or']
    for i in range(0, input_margin):
        cinput.append(('not', ('equal', 'h1in' + str(i), 'h2in' + str(i))))
    cinput = tuple(cinput)
    hf.models.vars.write_clause('cinput', cinput, '52-input.txt')
    # czero: tail bits fixed to F on both inputs.
    czero = ['and']
    for i in range(input_margin, 25 * w):
        czero.append(('equal', 'h1in' + str(i), 'F'))
        czero.append(('equal', 'h2in' + str(i), 'F'))
    czero = tuple(czero)
    hf.models.vars.write_clause('czero', czero, '54-zero.txt')
    hf.models.vars.write_assign(['ccollision', 'cinput', 'czero'])
    m.collapse()
    m.build()
    r = m.run(count=count, random=random.randint(2, 1000000000))
    for rs in m.results_generator(algo):
        obj = {}
        obj['w'] = w
        obj['rounds'] = rounds
        obj['input'] = hf.models.vars.compute_ddelta(rs['h1i'], rs['h2i'])[0:input_margin]
        for r in range(0, rounds):
            obj['r' + str(r)] = hf.models.vars.compute_ddelta(
                rs['h1r' + str(r) + 'i'], rs['h2r' + str(r) + 'i'])
        print(obj)
def __main__():
    """Single-quote variant: re-open the stored collision model for each
    (w, rounds) pair and dump every result (inputs, per-round states and
    bitwise deltas) as one Python-literal dict per line to a scratch file.
    """
    for w in [2]:
        for rounds in range(1, 2):
            algo = hf.algorithms.lookup('sha3')(w=w, rounds=rounds)
            m = hf.models()
            m.remote = False
            # Re-open the existing model directory (recreate=False).
            m.start(
                "scratch-sha3-b" + str(25 * w) + "-r" + str(rounds) + "-collision",
                False)
            # NOTE(review): dead locals from the original (n_inputs, count,
            # double_expansions, subset_expansions, not_expansion, factor and
            # the margin computations) were never used and have been removed.
            out_path = ('/home/cipherboy/GitHub/hash_framework/scratch/collisions-w'
                        + str(w) + '-r' + str(rounds) + ".txt")
            # Context manager so the handle is closed even on error
            # (the original leaked it if iteration raised).
            with open(out_path, 'w') as f:
                print(f)
                f.flush()
                for rs in m.results_generator(algo):
                    obj = {}
                    obj['w'] = w
                    obj['rounds'] = rounds
                    obj['h1input'] = rs['h1i']
                    obj['h2input'] = rs['h2i']
                    # NOTE(review): 'dinput' and 'rinput' were computed
                    # identically in the original; preserved as-is.
                    obj['dinput'] = hf.models.vars.compute_ddelta(
                        rs['h1i'], rs['h2i'])
                    obj['rinput'] = hf.models.vars.compute_ddelta(
                        rs['h1i'], rs['h2i'])
                    for r in range(0, rounds):
                        obj['h1r' + str(r)] = rs['h1r' + str(r) + 'i']
                        obj['h2r' + str(r)] = rs['h2r' + str(r) + 'i']
                        obj['dr' + str(r)] = hf.models.vars.compute_ddelta(
                            rs['h1r' + str(r) + 'i'], rs['h2r' + str(r) + 'i'])
                        obj['rr' + str(r)] = hf.models.vars.compute_ddelta(
                            rs['h1r' + str(r) + 'i'], rs['h2r' + str(r) + 'i'])
                    f.write(str(obj) + "\n")
                    f.flush()
def setup(self):
    """Build the adder-associativity model: sum a+b+c+d three ways
    (left-nested, right-nested, balanced tree) and assert that the three
    results are NOT all pairwise equal — UNSAT proves associativity.
    """
    hf.config.default_adder = self.adder_cfg
    m = hf.models()
    m.remote = False
    m.start("benchmark-adders-associativity-" + str(self.bits), True)
    m.bc_args = self.bc_args
    hf.models.vars.write_header()

    width = self.bits
    a, b, c, d = ([prefix + str(bit) for bit in range(0, width)]
                  for prefix in ('a', 'b', 'c', 'd'))
    print(a)

    # Left-nested: ((a + b) + c) + d
    lo1, _ = hf.boolean.b_addl('lo1', a, b)
    lo2, _ = hf.boolean.b_addl('lo2', lo1, c)
    lo3, _ = hf.boolean.b_addl('lo3', lo2, d)
    # Right-nested: a + (b + (c + d))
    ro1, _ = hf.boolean.b_addl('ro1', c, d)
    ro2, _ = hf.boolean.b_addl('ro2', b, ro1)
    ro3, _ = hf.boolean.b_addl('ro3', a, ro2)
    # Balanced tree: (a + b) + (c + d)
    to1, _ = hf.boolean.b_addl('to1', a, b)
    to2, _ = hf.boolean.b_addl('to2', c, d)
    to3, _ = hf.boolean.b_addl('to3', to1, to2)

    hf.models.vars.write_dedupe(name="01-dedupe.txt")

    # Pairwise equality of all three sums, negated as the problem clause.
    terms = ['and']
    for bit in range(0, width):
        terms.append(('equal', lo3[bit], ro3[bit]))
        terms.append(('equal', lo3[bit], to3[bit]))
        terms.append(('equal', ro3[bit], to3[bit]))
    equality = tuple(terms)
    hf.models.vars.write_clause('equality', ('not', equality), '98-problem.txt')
    hf.models.vars.write_assign(['equality'])

    m.collapse()
    m.build()
    self.m = m
    self.lo3 = lo3
    self.ro3 = ro3
    self.to3 = to3
def setup(self):
    """Double-quote variant of the adder-associativity model: compute
    a+b+c+d as left-nested, right-nested and balanced-tree sums, then
    assert the negation of their pairwise equality (UNSAT == associative).
    """
    hf.config.default_adder = self.adder_cfg
    m = hf.models()
    m.remote = False
    m.start("benchmark-adders-associativity-" + str(self.bits), True)
    m.bc_args = self.bc_args
    hf.models.vars.write_header()

    width = self.bits
    a, b, c, d = ([prefix + str(bit) for bit in range(0, width)]
                  for prefix in ("a", "b", "c", "d"))
    print(a)

    # Left-nested: ((a + b) + c) + d
    lo1, _ = hf.boolean.b_addl("lo1", a, b)
    lo2, _ = hf.boolean.b_addl("lo2", lo1, c)
    lo3, _ = hf.boolean.b_addl("lo3", lo2, d)
    # Right-nested: a + (b + (c + d))
    ro1, _ = hf.boolean.b_addl("ro1", c, d)
    ro2, _ = hf.boolean.b_addl("ro2", b, ro1)
    ro3, _ = hf.boolean.b_addl("ro3", a, ro2)
    # Balanced tree: (a + b) + (c + d)
    to1, _ = hf.boolean.b_addl("to1", a, b)
    to2, _ = hf.boolean.b_addl("to2", c, d)
    to3, _ = hf.boolean.b_addl("to3", to1, to2)

    hf.models.vars.write_dedupe(name="01-dedupe.txt")

    # Pairwise equality of all three sums, negated as the problem clause.
    terms = ["and"]
    for bit in range(0, width):
        terms.append(("equal", lo3[bit], ro3[bit]))
        terms.append(("equal", lo3[bit], to3[bit]))
        terms.append(("equal", ro3[bit], to3[bit]))
    equality = tuple(terms)
    hf.models.vars.write_clause("equality", ("not", equality), "98-problem.txt")
    hf.models.vars.write_assign(["equality"])

    m.collapse()
    m.build()
    self.m = m
    self.lo3 = lo3
    self.ro3 = ro3
    self.to3 = to3
def prove_identity_theta():
    """Prove that 3*w consecutive theta rounds act as the identity on the
    Keccak state, for each lane width w.

    Round counts required per width (from the original notes):
        1 -> 3,  2 -> 6,  4 -> 12,  8 -> 24,  16 -> 48,  32 -> 96,  64 -> 192
    i.e. 3*w rounds.  For each w we assert NOT(state == theta^(3w)(state));
    UNSAT proves the identity, and any model is a counterexample.

    Raises:
        AssertionError: if a counterexample state is found for some w.
    """
    for w in [1, 2, 4, 8, 16, 32, 64]:
        count = w * 3
        rounds = ['t'] * count
        algo = hf.algorithms.sha3(w=w, rounds=rounds)
        tag = "keccak-attacks-chi4-identity-theta-w" + str(w)
        m = hf.models()
        m.bc_args = []
        m.start(tag, recreate=True)
        hf.models.vars.write_header()
        hf.models.generate(algo, ['h1'], rounds=rounds, bypass=True)
        hf.models.vars.write_assign(['cidentity'])

        # NOT(AND_i in_i == out_i): satisfiable iff some state is not fixed.
        cidentity = ['and']
        for i in range(0, 25 * w):
            cidentity.append(('equal', 'h1in' + str(i), 'h1out' + str(i)))
        cidentity = ('not', tuple(cidentity))
        hf.models.vars.write_clause('cidentity', cidentity, '01-problem.txt')

        m.collapse()
        m.build()
        res = m.run(count=1)
        if res:
            # Counterexample found: dump it and fail loudly.  An explicit
            # raise replaces the original `assert (False)`, which would be
            # silently stripped under `python -O`.
            for r in m.results_generator(algo, prefixes=["h1"]):
                print(r['h1in'], r['h1out'])
            raise AssertionError(
                "theta^(3*w) is not the identity for w=" + str(w))
def __main__():
    """Build an MD4 inversion model for r rounds: pin the 512-bit message
    block to all-F and constrain the four output words via
    output_constraints.  (Model is written, not solved, here.)
    """
    r = 20
    algo = hf.algorithms.md4()
    algo.rounds = r

    input_block = ["F"] * 512
    # Target values for the four MD4 output registers.
    target = {name: "F" * 32 for name in ("oaa", "obb", "occ", "odd")}

    m = hf.models()
    m.start("md4-inverse-r" + str(r), False)
    hf.models.vars.write_header()
    hf.models.generate(algo, ["h"], rounds=r, bypass=True)
    hf.models.vars.write_values(input_block, "hb", "08-block.txt")
    output_constraints(target)
    hf.models.vars.write_assign(["coutput"])
def parallel_find_path(algo, db, start, target, tag):
    """Search for a collision path from `start` toward `target` by parallel
    neighbor expansion.

    Maintains one equivalence class per discovered collision row:
      wqs[c]   - queue of step indices (0..47) left to try for class c
      wcols[c] - collision rows known to belong to / border class c
      wdist[c] - loose-metric distances of those rows to `target`
    Jobs are submitted asynchronously via `compute`; finished SAT jobs add
    new rows, link distance-1 classes together, and shrink min_distance.
    Returns all discovered collision rows (also inserted into `db`).
    """
    cols = [start.copy()]
    min_distance = hash_framework.attacks.collision.metric.loose.distance(
        algo, start, target
    )
    hash_framework.attacks.collision.insert_db_multiple(
        algo, db, cols, tag + "-original"
    )
    wqs = {}
    wqs[0] = []
    wcols = {}
    wcols[0] = [start]
    wdist = {}
    wdist[0] = [min_distance]
    for i in range(0, 48):
        wqs[0].append((i))
    working_cols = [0]  # classes currently worth expanding
    jq = []             # in-flight jobs: (class, step index, job handle)
    while len(working_cols) > 0 and min_distance > 0:
        print("min_distance: " + str(min_distance))
        print("working_cols: " + str(working_cols))
        wc = working_cols.pop(0)
        print("wc: " + str(wc))
        print("wqs[wc]: " + str(wqs[wc]))
        i = wqs[wc].pop(0)
        print("i: " + str(i))
        # Keep this class scheduled while it still has untried steps.
        if len(wqs[wc]) > 0:
            working_cols.append(wc)
        # Skip submission if an identical (class, step) job is in flight.
        found = False
        for j in range(0, len(jq)):
            if jq[j][0] == wc and jq[j][1] == i:
                found = True
                break
        if found:
            wqs[wc].append(i)
            if wc not in working_cols:
                working_cols.append(wc)
            continue
        # Build and submit the SAT model for (class wc, step i).
        m = hash_framework.models()
        m.start(tag + "-c" + str(wc) + "-r" + str(i), False)
        hash_framework.models.vars.write_header()
        hash_framework.models.generate(algo, ["h1", "h2"])
        hash_framework.attacks.collision.connected.loose.constraints_new_neighbor(
            algo, wcols[wc], i
        )
        hash_framework.attacks.collision.write_constraints(algo)
        hash_framework.attacks.collision.write_optional_differential(algo)
        hash_framework.models.vars.write_assign(
            ["ccollision", "cblocks", "cdifferentials"]
        )
        m.collapse()
        m.build()
        jqj = compute.perform_sat(
            "problem.cnf", "problem.out", count=1, no_wait=True, ident=(wc, i)
        )
        jq.append((wc, i, jqj))
        # Block while no worker is free, or drain jobs once nothing is queued.
        while compute.assign_work() is None or (len(working_cols) == 0 and len(jq) > 0):
            print("Waiting for work...")
            fj = compute.wait_job_hosts(loop_until_found=True)
            fj_status = fj[0]   # SAT/UNSAT outcome
            fj_job = fj[1]      # finished job descriptor
            fj_ident = fj_job[6]  # (class, step) identity of the job
            for j in range(0, len(jq)):
                jqe = jq[j]
                jqwc = jqe[0]
                jqi = jqe[1]
                jqj = jqe[2]
                if fj_ident[0] == jqwc and fj_ident[1] == jqi:
                    print("Found finished job:")
                    print((fj, jqe))
                    if fj_status:
                        m = hash_framework.models()
                        m.start(tag + "-c" + str(jqwc) + "-r" + str(jqi), False)
                        rs = m.results(algo)
                        ncols = hash_framework.attacks.collision.build_col_rows(
                            algo, db, rs, tag
                        )
                        if len(ncols) < 1:
                            continue
                        ncol = ncols[0]
                        hash_framework.attacks.collision.insert_db_multiple(
                            algo, db, ncols, tag
                        )
                        # Check if min_distance needs updating
                        ndist = hash_framework.attacks.collision.metric.loose.distance(
                            algo, ncol, target
                        )
                        # Only add if not already in cols
                        nci = -1
                        dte = list(
                            map(
                                lambda x: hash_framework.attacks.collision.metric.loose.distance(
                                    algo, ncol, x
                                ),
                                cols,
                            )
                        )
                        if 0 not in dte:
                            # Genuinely new row: open a fresh class for it.
                            nci = len(cols)
                            cols.append(ncol)
                            wcols[nci] = []
                            wdist[nci] = []
                            wqs[nci] = list(range(0, 48))
                            wcols[nci].append(ncol)
                            wdist[nci].append(ndist)
                        else:
                            nci = dte.index(0)
                        if jqwc not in working_cols:
                            working_cols.append(jqwc)
                        # Add to every other class of distance 1
                        for i in range(0, len(dte)):
                            if dte[i] == 1:
                                wcols[i].append(ncol)
                                wdist[i].append(ndist)
                                dr = hash_framework.attacks.collision.metric.loose.delta(
                                    algo, cols[i], cols[nci]
                                )
                                if dr[0][0] not in wqs[i]:
                                    wqs[i].append(dr[0][0])
                                if (
                                    i not in working_cols
                                    and wdist[i][0] <= min_distance
                                ):
                                    working_cols.append(i)
                                wcols[nci].append(cols[i])
                                wdist[nci].append(wdist[i][0])
                                if dr[0][0] not in wqs[nci]:
                                    wqs[nci].append(dr[0][0])
                        # Check if min_dist needs updating
                        # If so, add it to wq
                        if ndist <= min_distance:
                            print(
                                "Updating min_distance: "
                                + str(min_distance)
                                + " - "
                                + str(ndist)
                            )
                            min_distance = ndist
                            working_cols.append(nci)
                            # Rebuild the working set from scratch: admit
                            # classes within min_distance + offset of target,
                            # widening offset until something qualifies.
                            # NOTE(review): nesting of this rebuild under the
                            # min_distance update is reconstructed from the
                            # flattened source — confirm against history.
                            offset = 0
                            working_cols = []
                            while len(working_cols) == 0 and offset < 4:
                                for i in range(0, len(cols)):
                                    if (
                                        hash_framework.attacks.collision.metric.loose.distance(
                                            algo, target, cols[i]
                                        )
                                        <= (min_distance + offset)
                                        and len(wqs[i]) > 0
                                    ):
                                        working_cols.append(i)
                                offset += 1
                    jq.remove(jq[j])
                    break
    hash_framework.attacks.collision.insert_db_multiple(algo, db, cols, tag)
    return cols
def setup(self):
    """Build the adder-equality model: two independent 4-operand sums
    (left inputs l*, right inputs r*) combined in the configured shape
    (tree / left / right), asserted equal while at least one input bit
    differs between the two sides.
    """
    hf.config.default_adder = self.adder_cfg
    m = hf.models()
    m.remote = False
    m.start("benchmark-adders-equality-" + str(self.bits), True)
    m.bc_args = self.bc_args
    hf.models.vars.write_header()

    width = self.bits
    la, lb, lc, ld, ra, rb, rc, rd = (
        [prefix + str(bit) for bit in range(0, width)]
        for prefix in ('la', 'lb', 'lc', 'ld', 'ra', 'rb', 'rc', 'rd'))

    # Unmatched shapes deliberately leave these None (later indexing fails).
    lo1 = lo2 = lo3 = None
    ro1 = ro2 = ro3 = None
    if self.shape == 'tree':
        lo1, _ = hf.boolean.b_addl('lo1', la, lb)
        lo2, _ = hf.boolean.b_addl('lo2', lc, ld)
        lo3, _ = hf.boolean.b_addl('lo3', lo1, lo2)
        ro1, _ = hf.boolean.b_addl('ro1', ra, rb)
        ro2, _ = hf.boolean.b_addl('ro2', rc, rd)
        ro3, _ = hf.boolean.b_addl('ro3', ro1, ro2)
    elif self.shape == 'left':
        lo1, _ = hf.boolean.b_addl('lo1', la, lb)
        lo2, _ = hf.boolean.b_addl('lo2', lo1, lc)
        lo3, _ = hf.boolean.b_addl('lo3', lo2, ld)
        ro1, _ = hf.boolean.b_addl('ro1', ra, rb)
        ro2, _ = hf.boolean.b_addl('ro2', ro1, rc)
        ro3, _ = hf.boolean.b_addl('ro3', ro2, rd)
    elif self.shape == 'right':
        lo1, _ = hf.boolean.b_addl('lo1', lc, ld)
        lo2, _ = hf.boolean.b_addl('lo2', lb, lo1)
        lo3, _ = hf.boolean.b_addl('lo3', la, lo2)
        ro1, _ = hf.boolean.b_addl('ro1', rc, rd)
        ro2, _ = hf.boolean.b_addl('ro2', rb, ro1)
        ro3, _ = hf.boolean.b_addl('ro3', ra, ro2)

    hf.models.vars.write_dedupe(name="01-dedupe.txt")

    # The two final sums must agree bit-for-bit.
    equality = tuple(['and'] + [('equal', lo3[bit], ro3[bit])
                                for bit in range(0, width)])
    hf.models.vars.write_clause('equality', equality, '50-equality.txt')

    # ...while at least one corresponding input bit differs.
    mismatch = ['or']
    for bit in range(0, width):
        mismatch.append(('not', ('equal', la[bit], ra[bit])))
        mismatch.append(('not', ('equal', lb[bit], rb[bit])))
        mismatch.append(('not', ('equal', lc[bit], rc[bit])))
        mismatch.append(('not', ('equal', ld[bit], rd[bit])))
    hf.models.vars.write_clause('input', tuple(mismatch), '60-input.txt')

    hf.models.vars.write_assign(['equality', 'input'])
    m.collapse()
    m.build()
    self.m = m
    self.lo3 = lo3
    self.ro3 = ro3
import rca import cla import hash_framework import time m = hash_framework.models() m.model_dir = '.' m.start('models', False) m.remote = False hash_framework.models.vars.write_header() bit_size = 1024 rcaa = [] rcab = [] claa = [] clab = [] for i in range(0, bit_size): rcaa.append('rcaa' + str(i)) rcab.append('rcab' + str(i)) claa.append('claa' + str(i)) clab.append('clab' + str(i)) rca.__main__(rcaa, rcab, "rca") cla.__main__(claa, clab, "cla") f = open('01-inputs.txt', 'w') s = "AND(" for i in range(0, bit_size): s += "rcaa" + str(i) + " == claa" + str(i) + ","
def sha3_xof_recreate(w, r, e):
    """Enumerate all margin-bit patterns of a 3-stage chained SHA-3 XOF.

    For every assignment of the four margin windows (h1 input, h1/h2/h3
    outputs), solve a model chaining h1->h2->h3 and record satisfiable
    patterns in a nested tree keyed pre -> preleaf -> postleaf -> [leaf].
    The tree is dumped to /tmp/xof-tree-w<w>-r<r>.json.

    Bugfixes vs. original: the JSON path used str(w) for the '-r' segment
    (now str(r), matching the model tag), the dump handle is now closed via
    a context manager, and the unused local `el` was removed.
    """
    margin = e * w // 64
    algo = hf.algorithms.sha3(w=w, rounds=r)
    tag = "sha3-xof_tree-w" + str(w) + "-r" + str(r) + '-e' + str(e)
    r_tree = {}
    for elm in itertools.product('TF', repeat=4 * margin):
        m = hf.models()
        m.start(tag, recreate=True)
        hf.models.vars.write_header()
        hf.models.generate(algo, ['h1', 'h2', 'h3'], rounds=r, bypass=True)
        hf.models.vars.write_assign(['cassign', 'cchain'])

        # Pin the four margin windows to this candidate pattern.
        cassign = ['and']
        for j in range(0, margin):
            cassign.append(('equal', 'h1in' + str(j), elm[j]))
        for j in range(0, margin):
            cassign.append(('equal', 'h1out' + str(j), elm[j + margin]))
        for j in range(0, margin):
            cassign.append(('equal', 'h2out' + str(j), elm[j + 2 * margin]))
        for j in range(0, margin):
            cassign.append(('equal', 'h3out' + str(j), elm[j + 3 * margin]))
        cassign = tuple(cassign)
        hf.models.vars.write_clause('cassign', cassign, '10-assign.txt')

        # Chain the permutations: out(h1) -> in(h2), out(h2) -> in(h3).
        cchain = ['and']
        for j in range(0, 25 * w):
            cchain.append(('equal', 'h1out' + str(j), 'h2in' + str(j)))
            cchain.append(('equal', 'h2out' + str(j), 'h3in' + str(j)))
        cchain = tuple(cchain)
        hf.models.vars.write_clause('cchain', cchain, '15-chain.txt')

        m.collapse()
        m.build()
        t1 = time.time()
        res = m.run(count=1)
        t2 = (time.time() - t1)
        print("Run time: " + str(t2))
        if res:
            result = m.load_results()[0]
            # Sanity-check that the solver honored every pinned bit.
            for j in range(0, margin):
                assert (result['h1in' + str(j)] == elm[j])
                assert (result['h1out' + str(j)] == elm[j + margin])
                assert (result['h2out' + str(j)] == elm[j + 2 * margin])
                assert (result['h3out' + str(j)] == elm[j + 3 * margin])
            pre = ''.join(elm[:margin])
            preleaf = ''.join(elm[margin:2 * margin])
            postleaf = ''.join(elm[2 * margin:3 * margin])
            # NOTE(review): leaf spans from 2*margin (it overlaps postleaf)
            # in the original; preserved as-is.
            leaf = ''.join(elm[2 * margin:])
            r_tree.setdefault(pre, {}).setdefault(preleaf, {}) \
                  .setdefault(postleaf, []).append(leaf)
            print(r_tree)
    with open('/tmp/xof-tree-w' + str(w) + '-r' + str(r) + '.json', 'w') as f:
        json.dump(r_tree, f)
def search_sat(w):
    """For each state-bit position p, solve for the set of input positions
    whose theta-column masks XOR to the row mask of p, i.e. search for a
    linear inverse combination of theta per position.

    Returns {p: [input indices]} for each p in 0..5*w-1.

    Fix vs. original: `Rpi == None` replaced with `Rpi is None`
    (identity comparison for None per PEP 8).

    NOTE(review): the exact semantics of theta/flatten/d_cxor/sha3ri/sha3i/
    m_itop are defined elsewhere; comments here describe only the visible
    data flow.
    """
    Ap = theta(w)
    Apf = flatten(Ap, w)
    # Cp[x][z]: combined-XOR set over the column (x, *, z);
    # Rp[x][z]: intersection of the per-(x,y,z) sets over y.
    Cp = []
    Rp = []
    for i in range(0, 5):
        Cp.append([None] * w)
        Rp.append([None] * w)
    for x in range(0, 5):
        for z in range(0, w):
            Cpi = []
            Rpi = None
            for y in range(0, 5):
                Cpi.append((x, y, z))
                if Rpi is None:
                    Rpi = Ap[x][y][z]
                else:
                    Rpi = Rpi.intersection(Ap[x][y][z])
            Cp[x][z] = d_cxor(Apf, Cpi)
            Rp[x][z] = Rpi
    # MCp/MRp: 'T'/'F' mask strings per flattened position p.
    MCp = [None] * (5 * w)
    MRp = [None] * (5 * w)
    for rx in range(0, 5):
        for rz in range(0, w):
            p = sha3ri(w, rx, rz)
            Cs = ['F'] * (5 * w)
            Rs = ['F'] * (5 * w)
            for i in sorted(
                    list(
                        set(
                            map(lambda y: sha3i(w, y[0], y[1], y[2]) % (5 * w),
                                Rp[rx][rz])))):
                Rs[i] = 'T'
            for i in sorted(
                    list(
                        set(
                            map(lambda y: sha3i(w, y[0], y[1], y[2]) % (5 * w),
                                Cp[rx][rz])))):
                Cs[i] = 'T'
            MCp[p] = ''.join(Cs)
            MRp[p] = ''.join(Rs)
            print(rx, rz, rx * w + rz, MRp[p], m_itop(w, Rp[rx][rz]))
            print(rx, rz, rx * w + rz, MCp[p], m_itop(w, Cp[rx][rz]))
    results = {}
    for p in range(0, 5 * w):
        tag = "keccak-attacks-theta-inverse-w" + str(w) + "-p" + str(p)
        m = hf.models()
        m.start(tag, recreate=True)
        hf.models.vars.write_header()
        hf.models.vars.write_assign(['cresult'])
        # cresult: for every output bit j, the XOR over selected columns
        # ('a' selector variables ANDed with the column mask bit) must equal
        # the row mask bit.
        cresult = ['and']
        for j in range(0, len(MRp[p])):
            oc = MRp[p][j]
            xor = ['xor']
            for i in range(0, 5 * w):
                var = 'a' + str(i)
                value = MCp[i][j]
                xor.append(('and', var, value))
            xor = tuple(xor)
            cresult.append(('equal', oc, xor))
        cresult = tuple(cresult)
        hf.models.vars.write_clause('cresult', cresult, '01-problem.txt')
        m.collapse()
        m.build()
        m.run(count=1)
        rg = m.load_results_generator()
        for r in rg:
            # Collect the selected input positions from the model.
            indices = []
            for i in range(0, 5 * w):
                if r['a' + str(i)] == 'T':
                    indices.append(i)
            print(p, len(indices))
            results[p] = list(indices)
    print(results)
    return results
def __main__():
    """Expand a previously-found SHA-3 collision trail to double the lane
    width: take a stored result object (w, per-round deltas) and constrain a
    2w-width model so each old delta bit maps to position 2*i.

    NOTE(review): `in_objs` is empty here, so `in_objs[0]` raises IndexError
    immediately — the original data source (file/db load) appears to have
    been stripped; restore it before running.
    """
    in_objs = []
    in_obj = in_objs[0]
    ow = in_obj["w"]        # original (old) lane width
    w = ow * 2              # expanded lane width
    factor = 64 // w
    rounds = in_obj["rounds"]
    m = hf.models()
    m.remote = False
    m.start("scratch-sha3-b" + str(w * 25) + "-collision", True)
    m.cms_args = [""]
    hf.models.vars.write_header()
    output_margin = 512 // factor
    input_margin = 25 * w - 2 * output_margin
    algo = hf.algorithms.lookup("sha3")(w=w, rounds=rounds)
    algo.generate()
    # ccollision: outputs agree on the first output_margin bits.
    ccollision = ["and"]
    for i in range(0, output_margin):
        ccollision.append(("equal", "h1out" + str(i), "h2out" + str(i)))
    ccollision = tuple(ccollision)
    hf.models.vars.write_clause("ccollision", ccollision, "50-collision.txt")
    # cinput: inputs differ in at least one free bit.
    cinput = ["or"]
    for i in range(0, input_margin):
        cinput.append(("not", ("equal", "h1in" + str(i), "h2in" + str(i))))
    cinput = tuple(cinput)
    hf.models.vars.write_clause("cinput", cinput, "52-input.txt")
    # czero: tail bits fixed to F on both inputs.
    czero = ["and"]
    for i in range(input_margin, 25 * w):
        czero.append(("equal", "h1in" + str(i), "F"))
        czero.append(("equal", "h2in" + str(i), "F"))
    czero = tuple(czero)
    hf.models.vars.write_clause("czero", czero, "54-zero.txt")
    # cexpand: replay the stored per-round deltas at doubled positions
    # (old bit i constrains new position 2*i).
    cexpand = ["and"]
    for i in range(0, rounds - 1):
        k = "r" + str(i)
        if not k in in_obj:
            continue
        # Inner loop intentionally shadows `i`; the outer for reassigns it.
        for i in range(0, len(in_obj[k]), ow):
            delta = in_obj[k][i : i + ow]
            pos = i * 2
            print(pos)
            cexpand.append(
                hf.models.vars.differential(
                    delta, "h1" + k + "i", pos, "h2" + k + "i", pos
                )
            )
    cexpand = tuple(cexpand)
    hf.models.vars.write_clause("cexpand", cexpand, "56-expand.txt")
    # ceinput: same doubling for the stored input delta.
    ceinput = ["and"]
    for i in range(0, len(in_obj["input"]), ow):
        delta = in_obj["input"][i : i + ow]
        pos = i * 2
        ceinput.append(hf.models.vars.differential(delta, "h1in", pos, "h2in", pos))
    ceinput = tuple(ceinput)
    # print(cexpand)
    hf.models.vars.write_clause("ceinput", ceinput, "58-einput.txt")
    hf.models.vars.write_assign(["ccollision", "cinput", "czero", "cexpand", "ceinput"])
    m.collapse()
    m.build()
    r = m.run(count=1)
    if r:
        for rs in m.results_generator(algo):
            obj = {}
            obj["w"] = w
            obj["rounds"] = rounds
            obj["input"] = hf.models.vars.compute_ddelta(rs["h1i"], rs["h2i"])[
                0:input_margin
            ]
            for r in range(0, rounds):
                obj["r" + str(r)] = hf.models.vars.compute_ddelta(
                    rs["h1r" + str(r) + "i"], rs["h2r" + str(r) + "i"]
                )
            print(obj)
algo = hf.algorithms.sha3(w=w, rounds=rounds) db_path = hf.config.results_dir + "/worker_results.db" db = hf.database(path=db_path) sat = set() for in_margin in range(input_error, 25 * w): min_v = 0 max_v = 25 * w out_margin = (min_v + max_v) // 2 searched = set() while min_v < max_v: m = hf.models() m.remote = False mn = ("sha3-margin-w" + str(w) + "-ie" + str(input_error) + "-im" + str(in_margin) + "-om" + str(out_margin)) m.start(mn, False) hf.models.vars.write_header() hf.models.generate(algo, ["h1", "h2"], bypass=True) hf.models.vars.write_assign(["cstart", "cinput", "coutput"]) tail = "F" * (algo.state_size - in_margin) cstart = hf.models.vars.differential(tail, "h1i", in_margin, "h2i", in_margin) hf.models.vars.write_clause("cstart", cstart, "50-start.txt") tail = "*" * in_margin cinput = hf.models.vars.differential(tail, "h1i", 0, "h2i", 0)
def setup(self):
    """Double-quote variant of the adder-equality model: two independent
    4-operand sums (left l*, right r*) in the configured shape
    (tree / left / right), asserted equal while at least one input bit
    differs between the sides.
    """
    hf.config.default_adder = self.adder_cfg
    m = hf.models()
    m.remote = False
    m.start("benchmark-adders-equality-" + str(self.bits), True)
    m.bc_args = self.bc_args
    hf.models.vars.write_header()

    width = self.bits
    la, lb, lc, ld, ra, rb, rc, rd = (
        [prefix + str(bit) for bit in range(0, width)]
        for prefix in ("la", "lb", "lc", "ld", "ra", "rb", "rc", "rd"))

    # Unmatched shapes deliberately leave these None (later indexing fails).
    lo1 = lo2 = lo3 = None
    ro1 = ro2 = ro3 = None
    if self.shape == "tree":
        lo1, _ = hf.boolean.b_addl("lo1", la, lb)
        lo2, _ = hf.boolean.b_addl("lo2", lc, ld)
        lo3, _ = hf.boolean.b_addl("lo3", lo1, lo2)
        ro1, _ = hf.boolean.b_addl("ro1", ra, rb)
        ro2, _ = hf.boolean.b_addl("ro2", rc, rd)
        ro3, _ = hf.boolean.b_addl("ro3", ro1, ro2)
    elif self.shape == "left":
        lo1, _ = hf.boolean.b_addl("lo1", la, lb)
        lo2, _ = hf.boolean.b_addl("lo2", lo1, lc)
        lo3, _ = hf.boolean.b_addl("lo3", lo2, ld)
        ro1, _ = hf.boolean.b_addl("ro1", ra, rb)
        ro2, _ = hf.boolean.b_addl("ro2", ro1, rc)
        ro3, _ = hf.boolean.b_addl("ro3", ro2, rd)
    elif self.shape == "right":
        lo1, _ = hf.boolean.b_addl("lo1", lc, ld)
        lo2, _ = hf.boolean.b_addl("lo2", lb, lo1)
        lo3, _ = hf.boolean.b_addl("lo3", la, lo2)
        ro1, _ = hf.boolean.b_addl("ro1", rc, rd)
        ro2, _ = hf.boolean.b_addl("ro2", rb, ro1)
        ro3, _ = hf.boolean.b_addl("ro3", ra, ro2)

    hf.models.vars.write_dedupe(name="01-dedupe.txt")

    # The two final sums must agree bit-for-bit.
    equality = tuple(["and"] + [("equal", lo3[bit], ro3[bit])
                                for bit in range(0, width)])
    hf.models.vars.write_clause("equality", equality, "50-equality.txt")

    # ...while at least one corresponding input bit differs.
    mismatch = ["or"]
    for bit in range(0, width):
        mismatch.append(("not", ("equal", la[bit], ra[bit])))
        mismatch.append(("not", ("equal", lb[bit], rb[bit])))
        mismatch.append(("not", ("equal", lc[bit], rc[bit])))
        mismatch.append(("not", ("equal", ld[bit], rd[bit])))
    hf.models.vars.write_clause("input", tuple(mismatch), "60-input.txt")

    hf.models.vars.write_assign(["equality", "input"])
    m.collapse()
    m.build()
    self.m = m
    self.lo3 = lo3
    self.ro3 = ro3
def parallel_find_path_hybrid(algo, db, start, target, tag):
    """Parallel hybrid search for a differential path from `start` to `target`.

    Maintains per-column work state — wqs[c]: queue of step indices still to
    try (-1 is the special "unit step" job aimed directly at the target),
    wcols[c]: known neighbor columns, wdist[c]: their loose-metric distances —
    and farms SAT jobs out through `compute`, harvesting finished jobs to
    discover new columns until the distance to `target` drops to 1.

    Discovered columns are persisted to `db` under `tag` and returned.

    Fix vs. original: the UNSAT-unit-job branch called `jqwc.remove(-1)` on an
    int (the column index), which would raise AttributeError; the guard
    `-1 in wqs[jqwc]` shows the intended target was the work queue, so it now
    calls `wqs[jqwc].remove(-1)`.
    """
    cols = [start.copy()]
    min_distance = hash_framework.attacks.collision.metric.loose.distance(
        algo, start, target
    )
    hash_framework.attacks.collision.insert_db_multiple(
        algo, db, cols, tag + "-original"
    )
    wqs = {}
    wqs[0] = [-1] + list(range(0, 48))
    wcols = {}
    wcols[0] = [cols[0]]
    wdist = {}
    wdist[0] = [min_distance]
    working_cols = []
    unit_cols = [0]
    new_found = 0
    repeat_found = 0
    # In-flight jobs: (column index, step index, job handle).
    jq = []
    while (len(working_cols) > 0 or len(unit_cols) > 0) and min_distance > 1:
        print("min_distance: " + str(min_distance))
        print("discovered vertices: " + str(len(cols)))
        print("unit_cols: " + str(unit_cols))
        print("working_cols: " + str(working_cols))
        print("job_queue: " + str(list(map(lambda x: (x[0], x[1]), jq))))
        print("ratio: " + str(new_found) + " : " + str(repeat_found))
        override = False
        if len(unit_cols) > 0:
            # Prefer unit-step jobs (step index -1): attempt to jump from
            # this column straight toward the target.
            wc = unit_cols.pop()
            i = -1
            found = False
            for j in range(0, len(jq)):
                if jq[j][0] == wc and jq[j][1] == i:
                    found = True
                    break
            if found:
                # A unit job for this column is already in flight; force a
                # pass through the wait loop below.
                # NOTE(review): the build/submit below still runs in this
                # case, queuing a duplicate job — preserved as written.
                override = True
            print(len(wcols[wc]))
            m = hash_framework.models()
            m.start(tag + "-c" + str(wc) + "-r" + str(i), False)
            hash_framework.models.vars.write_header()
            hash_framework.models.generate(algo, ["h1", "h2"])
            hash_framework.attacks.collision.connected.loose.new_unit_step(
                algo, wcols[wc][0], wcols[wc], target
            )
            hash_framework.attacks.collision.write_constraints(algo)
            hash_framework.attacks.collision.write_optional_differential(algo)
            hash_framework.models.vars.write_assign(
                ["ccollision", "cblocks", "cdifferentials"]
            )
            m.collapse()
            m.build()
            jqj = compute.perform_sat(
                "problem.cnf", "problem.out", count=1, no_wait=True, ident=(wc, i)
            )
            jq.append((wc, i, jqj))
            wqs[wc].remove(-1)
        else:
            # Regular neighbor-expansion job from the next working column.
            wc = working_cols.pop(0)
            print("wc: " + str(wc))
            print("wqs[wc]: " + str(wqs[wc]))
            i = wqs[wc].pop(0)
            print("i: " + str(i))
            if len(wqs[wc]) > 0:
                working_cols.append(wc)
            found = False
            for j in range(0, len(jq)):
                if jq[j][0] == wc and jq[j][1] == i:
                    found = True
                    break
            if found:
                # Job already in flight: skip the build, just wait.
                if wc not in working_cols and len(wqs[wc]) > 0:
                    working_cols.append(wc)
                override = True
            else:
                m = hash_framework.models()
                m.start(tag + "-c" + str(wc) + "-r" + str(i), False)
                hash_framework.models.vars.write_header()
                hash_framework.models.generate(algo, ["h1", "h2"])
                hash_framework.attacks.collision.connected.loose.constraints_new_neighbor(
                    algo, wcols[wc], i
                )
                hash_framework.attacks.collision.write_constraints(algo)
                hash_framework.attacks.collision.write_optional_differential(algo)
                hash_framework.models.vars.write_assign(
                    ["ccollision", "cblocks", "cdifferentials"]
                )
                m.collapse()
                m.build()
                jqj = compute.perform_sat(
                    "problem.cnf", "problem.out", count=1, no_wait=True, ident=(wc, i)
                )
                jq.append((wc, i, jqj))
        # Block while no compute slot is free, while an override was
        # requested, or while only in-flight jobs remain.
        while (
            override
            or compute.assign_work() is None
            or (len(working_cols) == 0 and len(unit_cols) == 0 and len(jq) > 0)
        ):
            override = False
            print("Waiting for work...")
            fj = compute.wait_job_hosts(loop_until_found=True)
            fj_status = fj[0]
            fj_job = fj[1]
            fj_ident = fj_job[6]
            for j in range(0, len(jq)):
                jqe = jq[j]
                jqwc = jqe[0]
                jqi = jqe[1]
                jqj = jqe[2]
                if fj_ident[0] == jqwc and fj_ident[1] == jqi:
                    print("Found finished job:")
                    print((fj, jqe))
                    if fj_status:
                        # SAT: load the model's results and extract the new
                        # collision column.
                        m = hash_framework.models()
                        m.start(tag + "-c" + str(jqwc) + "-r" + str(jqi), False)
                        rs = m.results(algo)
                        ncols = hash_framework.attacks.collision.build_col_rows(
                            algo, db, rs, tag
                        )
                        if len(ncols) < 1:
                            continue
                        ncol = ncols[0]
                        hash_framework.attacks.collision.insert_db_multiple(
                            algo, db, ncols, tag
                        )
                        ndist = hash_framework.attacks.collision.metric.loose.distance(
                            algo, ncol, target
                        )
                        # Only add if not already in cols.
                        nci = -1
                        dte = list(
                            map(
                                lambda x: hash_framework.attacks.collision.metric.loose.distance(
                                    algo, ncol, x
                                ),
                                cols,
                            )
                        )
                        if 0 not in dte:
                            nci = len(cols)
                            cols.append(ncol)
                            wcols[nci] = [ncol]
                            wqs[nci] = [-1] + list(range(0, 48))
                            wdist[nci] = [ndist]
                            new_found += 1
                        else:
                            nci = dte.index(0)
                            repeat_found += 1
                            if jqi == -1 and -1 not in wqs[jqwc]:
                                wqs[jqwc] = [-1] + wqs[jqwc]
                        # Add to every other class of distance 1.
                        for i in range(0, len(dte)):
                            if dte[i] == 1 and i in wqs:
                                wcols[i].append(ncol)
                                wdist[i].append(ndist)
                                dr = hash_framework.attacks.collision.metric.loose.delta(
                                    algo, cols[i], cols[nci]
                                )
                                if dr[0][0] not in wqs[i]:
                                    wqs[i].append(dr[0][0])
                                wcols[nci].append(cols[i])
                                wdist[nci].append(wdist[i][0])
                                if dr[0][0] not in wqs[nci]:
                                    wqs[nci].append(dr[0][0])
                        # Check if min_distance needs updating.
                        if ndist <= min_distance:
                            print(
                                "Updating min_distance: "
                                + str(min_distance)
                                + " - "
                                + str(ndist)
                            )
                            min_distance = ndist
                    elif jqi == -1:
                        # UNSAT unit job: retire the unit step for this
                        # column and wire it up to its distance-1 neighbors.
                        if -1 in wqs[jqwc]:
                            # FIX: was `jqwc.remove(-1)` (AttributeError on
                            # int); the guard targets the work queue.
                            wqs[jqwc].remove(-1)
                        for i in range(0, len(cols)):
                            if (
                                hash_framework.attacks.collision.metric.loose.distance(
                                    algo, cols[i], cols[jqwc]
                                )
                                == 1
                            ):
                                wcols[jqwc].append(cols[i])
                                wdist[jqwc].append(wdist[i][0])
                                if cols[jqwc] not in wcols[i]:
                                    wcols[i].append(cols[jqwc])
                                    # NOTE(review): `ndist` is only assigned
                                    # on the SAT path; if the first finished
                                    # job reaches here it is undefined
                                    # (NameError) — preserved as written,
                                    # confirm intended value.
                                    wdist[i].append(ndist)
                        working_cols.append(jqwc)
                    jq.remove(jq[j])
                    # Recompute unit_cols after updating.
                    unit_cols = []
                    offset = 0
                    while len(unit_cols) == 0 and offset < 1:
                        for i in range(0, len(cols)):
                            if (
                                hash_framework.attacks.collision.metric.loose.distance(
                                    algo, target, cols[i]
                                )
                                <= (min_distance + offset)
                                and -1 in wqs[i]
                            ):
                                found = False
                                for job in jq:
                                    if job[0] == i:
                                        found = True
                                        break
                                if not found and i not in unit_cols:
                                    unit_cols = [i] + unit_cols
                        offset += 1
                    if fj_status and jqi == -1 and jqwc not in unit_cols:
                        unit_cols = [jqwc] + unit_cols
                    # Recompute working_cols after updating.
                    offset = 0
                    working_cols = []
                    while len(working_cols) == 0 and offset < 3:
                        for i in range(0, len(cols)):
                            if (
                                hash_framework.attacks.collision.metric.loose.distance(
                                    algo, target, cols[i]
                                )
                                <= (min_distance + offset)
                                and -1 not in wqs[i]
                                and len(wqs[i]) > 0
                            ):
                                working_cols = [i] + working_cols
                        offset += 1
                    break
    hash_framework.attacks.collision.insert_db_multiple(algo, db, cols, tag)
    return cols
def sha3_xof_recreate(w: int, r: int, e: int, b: int) -> None:
    """Build (and optionally solve) a SAT model that hunts for two distinct
    SHA-3/Keccak XOF seed states whose squeezed outputs agree on the first
    `b` observed bits but diverge afterwards.

    w: Keccak lane width in bits (the state has 25 * w bits).
    r: rounds per permutation call.
    e: rate numerator; margin = e * w // 64 output bits per squeeze step.
    b: number of known (observed) output bits the two hypotheses must match.

    NOTE(review): relies on module-level flags `run` and `release` (not
    defined in this function) to gate solving and file cleanup, and on
    module-level `time`/`os`/`hf` imports — confirm they exist at call time.
    """
    # Output bits extracted per squeeze step.
    margin = e * w // 64
    # Squeeze steps needed to cover the b known bits, rounded up, plus one
    # extra step past the known output (where divergence is required).
    s = b // margin
    if s * margin != b:
        s += 1
    s += 1
    algo = hf.algorithms.sha3(w=w, rounds=r)
    tag = "sha3-xof_recreate-w" + str(w) + "-r" + str(r) + '-e' + str(
        e) + "-s" + str(s)
    # One state instance per squeeze step (s0..s{s-1}), duplicated for the
    # two candidate seeds h1 and h2.
    initial_prefixes = []
    for i in range(0, s):
        initial_prefixes.append('s' + str(i))
    prefixes = []
    for p in initial_prefixes:
        prefixes.append('h1' + p)
        prefixes.append('h2' + p)
    m = hf.models()
    m.start(tag, recreate=True)
    print(w, r, e, b, margin, s)
    hf.models.vars.write_header()
    hf.models.generate(algo, prefixes, rounds=r, bypass=True)
    hf.models.vars.write_assign(['cdifferent', 'cknown', 'cchain', 'cloop'])
    # cdifferent: the two initial (seed) states must NOT be identical.
    cdifferent = ['and']
    for j in range(0, 25 * w):
        cdifferent.append(('equal', 'h1s0in' + str(j), 'h2s0in' + str(j)))
    cdifferent = ('not', tuple(cdifferent))
    hf.models.vars.write_clause('cdifferent', cdifferent, '10-different.txt')
    if s > 1:
        # cchain: each step's output state feeds the next step's input,
        # separately for both hypotheses.
        cchain = ['and']
        for i in range(0, s - 1):
            for j in range(0, 25 * w):
                cchain.append(('equal', 'h1s' + str(i) + 'out' + str(j),
                               'h1s' + str(i + 1) + 'in' + str(j)))
                cchain.append(('equal', 'h2s' + str(i) + 'out' + str(j),
                               'h2s' + str(i + 1) + 'in' + str(j)))
        cchain = tuple(cchain)
        hf.models.vars.write_clause('cchain', cchain, '15-chain.txt')
    # cknown: within the known prefix (global bit index < b), the first
    # `margin` bits of each step's output agree between h1 and h2.
    cknown = ['and']
    for i in range(0, s):
        for j in range(0, margin):
            if i * margin + j >= b:
                break
            cknown.append(('equal', 'h1s' + str(i) + 'out' + str(j),
                           'h2s' + str(i) + 'out' + str(j)))
    cknown = tuple(cknown)
    hf.models.vars.write_clause('cknown', cknown, '20-known.txt')
    # cloop: beyond the known prefix the extracted bits must NOT all agree —
    # otherwise the two seeds would be indistinguishable forever.
    cloop = ['and']
    for i in range(0, s):
        for j in range(0, margin):
            if i * margin + j < b:
                continue
            cloop.append(('equal', 'h1s' + str(i) + 'out' + str(j),
                          'h2s' + str(i) + 'out' + str(j)))
    cloop = ('not', tuple(cloop))
    hf.models.vars.write_clause('cloop', cloop, '25-loop.txt')
    m.collapse()
    m.build()
    if run:
        # Solve once and report wall-clock time plus every model found.
        t1 = time.time()
        res = m.run(count=1)
        t2 = (time.time() - t1)
        print("Run time: " + str(t2))
        for result in m.load_results():
            # Reassemble the two seed states as bit strings.
            h1o_s = ""
            for j in range(0, 25 * w):
                h1o_s += result['h1s0in' + str(j)]
            print("h1seed: " + str(h1o_s))
            h2o_s = ""
            for j in range(0, 25 * w):
                h2o_s += result['h2s0in' + str(j)]
            print("h2seed: " + str(h2o_s))
            # Per-step outputs, each compared against both seeds to flag
            # cycles back to an initial state.
            for i in range(0, s):
                o_s = ""
                for j in range(0, 25 * w):
                    o_s += result['h1s' + str(i) + 'out' + str(j)]
                print("\th1s" + str(i) + ": " + str(o_s) + " -- " +
                      str(o_s == h1o_s) + " -- " + str(o_s == h2o_s))
                o_s = ""
                for j in range(0, 25 * w):
                    o_s += result['h2s' + str(i) + 'out' + str(j)]
                print("\th2s" + str(i) + ": " + str(o_s) + " -- " +
                      str(o_s == h1o_s) + " -- " + str(o_s == h2o_s))
    if release:
        # Best-effort cleanup of generated model artifacts.
        os.system("rm -rf *.txt *.bc *.concat *.out")
    print("")
def __main__(): in_objs = [] in_obj = in_objs[0] ow = in_obj['w'] w = ow * 2 factor = 64 // w rounds = in_obj['rounds'] m = hf.models() m.remote = False m.start("scratch-sha3-b" + str(w * 25) + "-collision", True) m.cms_args = [''] hf.models.vars.write_header() output_margin = 512 // factor input_margin = (25 * w - 2 * output_margin) algo = hf.algorithms.lookup('sha3')(w=w, rounds=rounds) algo.generate() ccollision = ['and'] for i in range(0, output_margin): ccollision.append(('equal', 'h1out' + str(i), 'h2out' + str(i))) ccollision = tuple(ccollision) hf.models.vars.write_clause('ccollision', ccollision, '50-collision.txt') cinput = ['or'] for i in range(0, input_margin): cinput.append(('not', ('equal', 'h1in' + str(i), 'h2in' + str(i)))) cinput = tuple(cinput) hf.models.vars.write_clause('cinput', cinput, '52-input.txt') czero = ['and'] for i in range(input_margin, 25 * w): czero.append(('equal', 'h1in' + str(i), 'F')) czero.append(('equal', 'h2in' + str(i), 'F')) czero = tuple(czero) hf.models.vars.write_clause('czero', czero, '54-zero.txt') cexpand = ['and'] for i in range(0, rounds - 1): k = 'r' + str(i) if not k in in_obj: continue for i in range(0, len(in_obj[k]), ow): delta = in_obj[k][i:i + ow] pos = i * 2 print(pos) cexpand.append( hf.models.vars.differential(delta, 'h1' + k + 'i', pos, 'h2' + k + 'i', pos)) cexpand = tuple(cexpand) hf.models.vars.write_clause('cexpand', cexpand, '56-expand.txt') ceinput = ['and'] for i in range(0, len(in_obj['input']), ow): delta = in_obj['input'][i:i + ow] pos = i * 2 ceinput.append( hf.models.vars.differential(delta, 'h1in', pos, 'h2in', pos)) ceinput = tuple(ceinput) #print(cexpand) hf.models.vars.write_clause('ceinput', ceinput, '58-einput.txt') hf.models.vars.write_assign( ['ccollision', 'cinput', 'czero', 'cexpand', 'ceinput']) m.collapse() m.build() r = m.run(count=1) if r: for rs in m.results_generator(algo): obj = {} obj['w'] = w obj['rounds'] = rounds obj['input'] = hf.models.vars.compute_ddelta( rs['h1i'], 
rs['h2i'])[0:input_margin] for r in range(0, rounds): obj['r' + str(r)] = hf.models.vars.compute_ddelta( rs['h1r' + str(r) + 'i'], rs['h2r' + str(r) + 'i']) print(obj)