def test_nonces_with_slices(self, seed: ElementModQ):
    n = Nonces(seed)
    count: int = 0
    l: List[ElementModQ] = []
    for i in iter(n):
        count += 1
        l.append(i)
        if count == 10:
            break
    self.assertEqual(len(l), 10)

    l2 = Nonces(seed)[0:10]
    self.assertEqual(len(l2), 10)
    self.assertEqual(l, l2)
def fast_decrypt_tally(
    tally: TALLY_TYPE,
    cec: CiphertextElectionContext,
    keypair: ElGamalKeyPair,
    proof_seed: ElementModQ,
    pool: Optional[Pool] = None,
    show_progress: bool = True,
) -> DECRYPT_TALLY_OUTPUT_TYPE:
    """
    Given a tally, as we might get from `fast_tally_ballots`, this decrypts the tally and
    returns a dict from selection object_ids to tuples containing the decrypted total as
    well as a Chaum-Pedersen proof that the total corresponds to the ciphertext.
    """
    tkeys = tally.keys()
    proof_seeds: List[ElementModQ] = Nonces(proof_seed)[0:len(tkeys)]
    inputs = [
        DecryptInput(object_id, seed, tally[object_id])
        for seed, object_id in zip(proof_seeds, tkeys)
    ]

    # Performance note: at this point, the tallies have been computed, so we
    # don't actually have all that much data left to process. There's almost
    # certainly no benefit to distributing this on a cluster.

    if show_progress:  # pragma: no cover
        inputs = tqdm(list(inputs), "Decrypting")

    wrapped_func = functools.partial(_decrypt, cec, keypair)
    result: List[DecryptOutput] = (
        [wrapped_func(x) for x in inputs]
        if pool is None
        else pool.map(func=wrapped_func, iterable=inputs)
    )

    return {r.object_id: (r.plaintext, r.decryption_proof) for r in result}
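# Usage sketch (added for illustration, not part of the original source): how
# `fast_decrypt_tally` might be called after tallying, assuming `cec` was built from
# `keypair.public_key` (as in `fast_tally_everything` below) and `ciphertext_ballots`
# came from `fast_encrypt_ballots`. The function and parameter names prefixed with
# `_example_` are hypothetical.
def _example_fast_decrypt(ciphertext_ballots, cec, keypair, pool=None) -> None:
    tally = fast_tally_ballots(ciphertext_ballots, pool)
    decrypted = fast_decrypt_tally(tally, cec, keypair, rand_q(), pool, show_progress=False)
    for object_id, (plaintext_total, proof) in decrypted.items():
        # each entry pairs the decrypted total with its Chaum-Pedersen proof
        print(f"{object_id}: {plaintext_total}")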
def test_reduce_with_ray_wait_with_progress(
    self, counters: List[int], keypair: ElGamalKeyPair
) -> None:
    nonces = Nonces(int_to_q(3))[0:len(counters)]
    pbar = ProgressBar(
        {"Ballots": len(counters), "Tallies": len(counters), "Iterations": 0}
    )
    ciphertexts: List[ObjectRef] = [
        r_encrypt.remote(pbar.actor, p, n, keypair.public_key)
        for p, n in zip(counters, nonces)
    ]

    # compute in parallel
    ptotal = ray.get(
        ray_reduce_with_ray_wait(
            inputs=ciphertexts,
            shard_size=3,
            reducer_first_arg=pbar.actor,
            reducer=r_elgamal_add.remote,
            progressbar=pbar,
            progressbar_key="Tallies",
            timeout=None,
            verbose=False,
        )
    )

    # recompute serially
    stotal = elgamal_add(*ray.get(ciphertexts))

    self.assertEqual(stotal, ptotal)
def test_electionguard_basics(self) -> None:
    plaintexts = range(0, 1000)
    nonces = Nonces(int_to_q(3))
    keypair = elgamal_keypair_random()
    r_public_key = ray.put(keypair.public_key)

    start = timer()
    serial_ciphertexts: List[ElGamalCiphertext] = [
        elgamal_encrypt(p, n, keypair.public_key)
        for p, n in zip(plaintexts, nonces)
    ]
    serial_time = timer()

    # List[ObjectRef[ElGamalCiphertext]]
    parallel_ciphertext_objects: List[ObjectRef] = [
        r_encrypt.remote(p, n, r_public_key) for p, n in zip(plaintexts, nonces)
    ]
    parallel_ciphertexts: List[ElGamalCiphertext] = ray.get(
        parallel_ciphertext_objects
    )
    parallel_time = timer()

    self.assertEqual(serial_ciphertexts, parallel_ciphertexts)
    print(
        f"Parallel speedup: {(serial_time - start) / (parallel_time - serial_time):.3f}x"
    )
def test_gmpy2_parallelism_is_safe(self):
    cpus = cpu_count()
    problem_size = 1000
    secret_keys = Nonces(int_to_q_unchecked(3))[
        0:problem_size
    ]  # list of 1000 might-as-well-be-random Q's
    log_info(
        f"testing GMPY2 powmod parallelism safety (cpus = {cpus}, problem_size = {problem_size})"
    )

    # compute in parallel
    start = timer()
    p = Pool(cpus)
    keypairs = p.map(elgamal_keypair_from_secret, secret_keys)
    end1 = timer()

    # verify scalar
    for keypair in keypairs:
        self.assertEqual(
            keypair.public_key,
            elgamal_keypair_from_secret(keypair.secret_key).public_key,
        )
    end2 = timer()

    p.close()  # apparently necessary to avoid warnings from the Pool system
    log_info(f"Parallelism speedup: {(end2 - end1) / (end1 - start):.3f}")
def test_gmpy2_parallelism_is_safe(self): """ Ensures running lots of parallel exponentiations still yields the correct answer. This verifies that nothing incorrect is happening in the GMPY2 library """ # Arrange scheduler = Scheduler() problem_size = 1000 random_secret_keys = Nonces(int_to_q_unchecked(3))[0:problem_size] log_info( f"testing GMPY2 powmod parallelism safety (cpus = {scheduler.cpu_count}, problem_size = {problem_size})" ) # Act start = timer() keypairs = scheduler.schedule( elgamal_keypair_from_secret, [list([secret_key]) for secret_key in random_secret_keys], ) end1 = timer() # Assert for keypair in keypairs: self.assertEqual( keypair.public_key, elgamal_keypair_from_secret(keypair.secret_key).public_key, ) end2 = timer() scheduler.close() log_info(f"Parallelism speedup: {(end2 - end1) / (end1 - start):.3f}")
def test_eg_conversion(self, state: DominionBallotsAndContext, seed: ElementModQ) -> None:
    ied = InternalElectionDescription(state.ed)
    ballot_box = BallotBox(ied, state.cec)
    seed_hash = EncryptionDevice("Location").get_hash()
    nonces = Nonces(seed)[0:len(state.ballots)]

    for b, n in zip(state.ballots, nonces):
        eb = encrypt_ballot(b, ied, state.cec, seed_hash, n)
        self.assertIsNotNone(eb)

        pb = decrypt_ballot_with_secret(
            eb,
            ied,
            state.cec.crypto_extended_base_hash,
            state.cec.elgamal_public_key,
            state.secret_key,
        )
        self.assertEqual(b, pb)

        self.assertGreater(len(eb.contests), 0)
        cast_result = ballot_box.cast(eb)
        self.assertIsNotNone(cast_result)

    tally = tally_ballots(ballot_box._store, ied, state.cec)
    self.assertIsNotNone(tally)

    results = decrypt_tally_with_secret(tally, state.secret_key)
    self.assertEqual(len(results.keys()), len(state.id_map.keys()))

    for obj_id in results.keys():
        self.assertIn(obj_id, state.id_map)
        cvr_sum = int(state.dominion_cvrs.data[state.id_map[obj_id]].sum())
        decryption = results[obj_id]
        self.assertEqual(cvr_sum, decryption)
def ray_decrypt_tally(
    tally: TALLY_TYPE,
    cec: ObjectRef,  # ObjectRef[CiphertextElectionContext]
    keypair: ObjectRef,  # ObjectRef[ElGamalKeyPair]
    proof_seed: ElementModQ,
) -> DECRYPT_TALLY_OUTPUT_TYPE:
    """
    Given a tally, this decrypts the tally and returns a dict from selection object_ids
    to tuples containing the decrypted total as well as a Chaum-Pedersen proof that the
    total corresponds to the ciphertext.

    :param tally: an election tally
    :param cec: a Ray ObjectRef containing a `CiphertextElectionContext`
    :param keypair: a Ray ObjectRef containing an `ElGamalKeyPair`
    :param proof_seed: an ElementModQ
    """
    tkeys = tally.keys()
    proof_seeds: List[ElementModQ] = Nonces(proof_seed)[0:len(tkeys)]
    inputs: List[DecryptInput] = [
        DecryptInput(object_id, seed, tally[object_id])
        for seed, object_id in zip(proof_seeds, tkeys)
    ]

    # We can't be lazy here: we need to have all this data in hand so we can
    # rearrange it into a dictionary and return it.
    result: List[Optional[DecryptOutput]] = ray.get(
        [r_decrypt.remote(cec, keypair, x) for x in inputs]
    )

    if None in result:
        log_and_print(
            "Unexpected failure in ray_decrypt_tally, returning an empty dict",
            True,
        )
        return {}

    # mypy can't figure out that None has already been ruled out, so we filter for None again.
    return {
        r.object_id: (r.plaintext, r.decryption_proof)
        for r in result
        if r is not None
    }
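# Usage sketch (added for illustration, not part of the original source): `ray_decrypt_tally`
# expects the election context and keypair to already live in the Ray object store, as in
# `ray_tally_everything` below. The `_example_` helper and its variable names are hypothetical.
def _example_ray_decrypt(tally: TALLY_TYPE, cec, keypair) -> DECRYPT_TALLY_OUTPUT_TYPE:
    r_cec = ray.put(cec)          # ObjectRef[CiphertextElectionContext]
    r_keypair = ray.put(keypair)  # ObjectRef[ElGamalKeyPair]
    return ray_decrypt_tally(tally, r_cec, r_keypair, rand_q())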
def test_reduce_with_rounds_without_progress(
    self, counters: List[int], keypair: ElGamalKeyPair
) -> None:
    nonces = Nonces(int_to_q(3))[0:len(counters)]
    ciphertexts: List[ObjectRef] = [
        r_encrypt.remote(None, p, n, keypair.public_key)
        for p, n in zip(counters, nonces)
    ]

    # compute in parallel
    ptotal = ray.get(
        ray_reduce_with_rounds(
            inputs=ciphertexts,
            shard_size=3,
            reducer_first_arg=None,
            reducer=r_elgamal_add.remote,
            progressbar=None,
            verbose=True,
        )
    )

    # recompute serially
    stotal = elgamal_add(*ray.get(ciphertexts))

    self.assertEqual(stotal, ptotal)
def test_nonces_type_errors(self):
    n = Nonces(int_to_q_unchecked(3))
    self.assertRaises(TypeError, len, n)
    self.assertRaises(TypeError, lambda: n[1:])
    self.assertRaises(TypeError, lambda: n.get_with_headers(-1))
def ray_tally_everything(
    cvrs: DominionCSV,
    verbose: bool = True,
    use_progressbar: bool = True,
    date: Optional[datetime] = None,
    seed_hash: Optional[ElementModQ] = None,
    master_nonce: Optional[ElementModQ] = None,
    secret_key: Optional[ElementModQ] = None,
    root_dir: Optional[str] = None,
) -> "RayTallyEverythingResults":
    """
    This top-level function takes a collection of Dominion CVRs and produces everything that
    we might want for arlo-e2e: a list of encrypted ballots, their encrypted and decrypted tally,
    and proofs of the correctness of the whole thing. The election `secret_key` is an optional
    parameter. If absent, a random keypair is generated and used. Similarly, if a `seed_hash` or
    `master_nonce` is not provided, random ones are generated and used.

    For parallelism, Ray is used. Make sure you've called `ray.init()` or `ray_localhost_init()`
    before calling this.

    If `root_dir` is specified, then the tally is written out to the specified directory, and
    the resulting `RayTallyEverythingResults` object will support the methods that allow those
    ballots to be read back in again. Conversely, if `root_dir` is `None`, then nothing is
    written to disk, and the result will not have access to individual ballots.
    """
    rows, cols = cvrs.data.shape

    ray_wait_for_workers(min_workers=2)

    if date is None:
        date = datetime.now()

    if root_dir is not None:
        mkdir_helper(root_dir, num_retries=NUM_WRITE_RETRIES)
        r_manifest_aggregator = ManifestAggregatorActor.remote(root_dir)  # type: ignore
    else:
        r_manifest_aggregator = None

    r_root_dir = ray.put(root_dir)

    start_time = timer()

    # Performance note: by using to_election_description_ray rather than to_election_description, we're
    # only getting back a list of dictionaries rather than a list of PlaintextBallots. We're pushing that
    # work out into the nodes, where it will run in parallel. The BallotPlaintextFactory wraps up all
    # the (immutable) state necessary to convert from these dicts to PlaintextBallots and is meant to
    # be sent to every node in the cluster.

    ed, bpf, ballot_dicts, id_map = cvrs.to_election_description_ray(date=date)
    setup_time = timer()
    num_ballots = len(ballot_dicts)
    assert num_ballots > 0, "can't have zero ballots!"
    log_and_print(
        f"ElectionGuard setup time: {setup_time - start_time: .3f} sec, {num_ballots / (setup_time - start_time):.3f} ballots/sec"
    )

    keypair = (
        elgamal_keypair_random()
        if secret_key is None
        else elgamal_keypair_from_secret(secret_key)
    )
    assert keypair is not None, "unexpected failure with keypair computation"
    secret_key, public_key = keypair

    cec = make_ciphertext_election_context(
        number_of_guardians=1,
        quorum=1,
        elgamal_public_key=public_key,
        description_hash=ed.crypto_hash(),
    )
    r_cec = ray.put(cec)

    ied = InternalElectionDescription(ed)
    r_ied = ray.put(ied)

    if seed_hash is None:
        seed_hash = rand_q()
    r_seed_hash = ray.put(seed_hash)

    r_keypair = ray.put(keypair)

    r_ballot_plaintext_factory = ray.put(bpf)

    if master_nonce is None:
        master_nonce = rand_q()

    nonces = Nonces(master_nonce)
    r_nonces = ray.put(nonces)
    nonce_indices = range(num_ballots)

    inputs = list(zip(ballot_dicts, nonce_indices))
    batches = shard_list_uniform(inputs, BATCH_SIZE)
    num_batches = len(batches)
    log_and_print(
        f"Launching Ray.io remote encryption! (number of batches: {num_batches})"
    )

    start_time = timer()

    progressbar = (
        ProgressBar(
            {
                "Ballots": num_ballots,
                "Tallies": num_ballots,
                "Iterations": 0,
                "Batch": 0,
            }
        )
        if use_progressbar
        else None
    )
    progressbar_actor = progressbar.actor if progressbar is not None else None

    batch_tallies: List[ObjectRef] = []
    for batch in batches:
        if progressbar_actor:
            progressbar_actor.update_completed.remote("Batch", 1)

        num_ballots_in_batch = len(batch)
        sharded_inputs = shard_list_uniform(batch, BALLOTS_PER_SHARD)
        num_shards = len(sharded_inputs)

        partial_tally_refs = [
            r_encrypt_and_write.remote(
                r_ied,
                r_cec,
                r_seed_hash,
                r_root_dir,
                r_manifest_aggregator,
                progressbar_actor,
                r_ballot_plaintext_factory,
                r_nonces,
                right_tuple_list(shard),
                *(left_tuple_list(shard)),
            )
            for shard in sharded_inputs
        ]

        # log_and_print("Remote tallying.")
        btally = ray_tally_ballots(partial_tally_refs, BALLOTS_PER_SHARD, progressbar)
        batch_tallies.append(btally)

    # Each batch ultimately yields one partial tally; we add these up here at the
    # very end. If we have a million ballots and have batches of 10k ballots, this
    # would mean we'd have only 100 partial tallies. So, what's here works just fine.
    # If we wanted, we could certainly burn some scalar time and keep a running,
    # singular, partial tally. It's probably more important to push onward to the
    # next batch, so we can do as much work in parallel as possible.
    if len(batch_tallies) > 1:
        tally = ray.get(ray_tally_ballots(batch_tallies, 10, progressbar))
    else:
        tally = ray.get(batch_tallies[0])

    if progressbar:
        progressbar.close()

    assert tally is not None, "tally failed!"

    log_and_print("Tally decryption.")
    decrypted_tally: DECRYPT_TALLY_OUTPUT_TYPE = ray_decrypt_tally(
        tally, r_cec, r_keypair, seed_hash
    )

    log_and_print("Validating tally.")

    # Sanity-checking logic: make sure we don't have any unexpected keys, and that the decrypted totals
    # match up with the columns in the original plaintext data.
    tally_keys = set(decrypted_tally.keys())
    expected_keys = set(id_map.keys())

    assert tally_keys.issubset(
        expected_keys
    ), f"bad tally keys (actual keys: {sorted(tally_keys)}, expected keys: {sorted(expected_keys)})"

    for obj_id in decrypted_tally.keys():
        cvr_sum = int(cvrs.data[id_map[obj_id]].sum())
        decryption, proof = decrypted_tally[obj_id]
        assert cvr_sum == decryption, f"decryption failed for {obj_id}"

    final_manifest: Optional[Manifest] = None

    if root_dir is not None:
        final_manifest = ray.get(r_manifest_aggregator.result.remote())
        assert isinstance(
            final_manifest, Manifest
        ), "type error: bad result from manifest aggregation"

    # Assemble the data structure that we're returning. Having nonces in the ciphertext makes these
    # structures sensitive for writing out to disk, but otherwise they're ready to go.
    log_and_print("Constructing results.")
    reported_tally: Dict[str, SelectionInfo] = {
        k: SelectionInfo(
            object_id=k,
            encrypted_tally=tally[k],
            # we need to forcibly convert mpz to int here to make serialization work properly
            decrypted_tally=int(decrypted_tally[k][0]),
            proof=decrypted_tally[k][1],
        )
        for k in tally.keys()
    }

    tabulate_time = timer()

    log_and_print(
        f"Encryption and tabulation: {rows} ballots, {rows / (tabulate_time - start_time): .3f} ballot/sec",
        verbose,
    )

    return RayTallyEverythingResults(
        metadata=cvrs.metadata,
        cvr_metadata=cvrs.dataframe_without_selections(),
        election_description=ed,
        num_ballots=rows,
        manifest=final_manifest,
        tally=SelectionTally(reported_tally),
        context=cec,
    )
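# Usage sketch (added for illustration, not part of the original source): the docstring above
# says Ray must be initialized before calling `ray_tally_everything`. A minimal driver might
# look like the following; the zero-argument call to `ray_localhost_init()` is an assumption
# based on the docstring, and the `_example_` helper is hypothetical.
def _example_ray_tally(cvrs: DominionCSV) -> "RayTallyEverythingResults":
    ray_localhost_init()
    try:
        # root_dir=None keeps everything in memory; pass a directory to persist ballots
        return ray_tally_everything(cvrs, verbose=True, root_dir=None)
    finally:
        ray.shutdown()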
def test_accumulation_encryption_decryption(
    self,
    everything: ELECTIONS_AND_BALLOTS_TUPLE_TYPE,
    nonce: ElementModQ,
):
    """
    Tests that decryption is the inverse of encryption over arbitrarily generated elections and ballots.

    This test uses an arbitrarily generated dataset with a single public-private keypair for the election
    encryption context. It also manually verifies that homomorphic accumulation works as expected.
    """
    # Arrange
    election_description, metadata, ballots, secret_key, context = everything

    # Tally the plaintext ballots for comparison later
    plaintext_tallies = accumulate_plaintext_ballots(ballots)

    num_ballots = len(ballots)
    num_contests = len(metadata.contests)
    zero_nonce, *nonces = Nonces(nonce)[: num_ballots + 1]
    self.assertEqual(len(nonces), num_ballots)
    self.assertTrue(len(metadata.contests) > 0)

    # Generate a valid encryption of zero
    encrypted_zero = elgamal_encrypt(0, zero_nonce, context.elgamal_public_key)

    # Act
    encrypted_ballots = []

    # encrypt each ballot
    for i in range(num_ballots):
        encrypted_ballot = encrypt_ballot(
            ballots[i], metadata, context, SEED_HASH, nonces[i]
        )
        encrypted_ballots.append(encrypted_ballot)

        # sanity check the encryption
        self.assertIsNotNone(encrypted_ballot)
        self.assertEqual(num_contests, len(encrypted_ballot.contests))

        # decrypt the ballot with secret and verify it matches the plaintext
        decrypted_ballot = decrypt_ballot_with_secret(
            ballot=encrypted_ballot,
            election_metadata=metadata,
            crypto_extended_base_hash=context.crypto_extended_base_hash,
            public_key=context.elgamal_public_key,
            secret_key=secret_key,
            remove_placeholders=True,
        )
        self.assertEqual(ballots[i], decrypted_ballot)

    # homomorphically accumulate the encrypted ballot representations
    encrypted_tallies = _accumulate_encrypted_ballots(encrypted_zero, encrypted_ballots)

    decrypted_tallies = {}
    for object_id in encrypted_tallies.keys():
        decrypted_tallies[object_id] = encrypted_tallies[object_id].decrypt(secret_key)

    # loop through the contest descriptions and verify
    # the decrypted tallies match the plaintext tallies
    for contest in metadata.contests:
        # Sanity check the generated data
        self.assertTrue(len(contest.ballot_selections) > 0)
        self.assertTrue(len(contest.placeholder_selections) > 0)

        decrypted_selection_tallies = [
            decrypted_tallies[selection.object_id]
            for selection in contest.ballot_selections
        ]
        decrypted_placeholder_tallies = [
            decrypted_tallies[placeholder.object_id]
            for placeholder in contest.placeholder_selections
        ]
        plaintext_tally_values = [
            plaintext_tallies[selection.object_id]
            for selection in contest.ballot_selections
        ]

        # verify the plaintext tallies match the decrypted tallies
        self.assertEqual(decrypted_selection_tallies, plaintext_tally_values)

        # validate the right number of selections including placeholders across all ballots
        self.assertEqual(
            contest.number_elected * num_ballots,
            sum(decrypted_selection_tallies) + sum(decrypted_placeholder_tallies),
        )
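# Illustration (added, not part of the original source): the homomorphic property the test
# above relies on, shown in isolation. Adding ElGamal ciphertexts with `elgamal_add` yields
# an encryption of the sum of the plaintexts. The `_example_` helper and its nonce/keypair
# parameters are hypothetical inputs supplied by the caller.
def _example_homomorphic_sum(
    keypair: ElGamalKeyPair, nonce1: ElementModQ, nonce2: ElementModQ
) -> None:
    c1 = get_optional(elgamal_encrypt(1, nonce1, keypair.public_key))
    c2 = get_optional(elgamal_encrypt(2, nonce2, keypair.public_key))
    total = elgamal_add(c1, c2)
    # decrypting the homomorphic sum recovers 1 + 2 = 3
    assert total.decrypt(keypair.secret_key) == 3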
    valid = proof.is_valid(ciphertext, keypair.public_key)
    end2 = timer()

    if not valid:
        raise Exception("Wasn't expecting an invalid proof during a benchmark!")

    return end1 - start1, end2 - end1


def identity(x: int) -> int:
    """Placeholder function used just to warm up the parallel mapper prior to benchmarking."""
    return x


if __name__ == "__main__":
    problem_sizes = (100, 500, 1000, 5000)
    rands = Nonces(int_to_q_unchecked(31337))
    speedup: Dict[int, float] = {}

    print(f"CPUs detected: {cpu_count()}, launching thread pool")
    pool = Pool(cpu_count())

    # warm up the pool to help get consistent measurements
    results = pool.map(identity, range(1, 30000))
    assert results == list(range(1, 30000))

    bench_start = timer()

    for size in problem_sizes:
        print("Benchmarking on problem size: ", size)
        seeds = rands[0:size]
        inputs = [
            BenchInput(
def fast_tally_everything(
    cvrs: DominionCSV,
    pool: Optional[Pool] = None,
    verbose: bool = True,
    date: Optional[datetime] = None,
    seed_hash: Optional[ElementModQ] = None,
    master_nonce: Optional[ElementModQ] = None,
    secret_key: Optional[ElementModQ] = None,
    use_progressbar: bool = True,
) -> FastTallyEverythingResults:
    """
    This top-level function takes a collection of Dominion CVRs and produces everything that
    we might want for arlo-e2e: a list of encrypted ballots, their encrypted and decrypted tally,
    and proofs of the correctness of the whole thing. The election `secret_key` is an optional
    parameter. If absent, a random keypair is generated and used. Similarly, if a `seed_hash` or
    `master_nonce` is not provided, random ones are generated and used.

    For parallelism, a `multiprocessing.pool.Pool` may be provided, and should result in significant
    speedups on multicore computers. If absent, the computation will proceed sequentially.
    """
    rows, cols = cvrs.data.shape

    if date is None:
        date = datetime.now()

    parse_time = timer()
    log_and_print(f"Rows: {rows}, cols: {cols}", verbose)

    ed, ballots, id_map = cvrs.to_election_description(date=date)
    assert len(ballots) > 0, "can't have zero ballots!"

    keypair = (
        elgamal_keypair_random()
        if secret_key is None
        else elgamal_keypair_from_secret(secret_key)
    )
    assert keypair is not None, "unexpected failure with keypair computation"
    secret_key, public_key = keypair

    # This computation exists only to cause side-effects in the DLog engine, so the lame nonce is not an issue.
    assert len(ballots) == get_optional(
        elgamal_encrypt(
            m=len(ballots), nonce=int_to_q_unchecked(3), public_key=public_key
        )
    ).decrypt(secret_key), "got wrong ElGamal decryption!"

    dlog_prime_time = timer()
    log_and_print(
        f"DLog prime time (n={len(ballots)}): {dlog_prime_time - parse_time: .3f} sec",
        verbose,
    )

    cec = make_ciphertext_election_context(
        number_of_guardians=1,
        quorum=1,
        elgamal_public_key=public_key,
        description_hash=ed.crypto_hash(),
    )

    ied = InternalElectionDescription(ed)

    # REVIEW THIS: is this cryptographically sound? Is the seed_hash properly a secret? Should
    # it go in the output? The nonces are clearly secret. If you know them, you can decrypt.
    if seed_hash is None:
        seed_hash = rand_q()
    if master_nonce is None:
        master_nonce = rand_q()
    nonces: List[ElementModQ] = Nonces(master_nonce)[0:len(ballots)]

    # even if verbose is false, we still want to see the progress bar for the encryption
    cballots = fast_encrypt_ballots(
        ballots, ied, cec, seed_hash, nonces, pool, use_progressbar=use_progressbar
    )
    eg_encrypt_time = timer()

    log_and_print(
        f"Encryption time: {eg_encrypt_time - dlog_prime_time: .3f} sec", verbose
    )
    log_and_print(
        f"Encryption rate: {rows / (eg_encrypt_time - dlog_prime_time): .3f} ballot/sec",
        verbose,
    )

    tally: TALLY_TYPE = fast_tally_ballots(cballots, pool)
    eg_tabulate_time = timer()

    log_and_print(
        f"Tabulation time: {eg_tabulate_time - eg_encrypt_time: .3f} sec", verbose
    )
    log_and_print(
        f"Tabulation rate: {rows / (eg_tabulate_time - eg_encrypt_time): .3f} ballot/sec",
        verbose,
    )
    log_and_print(
        f"Encryption and tabulation: {rows} ballots / {eg_tabulate_time - dlog_prime_time: .3f} sec = {rows / (eg_tabulate_time - dlog_prime_time): .3f} ballot/sec",
        verbose,
    )

    assert tally is not None, "tally failed!"

    if verbose:  # pragma: no cover
        print("Decryption & Proofs: ")
    decrypted_tally: DECRYPT_TALLY_OUTPUT_TYPE = fast_decrypt_tally(
        tally, cec, keypair, seed_hash, pool, verbose
    )
    eg_decryption_time = timer()
    log_and_print(
        f"Decryption time: {eg_decryption_time - eg_tabulate_time: .3f} sec", verbose
    )
    log_and_print(
        f"Decryption rate: {len(decrypted_tally.keys()) / (eg_decryption_time - eg_tabulate_time): .3f} selection/sec",
        verbose,
    )

    # Sanity-checking logic: make sure we don't have any unexpected keys, and that the decrypted totals
    # match up with the columns in the original plaintext data.
    for obj_id in decrypted_tally.keys():
        assert obj_id in id_map, "object_id in results that we don't know about!"
        cvr_sum = int(cvrs.data[id_map[obj_id]].sum())
        decryption, proof = decrypted_tally[obj_id]
        assert cvr_sum == decryption, f"decryption failed for {obj_id}"

    # Assemble the data structure that we're returning. Having nonces in the ciphertext makes these
    # structures sensitive for writing out to disk, but otherwise they're ready to go.
    reported_tally: Dict[str, SelectionInfo] = {
        k: SelectionInfo(
            object_id=k,
            encrypted_tally=tally[k],
            # we need to forcibly convert mpz to int here to make serialization work properly
            decrypted_tally=int(decrypted_tally[k][0]),
            proof=decrypted_tally[k][1],
        )
        for k in tally.keys()
    }

    # strips the ballots of their nonces, which is important because those could allow for decryption
    accepted_ballots = [ciphertext_ballot_to_accepted(x) for x in cballots]

    return FastTallyEverythingResults(
        metadata=cvrs.metadata,
        cvr_metadata=cvrs.dataframe_without_selections(),
        election_description=ed,
        encrypted_ballot_memos={
            ballot.object_id: make_memo_value(ballot) for ballot in accepted_ballots
        },
        tally=SelectionTally(reported_tally),
        context=cec,
    )
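# Usage sketch (added for illustration, not part of the original source): a minimal driver for
# `fast_tally_everything` with a multiprocessing pool, per the docstring above. The `_example_`
# helper name is hypothetical; `cvrs` is a caller-supplied `DominionCSV`.
def _example_fast_tally(cvrs: DominionCSV) -> FastTallyEverythingResults:
    pool = Pool(cpu_count())
    try:
        return fast_tally_everything(cvrs, pool=pool, verbose=False)
    finally:
        pool.close()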
def test_nonces_seed_matters(self, seed1: ElementModQ, seed2: ElementModQ, i: int):
    assume(seed1 != seed2)
    n1 = Nonces(seed1)
    n2 = Nonces(seed2)
    self.assertNotEqual(n1[i], n2[i])
def test_nonces_deterministic(self, seed: ElementModQ, i: int):
    n1 = Nonces(seed)
    n2 = Nonces(seed)
    self.assertEqual(n1[i], n2[i])
def test_nonces_iterable(self, seed: ElementModQ):
    n = Nonces(seed)
    i = iter(n)
    q0 = next(i)
    q1 = next(i)
    self.assertTrue(q0 != q1)
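# Summary sketch (added, not part of the original source): the `Nonces` behaviors the tests
# above exercise, gathered in one place. The seed value 12345 is arbitrary and the `_example_`
# helper is hypothetical.
def _example_nonces_usage() -> None:
    seed = int_to_q_unchecked(12345)
    nonces = Nonces(seed)

    first_ten = nonces[0:10]             # slicing yields a list of ElementModQ
    assert nonces[0] == Nonces(seed)[0]  # same seed, same sequence (deterministic)
    assert first_ten[0] != first_ten[1]  # successive nonces differ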