def verify(self, circuits, num_nodes, num_threads, qubit_limit, eval_mode):
    if self.verbose:
        print('*'*20, 'Verify', '*'*20)
    self.circuits = circuits
    errors = {}
    row_format = '{:<20} {:<10} {:<30}'
    if self.verbose:
        print(row_format.format('Circuit Name', 'QPU', 'Error'))
    for circuit_name in self.circuits:
        max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    eval_mode=None, num_threads=None, qubit_limit=None, field='cutter')
        cut_solution = read_dict(filename='%s/cut_solution.pckl' % source_folder)
        circuit = cut_solution['circuit']
        complete_path_map = cut_solution['complete_path_map']
        subcircuits = cut_solution['subcircuits']
        summation_terms = pickle.load(open('%s/summation_terms.pckl' % source_folder, 'rb'))
        smart_order = [x[0] for x in summation_terms[0]]
        dest_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  num_threads=num_threads, eval_mode=eval_mode, qubit_limit=qubit_limit, field='build')
        build_output = read_dict(filename='%s/build_output.pckl' % dest_folder)
        reconstructed_prob = build_output['reconstructed_prob']
        num_summation_terms_sampled = build_output['num_summation_terms_sampled']
        num_summation_terms = build_output['num_summation_terms']
        # NOTE: this calls the module-level verify helper, not this method
        squared_error = verify(full_circuit=circuit, unordered=reconstructed_prob,
                               complete_path_map=complete_path_map, subcircuits=subcircuits,
                               smart_order=smart_order)
        key = (circuit_name, eval_mode)
        errors[key] = squared_error
        if self.verbose:
            print(row_format.format(*key, '%.1e' % squared_error))
    if self.verbose:
        print()
    return errors
def _write_all_files(self, job: QuantumExecutionJob, eval_mode: str):
    """Create all missing files for the post-processing of the CutQC framework

    Args:
        job (QuantumExecutionJob): the partitioned QuantumExecutionJob
        eval_mode (str)
    """
    circuit_name = job.id
    cut_solution = self._partition_dict[job.id]["cut_solution"]
    max_subcircuit_qubit = cut_solution['max_subcircuit_qubit']
    source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                field='cutter')
    if not os.path.exists(source_folder):
        os.makedirs(source_folder)
    pickle.dump(cut_solution, open('%s/subcircuits.pckl' % (source_folder), 'wb'))
    eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=None, num_threads=None, eval_mode=eval_mode, qubit_limit=None,
                              field='evaluator')
    if not os.path.exists(eval_folder):
        os.makedirs(eval_folder)
    all_indexed_combinations = self._partition_dict[job.id]["all_indexed_combinations"]
    pickle.dump(all_indexed_combinations, open('%s/all_indexed_combinations.pckl' % (eval_folder), 'wb'))
def post_process(self, job: QuantumExecutionJob, eval_mode: str, num_threads: int, early_termination: int,
                 qubit_limit: int, recursion_depth: int) -> np.ndarray:
    """Calculate the probability distribution of the partitioned quantum circuit via the post-processing of the CutQC framework

    Args:
        job (QuantumExecutionJob)
        eval_mode (str)
        num_threads (int)
        early_termination (int)
        qubit_limit (int)
        recursion_depth (int)

    Returns:
        np.ndarray: probability distribution
    """
    self._log.debug('Postprocess, job = %s' % job.id)
    # Remove stale binaries and recompile the C kernels (merge, build) with icc
    subprocess.run(['rm', './cutqc/merge'])
    subprocess.run(['icc', '-mkl', './cutqc/merge.c', '-o', './cutqc/merge', '-lm'])
    subprocess.run(['rm', './cutqc/build'])
    subprocess.run(['icc', '-fopenmp', '-mkl', '-lpthread', '-march=native', './cutqc/build.c', '-o', './cutqc/build', '-lm'])
    circuit_name = job.id
    cut_solution = self._partition_dict[job.id]["cut_solution"]
    max_subcircuit_qubit = cut_solution['max_subcircuit_qubit']
    full_circuit = cut_solution['circuit']
    subcircuits = cut_solution['subcircuits']
    complete_path_map = cut_solution['complete_path_map']
    counter = cut_solution['counter']
    circuit_case = '%s|%d' % (circuit_name, max_subcircuit_qubit)
    dest_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=early_termination, num_threads=num_threads, eval_mode=eval_mode,
                              qubit_limit=qubit_limit, field='build')
    if os.path.exists('%s' % dest_folder):
        subprocess.run(['rm', '-r', dest_folder])
    os.makedirs(dest_folder)
    vertical_collapse_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                           early_termination=early_termination, num_threads=None,
                                           eval_mode=eval_mode, qubit_limit=None, field='vertical_collapse')
    reconstructed_prob = None
    for recursion_layer in range(recursion_depth):
        self._log.debug('*'*20 + '%s Recursion Layer %d' % (circuit_case, recursion_layer) + '*'*20)
        recursion_qubit = qubit_limit
        self._log.debug('__Distribute__')
        distribute(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                   eval_mode=eval_mode, early_termination=early_termination, num_threads=num_threads,
                   qubit_limit=qubit_limit, recursion_layer=recursion_layer, recursion_qubit=recursion_qubit,
                   verbose=self._verbose)
        self._log.debug('__Merge__')
        terminated = self._merge(circuit_case=circuit_case, vertical_collapse_folder=vertical_collapse_folder,
                                 dest_folder=dest_folder, recursion_layer=recursion_layer, eval_mode=eval_mode)
        if terminated:
            break
        self._log.debug('__Build__')
        reconstructed_prob = self._build(circuit_case=circuit_case, dest_folder=dest_folder,
                                         recursion_layer=recursion_layer, eval_mode=eval_mode)
    return reconstructed_prob
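# A minimal usage sketch (not from the original source). It assumes an instance `engine`
# of the class above, with a partitioned QuantumExecutionJob `job` already registered in
# `engine._partition_dict` and its subcircuit results measured; all parameter values are
# illustrative placeholders only.
prob = engine.post_process(
    job=job,
    eval_mode='sv',        # statevector evaluation; 'runtime' and 'ibmq*' also appear in this module
    num_threads=4,
    early_termination=1,
    qubit_limit=24,
    recursion_depth=1,
)
print(prob.shape)          # one entry per computational basis state of the full circuit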
def _measure(self, eval_mode, num_nodes, num_threads):
    subprocess.run(['rm', './cutqc/measure'])
    subprocess.run(['icc', './cutqc/measure.c', '-o', './cutqc/measure', '-lm'])
    for circuit_case in self.circuit_cases:
        circuit_name = circuit_case.split('|')[0]
        max_subcircuit_qubit = int(circuit_case.split('|')[1])
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                    field='cutter')
        cut_solution = read_dict('%s/subcircuits.pckl' % (source_folder))
        if len(cut_solution) == 0:
            continue
        assert max_subcircuit_qubit == cut_solution['max_subcircuit_qubit']
        full_circuit = cut_solution['circuit']
        subcircuits = cut_solution['subcircuits']
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  early_termination=None, num_threads=None, eval_mode=eval_mode, qubit_limit=None,
                                  field='evaluator')
        for subcircuit_idx in range(len(subcircuits)):
            eval_files = glob.glob('%s/raw_%d_*.txt' % (eval_folder, subcircuit_idx))
            child_processes = []
            for rank in range(num_threads):
                process_eval_files = find_process_jobs(jobs=range(len(eval_files)), rank=rank, num_workers=num_threads)
                process_eval_files = [str(x) for x in process_eval_files]
                if rank == 0 and self.verbose:
                    print('%s subcircuit %d : rank %d/%d needs to measure %d/%d instances' % (
                        circuit_case, subcircuit_idx, rank, num_threads,
                        len(process_eval_files), len(eval_files)), flush=True)
                p = subprocess.Popen(args=[
                    './cutqc/measure', '%d' % rank, eval_folder, eval_mode,
                    '%d' % full_circuit.num_qubits, '%d' % subcircuit_idx,
                    '%d' % len(process_eval_files), *process_eval_files])
                child_processes.append(p)
            [cp.wait() for cp in child_processes]
def cut(self, circuits, max_subcircuit_qubit, num_subcircuits, max_cuts):
    self.circuits = circuits
    self._check_input()
    if self.verbose:
        print('*' * 20, 'Cut', '*' * 20)
    pool = mp.Pool(processes=mp.cpu_count())
    data = []
    for circuit_name in self.circuits:
        circuit = self.circuits[circuit_name]
        data.append([circuit, max_subcircuit_qubit, num_subcircuits, max_cuts, False])
    cut_solutions = pool.starmap(find_cuts, data)
    pool.close()
    for circuit_name, cut_solution in zip(self.circuits, cut_solutions):
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                    field='cutter')
        if os.path.exists(source_folder):
            subprocess.run(['rm', '-r', source_folder])
        os.makedirs(source_folder)
        pickle.dump(cut_solution, open('%s/subcircuits.pckl' % (source_folder), 'wb'))
        if self.verbose:
            print('{:s} : {:d} cuts --> {}'.format(
                circuit_name, len(cut_solution['positions']), cut_solution['counter']), flush=True)
def _measure(self, job: QuantumExecutionJob, eval_mode: str, num_threads: int):
    """Calls the measure routine of the CutQC framework

    Args:
        job (QuantumExecutionJob): Job to be measured
        eval_mode (str)
        num_threads (int)
    """
    subprocess.run(['rm', './cutqc/measure'])
    subprocess.run(['icc', './cutqc/measure.c', '-o', './cutqc/measure', '-lm'])
    cut_solution = self._partition_dict[job.id]["cut_solution"]
    max_subcircuit_qubit = cut_solution['max_subcircuit_qubit']
    full_circuit = cut_solution['circuit']
    subcircuits = cut_solution['subcircuits']
    eval_folder = get_dirname(circuit_name=job.id, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=None, num_threads=None, eval_mode=eval_mode,
                              qubit_limit=None, field='evaluator')
    for subcircuit_idx in range(len(subcircuits)):
        eval_files = glob.glob('%s/raw_%d_*.txt' % (eval_folder, subcircuit_idx))
        child_processes = []
        for rank in range(num_threads):
            process_eval_files = find_process_jobs(jobs=range(len(eval_files)), rank=rank, num_workers=num_threads)
            process_eval_files = [str(x) for x in process_eval_files]
            if rank == 0 and self._verbose:
                self._log.debug('%s subcircuit %d : rank %d/%d needs to measure %d/%d instances' % (
                    job.id, subcircuit_idx, rank, num_threads, len(process_eval_files), len(eval_files)))
            p = subprocess.Popen(args=[
                './cutqc/measure', '%d' % rank, eval_folder, eval_mode,
                '%d' % full_circuit.num_qubits, '%d' % subcircuit_idx,
                '%d' % len(process_eval_files), *process_eval_files])
            child_processes.append(p)
        [cp.wait() for cp in child_processes]
def _write(self, job: QuantumExecutionJob):
    """Write all information of a job to the disk for the post-processing

    Args:
        job (QuantumExecutionJob)
    """
    subcircuit_idx, inits, meas = job.key
    cut_solution = self._partition_dict[job.parent]["cut_solution"]
    all_indexed_combinations = self._partition_dict[job.parent]["all_indexed_combinations"]
    max_subcircuit_qubit = cut_solution['max_subcircuit_qubit']
    counter = cut_solution['counter']
    eval_folder = get_dirname(circuit_name=job.parent, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=None, num_threads=None, eval_mode="ibmq",
                              qubit_limit=None, field='evaluator')
    if not os.path.exists(eval_folder):
        os.makedirs(eval_folder)
    subcircuit_inst_prob = self._get_prob_dist(job)
    mutated_meas = mutate_measurement_basis(meas)
    for meas in mutated_meas:
        index = all_indexed_combinations[subcircuit_idx][(tuple(inits), tuple(meas))]
        eval_file_name = '%s/raw_%d_%d.txt' % (eval_folder, subcircuit_idx, index)
        eval_file = open(eval_file_name, 'w')
        eval_file.write('d=%d effective=%d\n' % (counter[subcircuit_idx]['d'], counter[subcircuit_idx]['effective']))
        [eval_file.write('%s ' % x) for x in inits]
        eval_file.write('\n')
        [eval_file.write('%s ' % x) for x in meas]
        eval_file.write('\n')
        if type(subcircuit_inst_prob) == np.ndarray:
            [eval_file.write('%e ' % x) for x in subcircuit_inst_prob]
        else:
            eval_file.write('%e ' % subcircuit_inst_prob)
        eval_file.close()
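# A minimal reader sketch (not part of the original source) for the raw_<subcircuit>_<index>.txt
# files produced by _write above. The layout follows directly from the writes in _write: one
# header line with `d` and `effective`, one line of init labels, one line of measurement-basis
# labels, and one line of space-separated probabilities. The function name is hypothetical.
def read_raw_eval_file(eval_file_name: str):
    with open(eval_file_name, 'r') as f:
        header = f.readline().split()          # e.g. ['d=3', 'effective=2']
        d = int(header[0].split('=')[1])
        effective = int(header[1].split('=')[1])
        inits = f.readline().split()
        meas = f.readline().split()
        probs = [float(x) for x in f.readline().split()]
    return d, effective, inits, meas, probs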
def _attribute_shots(self, subcircuit_results, eval_mode, all_subcircuit_entries_sampled):
    '''
    Attribute the shots into respective subcircuit entries
    '''
    row_format = '{:<15} {:<15} {:<25} {:<30}'
    if self.verbose:
        print('--> Attribute shots')
        print(row_format.format('circuit_name', 'subcircuit_idx', 'subcircuit_instance_idx',
                                'coefficient, subcircuit_entry_idx'))
    ctr = 0
    subcircuit_entry_probs = {}
    for key in subcircuit_results:
        ctr += 1
        circuit_name, subcircuit_idx, init, meas = key
        subcircuit_entries_sampled = all_subcircuit_entries_sampled[circuit_name]
        subcircuit_instance_prob = subcircuit_results[key]
        max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    eval_mode=None, num_threads=None, qubit_limit=None, field='cutter')
        subcircuit_instances_idx = read_dict(filename='%s/subcircuit_instances_idx.pckl' % source_folder)
        subcircuit_instance_idx = subcircuit_instances_idx[subcircuit_idx][(init, meas)]
        subcircuit_instance_attribution = read_dict(filename='%s/subcircuit_instance_attribution.pckl' % source_folder)
        attributions = subcircuit_instance_attribution[subcircuit_idx][subcircuit_instance_idx]
        if self.verbose and ctr <= 10:
            print(row_format.format(circuit_name, subcircuit_idx, subcircuit_instance_idx, str(attributions)[:30]))
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  eval_mode=eval_mode, num_threads=None, qubit_limit=None, field='evaluator')
        for item in attributions:
            coefficient, subcircuit_entry_idx = item
            if (subcircuit_idx, subcircuit_entry_idx) not in subcircuit_entries_sampled:
                continue
            subcircuit_entry_prob_key = (eval_folder, subcircuit_idx, subcircuit_entry_idx)
            if subcircuit_entry_prob_key in subcircuit_entry_probs:
                subcircuit_entry_probs[subcircuit_entry_prob_key] += coefficient * subcircuit_instance_prob
            else:
                subcircuit_entry_probs[subcircuit_entry_prob_key] = coefficient * subcircuit_instance_prob
    for subcircuit_entry_prob_key in subcircuit_entry_probs:
        subcircuit_entry_prob = subcircuit_entry_probs[subcircuit_entry_prob_key]
        eval_folder, subcircuit_idx, subcircuit_entry_idx = subcircuit_entry_prob_key
        subcircuit_entry_file = open('%s/%d_%d.txt' % (eval_folder, subcircuit_idx, subcircuit_entry_idx), 'w')
        [subcircuit_entry_file.write('%e ' % x) for x in subcircuit_entry_prob]
        subcircuit_entry_file.close()
    if self.verbose:
        print('... Total %d subcircuit results attributed\n' % ctr)
def _vertical_collapse(self, job: QuantumExecutionJob, early_termination: int, eval_mode: str):
    """Calls the vertical collapse routine of the CutQC framework

    Args:
        job (QuantumExecutionJob)
        early_termination (int)
        eval_mode (str)

    Raises:
        Exception: If the necessary files are missing
    """
    subprocess.run(['rm', './cutqc/vertical_collapse'])
    subprocess.run(['icc', '-mkl', './cutqc/vertical_collapse.c', '-o', './cutqc/vertical_collapse', '-lm'])
    cut_solution = self._partition_dict[job.id]["cut_solution"]
    max_subcircuit_qubit = cut_solution['max_subcircuit_qubit']
    full_circuit = cut_solution['circuit']
    subcircuits = cut_solution['subcircuits']
    complete_path_map = cut_solution['complete_path_map']
    counter = cut_solution['counter']
    eval_folder = get_dirname(circuit_name=job.id, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=None, num_threads=None, eval_mode=eval_mode,
                              qubit_limit=None, field='evaluator')
    vertical_collapse_folder = get_dirname(circuit_name=job.id, max_subcircuit_qubit=max_subcircuit_qubit,
                                           early_termination=early_termination, num_threads=None,
                                           eval_mode=eval_mode, qubit_limit=None, field='vertical_collapse')
    rank_files = glob.glob('%s/subcircuit_kron_terms_*.txt' % eval_folder)
    if len(rank_files) == 0:
        raise Exception('There are no rank_files for _vertical_collapse')
    if os.path.exists(vertical_collapse_folder):
        subprocess.run(['rm', '-r', vertical_collapse_folder])
    os.makedirs(vertical_collapse_folder)
    child_processes = []
    for rank in range(len(rank_files)):
        subcircuit_kron_terms_file = '%s/subcircuit_kron_terms_%d.txt' % (eval_folder, rank)
        p = subprocess.Popen(args=[
            './cutqc/vertical_collapse', '%d' % full_circuit.num_qubits,
            '%s' % subcircuit_kron_terms_file, '%s' % eval_folder,
            '%s' % vertical_collapse_folder, '%d' % early_termination,
            '%d' % rank, '%s' % eval_mode])
        child_processes.append(p)
    [cp.wait() for cp in child_processes]
    if early_termination == 1:
        measured_files = glob.glob('%s/measured*.txt' % eval_folder)
        [subprocess.run(['rm', measured_file]) for measured_file in measured_files]
        [subprocess.run(['rm', '%s/subcircuit_kron_terms_%d.txt' % (eval_folder, rank)])
         for rank in range(len(rank_files))]
def _gather_subcircuits(self, eval_mode):
    circ_dict = {}
    all_subcircuit_entries_sampled = {}
    for circuit_name in self.circuits:
        circuit = self.circuits[circuit_name]['circuit']
        max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
        max_cuts = self.circuits[circuit_name]['max_cuts']
        num_subcircuits = self.circuits[circuit_name]['num_subcircuits']
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    eval_mode=None, num_threads=None, qubit_limit=None, field='cutter')
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  num_threads=None, eval_mode=eval_mode, qubit_limit=None, field='evaluator')
        if os.path.exists(eval_folder):
            subprocess.run(['rm', '-r', eval_folder])
        os.makedirs(eval_folder)
        subcircuit_instances = read_dict(filename='%s/subcircuit_instances.pckl' % source_folder)
        summation_terms = pickle.load(open('%s/summation_terms.pckl' % source_folder, 'rb'))
        subcircuit_entries = read_dict(filename='%s/subcircuit_entries.pckl' % source_folder)
        summation_terms_sampled = dummy_sample(summation_terms=summation_terms)
        subcircuit_entries_sampled = get_subcircuit_entries_sampled(summation_terms=summation_terms_sampled)
        all_subcircuit_entries_sampled[circuit_name] = subcircuit_entries_sampled
        subcircuit_instances_sampled = get_subcircuit_instances_sampled(
            subcircuit_entries=subcircuit_entries, subcircuit_entry_samples=subcircuit_entries_sampled)
        for subcircuit_instance in subcircuit_instances_sampled:
            subcircuit_idx, subcircuit_instance_idx = subcircuit_instance
            parent_subcircuit_instance_idx = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['parent']
            circuit = subcircuit_instances[subcircuit_idx][parent_subcircuit_instance_idx]['circuit']
            shots = subcircuit_instances[subcircuit_idx][parent_subcircuit_instance_idx]['shots']
            init = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['init']
            meas = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['meas']
            circ_dict_key = (circuit_name, subcircuit_idx, parent_subcircuit_instance_idx)
            if circ_dict_key in circ_dict:
                assert circ_dict[circ_dict_key]['init'] == init
                circ_dict[circ_dict_key]['meas'].append(meas)
            else:
                circ_dict[circ_dict_key] = {'circuit': circuit, 'shots': shots, 'init': init, 'meas': [meas]}
        pickle.dump(summation_terms_sampled, open('%s/summation_terms_sampled.pckl' % (eval_folder), 'wb'))
    return circ_dict, all_subcircuit_entries_sampled
def _organize(self, job: QuantumExecutionJob, eval_mode: str, num_threads: int):
    """Organize parallel processing for the subsequent vertical collapse procedure

    Args:
        job (QuantumExecutionJob)
        eval_mode (str)
        num_threads (int)
    """
    cut_solution = self._partition_dict[job.id]["cut_solution"]
    max_subcircuit_qubit = cut_solution['max_subcircuit_qubit']
    full_circuit = cut_solution['circuit']
    subcircuits = cut_solution['subcircuits']
    complete_path_map = cut_solution['complete_path_map']
    counter = cut_solution['counter']
    eval_folder = get_dirname(circuit_name=job.id, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=None, num_threads=None, eval_mode=eval_mode,
                              qubit_limit=None, field='evaluator')
    all_indexed_combinations = self._partition_dict[job.id]["all_indexed_combinations"]
    O_rho_pairs, combinations = get_combinations(complete_path_map=complete_path_map)
    kronecker_terms, _ = build(full_circuit=full_circuit, combinations=combinations,
                               O_rho_pairs=O_rho_pairs, subcircuits=subcircuits,
                               all_indexed_combinations=all_indexed_combinations)
    for rank in range(num_threads):
        subcircuit_kron_terms_file = open('%s/subcircuit_kron_terms_%d.txt' % (eval_folder, rank), 'w')
        subcircuit_kron_terms_file.write('%d subcircuits\n' % len(kronecker_terms))
        for subcircuit_idx in kronecker_terms:
            if eval_mode == 'runtime':
                rank_subcircuit_kron_terms = [list(kronecker_terms[subcircuit_idx].keys())[0]]
            else:
                rank_subcircuit_kron_terms = find_process_jobs(
                    jobs=list(kronecker_terms[subcircuit_idx].keys()), rank=rank, num_workers=num_threads)
            subcircuit_kron_terms_file.write('subcircuit %d kron_terms %d num_effective %d\n' % (
                subcircuit_idx, len(rank_subcircuit_kron_terms), counter[subcircuit_idx]['effective']))
            for subcircuit_kron_term in rank_subcircuit_kron_terms:
                subcircuit_kron_terms_file.write('subcircuit_kron_index=%d kron_term_len=%d\n' % (
                    kronecker_terms[subcircuit_idx][subcircuit_kron_term], len(subcircuit_kron_term)))
                if eval_mode == 'runtime':
                    [subcircuit_kron_terms_file.write('%d,0 ' % (x[0])) for x in subcircuit_kron_term]
                else:
                    [subcircuit_kron_terms_file.write('%d,%d ' % (x[0], x[1])) for x in subcircuit_kron_term]
                subcircuit_kron_terms_file.write('\n')
            if rank == 0:
                self._log.debug('%s subcircuit %d : rank %d/%d needs to vertical collapse %d/%d instances' % (
                    job.id, subcircuit_idx, rank, num_threads, len(rank_subcircuit_kron_terms),
                    len(kronecker_terms[subcircuit_idx])))
        subcircuit_kron_terms_file.close()
def cut(self, circuits):
    self.circuits = circuits
    self._check_input()
    if self.verbose:
        print('*'*20, 'Cut', '*'*20)
    data = []
    for circuit_name in self.circuits:
        circuit = self.circuits[circuit_name]['circuit']
        max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
        max_cuts = self.circuits[circuit_name]['max_cuts']
        num_subcircuits = self.circuits[circuit_name]['num_subcircuits']
        data.append([circuit, max_subcircuit_qubit, num_subcircuits, max_cuts, False])
    pool = mp.Pool(processes=mp.cpu_count())
    cut_solutions = pool.starmap(find_cuts, data)
    pool.close()
    for circuit_name, cut_solution in zip(self.circuits, cut_solutions):
        if len(cut_solution) == 0:
            continue
        max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    eval_mode=None, num_threads=None, qubit_limit=None, field='cutter')
        if os.path.exists(source_folder):
            subprocess.run(['rm', '-r', source_folder])
        os.makedirs(source_folder)
        pickle.dump(cut_solution, open('%s/cut_solution.pckl' % (source_folder), 'wb'))
        if self.verbose:
            print(self.circuits[circuit_name]['circuit'])
            print('width = %d, depth = %d, size = %d' % (
                self.circuits[circuit_name]['circuit'].num_qubits,
                self.circuits[circuit_name]['circuit'].depth(),
                self.circuits[circuit_name]['circuit'].size()))
            print('{:s} on {:d}-q : {:d} cuts -->'.format(
                circuit_name, max_subcircuit_qubit, len(cut_solution['positions'])), flush=True)
            for subcircuit_idx in cut_solution['counter']:
                print('Subcircuit {:d} : {}'.format(subcircuit_idx, cut_solution['counter'][subcircuit_idx]), flush=True)
                print(cut_solution['subcircuits'][subcircuit_idx])
            print('Estimated postprocessing cost = %.3e' % cut_solution['cost_estimate'], flush=True)
        self._generate_subcircuits(circuit_name=circuit_name)
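# A minimal sketch (not from the original source) of the `circuits` dictionary that cut()
# above iterates over. The required keys follow from the attribute accesses in cut() and
# _generate_subcircuits; the circuit itself and all numeric values are illustrative
# placeholders, and `cutter` is an assumed instance of the class above.
from qiskit import QuantumCircuit

bell = QuantumCircuit(2)
bell.h(0)
bell.cx(0, 1)

circuits = {
    'bell_example': {
        'circuit': bell,               # the full circuit to be cut
        'max_subcircuit_qubit': 2,     # width limit of each subcircuit
        'max_cuts': 1,                 # upper bound on the number of cuts
        'num_subcircuits': [2],        # candidate subcircuit counts (assumed to be a list here)
    }
}
# cutter.cut(circuits=circuits)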
def _build(self, eval_mode, qubit_limit, num_nodes, num_threads):
    if self.verbose:
        print('--> Build')
        row_format = '{:<15} {:<20} {:<30}'
        print(row_format.format('circuit_name', 'summation_term_idx', 'summation_term'))
    reconstructed_probs = {}
    for circuit_name in self.circuits:
        circuit = self.circuits[circuit_name]['circuit']
        max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
        max_cuts = self.circuits[circuit_name]['max_cuts']
        num_subcircuits = self.circuits[circuit_name]['num_subcircuits']
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    eval_mode=None, num_threads=None, qubit_limit=None, field='cutter')
        summation_terms = pickle.load(open('%s/summation_terms.pckl' % source_folder, 'rb'))
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  eval_mode=eval_mode, num_threads=None, qubit_limit=None, field='evaluator')
        summation_terms_sampled = pickle.load(open('%s/summation_terms_sampled.pckl' % eval_folder, 'rb'))
        if self.verbose:
            [print(row_format.format(circuit_name, x['summation_term_idx'], str(x['summation_term'])[:30]))
             for x in summation_terms_sampled[:10]]
            print('... Total %d summation terms sampled\n' % len(summation_terms_sampled))
        cut_solution = read_dict(filename='%s/cut_solution.pckl' % source_folder)
        full_circuit = cut_solution['circuit']
        subcircuits = cut_solution['subcircuits']
        complete_path_map = cut_solution['complete_path_map']
        counter = cut_solution['counter']
        dest_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  eval_mode=eval_mode, num_threads=num_threads, qubit_limit=qubit_limit, field='build')
        if os.path.exists(dest_folder):
            subprocess.run(['rm', '-r', dest_folder])
        os.makedirs(dest_folder)
        '''
        TODO: handle the new summation_terms_sampled format
        1. Get rid of repeated summation term computations
        '''
        num_samples = 1
        child_processes = []
        for rank in range(num_threads):
            rank_summation_terms = find_process_jobs(jobs=summation_terms_sampled, rank=rank, num_workers=num_threads)
            build_command = './cutqc/build %d %s %s %d %d %d %d %d' % (
                rank, eval_folder, dest_folder, int(2**full_circuit.num_qubits), len(cut_solution['positions']),
                len(rank_summation_terms), len(subcircuits), num_samples)
            build_command_file = open('%s/build_command_%d.txt' % (dest_folder, rank), 'w')
            for rank_summation_term in rank_summation_terms:
                build_command_file.write('%e ' % rank_summation_term['sampling_prob'])
                build_command_file.write('%d ' % rank_summation_term['frequency'])
                for item in rank_summation_term['summation_term']:
                    subcircuit_idx, subcircuit_entry_idx = item
                    build_command_file.write('%d %d %d ' % (
                        subcircuit_idx, subcircuit_entry_idx, int(2**counter[subcircuit_idx]['effective'])))
            build_command_file.close()
            p = subprocess.Popen(args=build_command.split(' '))
            child_processes.append(p)
        for rank in range(num_threads):
            cp = child_processes[rank]
            cp.wait()
        time.sleep(1)
        elapsed = []
        reconstructed_prob = None
        for rank in range(num_threads):
            rank_logs = open('%s/rank_%d_summary.txt' % (dest_folder, rank), 'r')
            lines = rank_logs.readlines()
            assert lines[-2].split(' = ')[0] == 'Total build time' and lines[-1] == 'DONE\n'
            elapsed.append(float(lines[-2].split(' = ')[1]))
            fp = open('%s/build_%d.txt' % (dest_folder, rank), 'r')
            for i, line in enumerate(fp):
                rank_reconstructed_prob = line.split(' ')[:-1]
                rank_reconstructed_prob = np.array(rank_reconstructed_prob)
                rank_reconstructed_prob = rank_reconstructed_prob.astype(float)  # np.float is removed in recent NumPy
                if i > 0:
                    raise Exception('C build_output should not have more than 1 line')
            fp.close()
            subprocess.run(['rm', '%s/build_%d.txt' % (dest_folder, rank)])
            if isinstance(reconstructed_prob, np.ndarray):
                reconstructed_prob += rank_reconstructed_prob
            else:
                reconstructed_prob = rank_reconstructed_prob
        elapsed = np.array(elapsed)
        reconstructed_probs[circuit_name] = reconstructed_prob
        if self.verbose:
            print('%s _build took %.3e seconds' % (circuit_name, np.mean(elapsed)), flush=True)
            print('Sampled %d/%d summation terms' % (len(summation_terms_sampled), len(summation_terms)))
        pickle.dump({'reconstructed_prob': reconstructed_prob,
                     'num_summation_terms_sampled': len(summation_terms_sampled),
                     'num_summation_terms': len(summation_terms)},
                    open('%s/build_output.pckl' % (dest_folder), 'wb'))
    return reconstructed_probs
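# A minimal sketch (not from the original source) of inspecting the build_output.pckl written
# by _build above. The dictionary keys follow from the pickle.dump call in _build; the folder
# path is an illustrative placeholder.
import pickle

with open('path/to/build/build_output.pckl', 'rb') as f:   # placeholder path
    build_output = pickle.load(f)

prob = build_output['reconstructed_prob']                  # reconstruction over the full circuit's 2^n bins
print('sampled %d of %d summation terms' % (
    build_output['num_summation_terms_sampled'], build_output['num_summation_terms']))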
def _generate_subcircuits(self, circuit_name):
    '''
    Generate subcircuit variations and the summation terms
    '''
    circuit = self.circuits[circuit_name]['circuit']
    max_subcircuit_qubit = self.circuits[circuit_name]['max_subcircuit_qubit']
    max_cuts = self.circuits[circuit_name]['max_cuts']
    num_subcircuits = self.circuits[circuit_name]['num_subcircuits']
    source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                eval_mode=None, num_threads=None, qubit_limit=None, field='cutter')
    cut_solution = read_dict('%s/cut_solution.pckl' % (source_folder))
    assert max_subcircuit_qubit == cut_solution['max_subcircuit_qubit']
    full_circuit = cut_solution['circuit']
    subcircuits = cut_solution['subcircuits']
    complete_path_map = cut_solution['complete_path_map']
    counter = cut_solution['counter']
    subcircuit_instances, subcircuit_instances_idx = generate_subcircuit_instances(
        subcircuits=subcircuits, complete_path_map=complete_path_map)
    summation_terms, subcircuit_entries, subcircuit_instance_attribution = generate_summation_terms(
        full_circuit=full_circuit, subcircuits=subcircuits, complete_path_map=complete_path_map,
        subcircuit_instances_idx=subcircuit_instances_idx, counter=counter)
    if self.verbose:
        print('--> %s subcircuit_instances:' % circuit_name)
        row_format = '{:<30} {:<10} {:<30} {:<30}'
        for subcircuit_idx in subcircuit_instances:
            print(row_format.format('subcircuit_%d_instance_idx' % subcircuit_idx, '#shots', 'init', 'meas'))
            for subcircuit_instance_idx in subcircuit_instances[subcircuit_idx]:
                circuit = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['circuit']
                shots = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['shots']
                init = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['init']
                meas = subcircuit_instances[subcircuit_idx][subcircuit_instance_idx]['meas']
                print(row_format.format(subcircuit_instance_idx, shots, str(init)[:30], str(meas)[:30]))
                print(circuit)
        print('--> %s subcircuit_entries:' % circuit_name)
        row_format = '{:<30} {:<30}'
        for subcircuit_idx in subcircuit_entries:
            print(row_format.format('subcircuit_%d_entry_idx' % subcircuit_idx, 'kronecker term (coeff, instance)'))
            ctr = 0
            for subcircuit_entry_idx in subcircuit_entries[subcircuit_idx]:
                if type(subcircuit_entry_idx) is int:
                    ctr += 1
                    if ctr <= 10:
                        print(row_format.format(subcircuit_entry_idx,
                                                str(subcircuit_entries[subcircuit_idx][subcircuit_entry_idx])[:30]))
            print('... Total %d subcircuit entries\n' % ctr)
        print('--> %s subcircuit_instance_attribution:' % circuit_name)
        row_format = '{:<30} {:<50}'
        for subcircuit_idx in subcircuit_instance_attribution:
            print(row_format.format('subcircuit_%d_instance_idx' % subcircuit_idx, 'coefficient, subcircuit_entry_idx'))
            ctr = 0
            for subcircuit_instance_idx in subcircuit_instance_attribution[subcircuit_idx]:
                ctr += 1
                if ctr > 10:
                    break
                print(row_format.format(subcircuit_instance_idx,
                                        str(subcircuit_instance_attribution[subcircuit_idx][subcircuit_instance_idx])[:50]))
            print('... Total %d subcircuit instances to attribute\n' % len(subcircuit_instance_attribution[subcircuit_idx]))
        print('--> %s summation_terms:' % circuit_name)
        row_format = '{:<10}' * len(subcircuits)
        for summation_term in summation_terms[:10]:
            row = []
            for subcircuit_entry in summation_term:
                subcircuit_idx, subcircuit_entry_idx = subcircuit_entry
                row.append('%d,%d' % (subcircuit_idx, subcircuit_entry_idx))
            print(row_format.format(*row))
        print('... Total %d summations\n' % len(summation_terms))
    pickle.dump(subcircuit_instances, open('%s/subcircuit_instances.pckl' % (source_folder), 'wb'))
    pickle.dump(subcircuit_instances_idx, open('%s/subcircuit_instances_idx.pckl' % (source_folder), 'wb'))
    pickle.dump(subcircuit_instance_attribution, open('%s/subcircuit_instance_attribution.pckl' % (source_folder), 'wb'))
    pickle.dump(summation_terms, open('%s/summation_terms.pckl' % (source_folder), 'wb'))
    pickle.dump(subcircuit_entries, open('%s/subcircuit_entries.pckl' % (source_folder), 'wb'))
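# A hedged, pure-NumPy reference sketch (not part of the original source) of the kind of
# accumulation the C `build` kernel performs on the data structures generated above: each
# summation term is a list of (subcircuit_idx, subcircuit_entry_idx) pairs, and the full
# distribution is accumulated as a sum over terms of the Kronecker product of the
# corresponding subcircuit entry vectors. `subcircuit_entry_probs[(idx, entry)]` is an
# assumed in-memory lookup of the per-entry probability vectors; subcircuit ordering
# (smart_order) and dynamic definition are ignored here for brevity.
import numpy as np

def reconstruct_reference(summation_terms, subcircuit_entry_probs):
    reconstructed = None
    for summation_term in summation_terms:
        term_vector = np.array([1.0])
        for subcircuit_idx, subcircuit_entry_idx in summation_term:
            term_vector = np.kron(term_vector, subcircuit_entry_probs[(subcircuit_idx, subcircuit_entry_idx)])
        reconstructed = term_vector if reconstructed is None else reconstructed + term_vector
    return reconstructed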
def post_process(self, circuit_cases, eval_mode, num_nodes, num_threads, early_termination, qubit_limit, recursion_depth):
    if self.verbose:
        print('-' * 20, 'Postprocess, mode = %s' % eval_mode, '-' * 20)
    self.circuit_cases = circuit_cases
    subprocess.run(['rm', './cutqc/merge'])
    subprocess.run(['icc', '-mkl', './cutqc/merge.c', '-o', './cutqc/merge', '-lm'])
    subprocess.run(['rm', './cutqc/build'])
    subprocess.run(['icc', '-fopenmp', '-mkl', '-lpthread', '-march=native', './cutqc/build.c', '-o', './cutqc/build', '-lm'])
    for circuit_case in self.circuit_cases:
        circuit_name = circuit_case.split('|')[0]
        max_subcircuit_qubit = int(circuit_case.split('|')[1])
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                    field='cutter')
        cut_solution = read_dict('%s/subcircuits.pckl' % (source_folder))
        if len(cut_solution) == 0:
            continue
        assert max_subcircuit_qubit == cut_solution['max_subcircuit_qubit']
        full_circuit = cut_solution['circuit']
        subcircuits = cut_solution['subcircuits']
        complete_path_map = cut_solution['complete_path_map']
        counter = cut_solution['counter']
        dest_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  early_termination=early_termination, num_threads=num_threads,
                                  eval_mode=eval_mode, qubit_limit=qubit_limit, field='build')
        if os.path.exists('%s' % dest_folder):
            subprocess.run(['rm', '-r', dest_folder])
        os.makedirs(dest_folder)
        vertical_collapse_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                               early_termination=early_termination, num_threads=None,
                                               eval_mode=eval_mode, qubit_limit=None, field='vertical_collapse')
        for recursion_layer in range(recursion_depth):
            if self.verbose:
                print('*' * 20, '%s Recursion Layer %d' % (circuit_case, recursion_layer), '*' * 20, flush=True)
            recursion_qubit = qubit_limit
            if self.verbose:
                print('__Distribute__', flush=True)
            distribute(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                       eval_mode=eval_mode, early_termination=early_termination, num_threads=num_threads,
                       qubit_limit=qubit_limit, recursion_layer=recursion_layer, recursion_qubit=recursion_qubit,
                       verbose=self.verbose)
            if self.verbose:
                print('__Merge__', flush=True)
            terminated = self._merge(circuit_case=circuit_case, vertical_collapse_folder=vertical_collapse_folder,
                                     dest_folder=dest_folder, recursion_layer=recursion_layer, eval_mode=eval_mode)
            if terminated:
                break
            if self.verbose:
                print('__Build__', flush=True)
            reconstructed_prob = self._build(circuit_case=circuit_case, dest_folder=dest_folder,
                                             recursion_layer=recursion_layer, eval_mode=eval_mode)
def _vertical_collapse(self, early_termination, eval_mode):
    subprocess.run(['rm', './cutqc/vertical_collapse'])
    subprocess.run(['icc', '-mkl', './cutqc/vertical_collapse.c', '-o', './cutqc/vertical_collapse', '-lm'])
    for circuit_case in self.circuit_cases:
        circuit_name = circuit_case.split('|')[0]
        max_subcircuit_qubit = int(circuit_case.split('|')[1])
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                    field='cutter')
        cut_solution = read_dict('%s/subcircuits.pckl' % (source_folder))
        if len(cut_solution) == 0:
            continue
        assert max_subcircuit_qubit == cut_solution['max_subcircuit_qubit']
        full_circuit = cut_solution['circuit']
        subcircuits = cut_solution['subcircuits']
        complete_path_map = cut_solution['complete_path_map']
        counter = cut_solution['counter']
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  early_termination=None, num_threads=None, eval_mode=eval_mode,
                                  qubit_limit=None, field='evaluator')
        vertical_collapse_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                               early_termination=early_termination, num_threads=None,
                                               eval_mode=eval_mode, qubit_limit=None, field='vertical_collapse')
        rank_files = glob.glob('%s/subcircuit_kron_terms_*.txt' % eval_folder)
        if len(rank_files) == 0:
            raise Exception('There are no rank_files for _vertical_collapse')
        if os.path.exists(vertical_collapse_folder):
            subprocess.run(['rm', '-r', vertical_collapse_folder])
        os.makedirs(vertical_collapse_folder)
        child_processes = []
        for rank in range(len(rank_files)):
            subcircuit_kron_terms_file = '%s/subcircuit_kron_terms_%d.txt' % (eval_folder, rank)
            p = subprocess.Popen(args=[
                './cutqc/vertical_collapse', '%d' % full_circuit.num_qubits,
                '%s' % subcircuit_kron_terms_file, '%s' % eval_folder,
                '%s' % vertical_collapse_folder, '%d' % early_termination,
                '%d' % rank, '%s' % eval_mode])
            child_processes.append(p)
        [cp.wait() for cp in child_processes]
        if early_termination == 1:
            measured_files = glob.glob('%s/measured*.txt' % eval_folder)
            [subprocess.run(['rm', measured_file]) for measured_file in measured_files]
            [subprocess.run(['rm', '%s/subcircuit_kron_terms_%d.txt' % (eval_folder, rank)])
             for rank in range(len(rank_files))]
def _run_subcircuits(self, eval_mode, num_nodes, num_threads, ibmq):
    for circuit_case in self.circuit_cases:
        circuit_name = circuit_case.split('|')[0]
        max_subcircuit_qubit = int(circuit_case.split('|')[1])
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                    field='cutter')
        cut_solution = read_dict('%s/subcircuits.pckl' % (source_folder))
        if len(cut_solution) == 0:
            continue
        assert max_subcircuit_qubit == cut_solution['max_subcircuit_qubit']
        subcircuits = cut_solution['subcircuits']
        complete_path_map = cut_solution['complete_path_map']
        counter = cut_solution['counter']
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  early_termination=None, num_threads=None, eval_mode=eval_mode,
                                  qubit_limit=None, field='evaluator')
        if os.path.exists(eval_folder):
            subprocess.run(['rm', '-r', eval_folder])
        os.makedirs(eval_folder)
        circ_dict, all_indexed_combinations = generate_subcircuit_instances(
            subcircuits=subcircuits, complete_path_map=complete_path_map)
        pickle.dump(all_indexed_combinations, open('%s/all_indexed_combinations.pckl' % (eval_folder), 'wb'))
        if eval_mode == 'sv':
            data = []
            for key in circ_dict:
                data.append([key, circ_dict[key]['circuit'], eval_mode, eval_folder, counter])
            random.shuffle(data)  # Ensure a somewhat fair distribution of workloads
            chunksize = max(len(data) // num_threads // 10, 1)
            pool = mp.Pool(processes=num_threads)
            pool.starmap(simulate_subcircuit, data, chunksize=chunksize)
            pool.close()
        elif eval_mode == 'runtime':
            data = []
            # NOTE: only compute one instance per subcircuit
            subcircuit_idx_written = []
            for key in circ_dict:
                subcircuit_idx, _, _ = key
                if subcircuit_idx not in subcircuit_idx_written:
                    subcircuit_idx_written.append(subcircuit_idx)
                    data.append([key, circ_dict[key]['circuit'], eval_mode, eval_folder, counter])
            random.shuffle(data)  # Ensure a somewhat fair distribution of workloads
            chunksize = max(len(data) // num_threads // 10, 1)
            pool = mp.Pool(processes=num_threads)
            pool.starmap(simulate_subcircuit, data, chunksize=chunksize)
            pool.close()
        elif 'ibmq' in eval_mode:
            # NOTE: control whether to use real device
            scheduler = Scheduler(circ_dict=circ_dict, token=ibmq['token'], hub=ibmq['hub'], group=ibmq['group'],
                                  project=ibmq['project'], device_name=eval_mode, datetime=datetime.now())
            scheduler.submit_jobs(real_device=True, transpilation=True, verbose=True)
            scheduler.retrieve_jobs(force_prob=True, save_memory=False, save_directory=None, verbose=True)
            data = []
            for key in scheduler.circ_dict:
                data.append([key, eval_folder, counter, scheduler.circ_dict[key]['prob'], eval_mode])
            random.shuffle(data)  # Ensure a somewhat fair distribution of workloads
            chunksize = max(len(data) // num_threads // 10, 1)
            pool = mp.Pool(processes=num_threads)
            pool.starmap(write_subcircuit, data, chunksize=chunksize)
            pool.close()
        else:
            raise NotImplementedError
parser.add_argument('--qubit_limit', type=int, help='Determines number of bins during dynamic definition')
parser.add_argument('--num_threads', type=int, help='Number of parallel threads for post-process')
parser.add_argument('--eval_mode', type=str, help='Evaluation backend mode')
args = parser.parse_args()

source_folder = get_dirname(circuit_name=args.circuit_name, max_subcircuit_qubit=args.max_subcircuit_qubit,
                            early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                            field='cutter')
dest_folder = get_dirname(circuit_name=args.circuit_name, max_subcircuit_qubit=args.max_subcircuit_qubit,
                          early_termination=args.early_termination, num_threads=args.num_threads,
                          eval_mode=args.eval_mode, qubit_limit=args.qubit_limit, field='build')
case_dict = read_dict(filename='%s/subcircuits.pckl' % source_folder)
if len(case_dict) == 0:
    exit(0)
full_circuit = case_dict['circuit']
def distribute(circuit_name, max_subcircuit_qubit, eval_mode, early_termination, num_threads, qubit_limit,
               recursion_layer, recursion_qubit, verbose):
    source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                field='cutter')
    eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=None, eval_mode=eval_mode, num_threads=None, qubit_limit=None,
                              field='evaluator')
    vertical_collapse_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                           early_termination=early_termination, num_threads=None,
                                           eval_mode=eval_mode, qubit_limit=None, field='vertical_collapse')
    dest_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                              early_termination=early_termination, num_threads=num_threads, eval_mode=eval_mode,
                              qubit_limit=qubit_limit, field='build')
    case_dict = read_dict(filename='%s/subcircuits.pckl' % source_folder)
    all_indexed_combinations = read_dict(filename='%s/all_indexed_combinations.pckl' % (eval_folder))
    if len(case_dict) == 0:
        return
    full_circuit = case_dict['circuit']
    subcircuits = case_dict['subcircuits']
    complete_path_map = case_dict['complete_path_map']
    counter = case_dict['counter']
    num_subcircuits = len(subcircuits)
    meta_data = generate_meta_data(recursion_layer=recursion_layer, counter=counter, recursion_qubit=recursion_qubit,
                                   dest_folder=dest_folder, verbose=verbose)
    pickle.dump(meta_data, open('%s/meta_data.pckl' % (dest_folder), 'wb'))
    O_rho_pairs, combinations = get_combinations(complete_path_map=complete_path_map)
    num_cuts = len(O_rho_pairs)
    _, summation_terms = build(full_circuit=full_circuit, combinations=combinations, O_rho_pairs=O_rho_pairs,
                               subcircuits=subcircuits, all_indexed_combinations=all_indexed_combinations)
    dynamic_definition_folder = '%s/dynamic_definition_%d' % (dest_folder, recursion_layer)
    if os.path.exists(dynamic_definition_folder):
        subprocess.run(['rm', '-r', dynamic_definition_folder])
    os.makedirs(dynamic_definition_folder)
    write_files(recursion_layer=recursion_layer, num_threads=num_threads, counter=counter,
                vertical_collapse_folder=vertical_collapse_folder, dynamic_definition_folder=dynamic_definition_folder,
                dynamic_definition_schedule=meta_data['dynamic_definition_schedule'],
                summation_terms=summation_terms, num_cuts=num_cuts, num_subcircuits=num_subcircuits, verbose=verbose)
def _organize(self, eval_mode, num_threads):
    '''
    Organize parallel processing for the subsequent vertical collapse procedure
    '''
    for circuit_case in self.circuit_cases:
        circuit_name = circuit_case.split('|')[0]
        max_subcircuit_qubit = int(circuit_case.split('|')[1])
        source_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                    early_termination=None, eval_mode=None, num_threads=None, qubit_limit=None,
                                    field='cutter')
        cut_solution = read_dict('%s/subcircuits.pckl' % (source_folder))
        if len(cut_solution) == 0:
            continue
        assert max_subcircuit_qubit == cut_solution['max_subcircuit_qubit']
        full_circuit = cut_solution['circuit']
        subcircuits = cut_solution['subcircuits']
        complete_path_map = cut_solution['complete_path_map']
        counter = cut_solution['counter']
        eval_folder = get_dirname(circuit_name=circuit_name, max_subcircuit_qubit=max_subcircuit_qubit,
                                  early_termination=None, num_threads=None, eval_mode=eval_mode,
                                  qubit_limit=None, field='evaluator')
        all_indexed_combinations = read_dict(filename='%s/all_indexed_combinations.pckl' % (eval_folder))
        O_rho_pairs, combinations = get_combinations(complete_path_map=complete_path_map)
        kronecker_terms, _ = build(full_circuit=full_circuit, combinations=combinations, O_rho_pairs=O_rho_pairs,
                                   subcircuits=subcircuits, all_indexed_combinations=all_indexed_combinations)
        for rank in range(num_threads):
            subcircuit_kron_terms_file = open('%s/subcircuit_kron_terms_%d.txt' % (eval_folder, rank), 'w')
            subcircuit_kron_terms_file.write('%d subcircuits\n' % len(kronecker_terms))
            for subcircuit_idx in kronecker_terms:
                if eval_mode == 'runtime':
                    rank_subcircuit_kron_terms = [list(kronecker_terms[subcircuit_idx].keys())[0]]
                else:
                    rank_subcircuit_kron_terms = find_process_jobs(
                        jobs=list(kronecker_terms[subcircuit_idx].keys()), rank=rank, num_workers=num_threads)
                subcircuit_kron_terms_file.write('subcircuit %d kron_terms %d num_effective %d\n' % (
                    subcircuit_idx, len(rank_subcircuit_kron_terms), counter[subcircuit_idx]['effective']))
                for subcircuit_kron_term in rank_subcircuit_kron_terms:
                    subcircuit_kron_terms_file.write('subcircuit_kron_index=%d kron_term_len=%d\n' % (
                        kronecker_terms[subcircuit_idx][subcircuit_kron_term], len(subcircuit_kron_term)))
                    if eval_mode == 'runtime':
                        [subcircuit_kron_terms_file.write('%d,0 ' % (x[0])) for x in subcircuit_kron_term]
                    else:
                        [subcircuit_kron_terms_file.write('%d,%d ' % (x[0], x[1])) for x in subcircuit_kron_term]
                    subcircuit_kron_terms_file.write('\n')
                if rank == 0 and self.verbose:
                    print('%s subcircuit %d : rank %d/%d needs to vertical collapse %d/%d instances' % (
                        circuit_case, subcircuit_idx, rank, num_threads, len(rank_subcircuit_kron_terms),
                        len(kronecker_terms[subcircuit_idx])), flush=True)
            subcircuit_kron_terms_file.close()