class Virtual_Execution_Environment():

    def __init__(self, provider: Provider, config: Dict) -> None:
        """Build the virtual execution environment pipeline.

        Wires the resource mapper, aggregator, partitioner, execution
        handler and the result-processing stages together via queues.

        Args:
            provider (Provider): provider object that handles the
                communication with IBMQ
            config (Dict): configuration
        """
        self._log = logger.get_logger(type(self).__name__)

        # Public endpoints of the pipeline.
        self.input = Queue()
        self.output = Queue()
        self.errors = Queue()

        # Internal queues connecting the pipeline stages.
        exec_in = Queue()
        exec_out = Queue()
        agg_in = Queue()
        part_in = Queue()
        agg_result_in = Queue()
        part_result_in = Queue()
        completed = Queue()

        # Bookkeeping dicts shared between producers and result processors.
        agg_jobs = {}
        part_jobs = {}

        self.backend_chooser = Backend_Chooser(
            provider, config["quantum_resource_mapper"]["backend_chooser"])
        self.quantum_resource_mapper = QuantumResourceMapper(
            input=self.input,
            output=exec_in,
            output_agg=agg_in,
            output_part=part_in,
            backend_chooser=self.backend_chooser,
            config=config["quantum_resource_mapper"])
        self.aggregator = Aggregator(
            input=agg_in,
            output=exec_in,
            job_dict=agg_jobs,
            timeout=config["aggregator"]["timeout"])
        self.partitioner = Partitioner(
            input=part_in,
            output=exec_in,
            partition_dict=part_jobs,
            error_queue=self.errors,
            **config["partitioner"])
        self.execution_handler = ExecutionHandler(
            provider,
            input=exec_in,
            output=exec_out,
            **config["execution_handler"])
        self.result_analyzer = ResultAnalyzer(
            input=exec_out,
            output=self.output,
            output_agg=agg_result_in,
            output_part=part_result_in)
        self.aggregation_result_processor = AggregatorResults(
            input=agg_result_in,
            output=self.output,
            job_dict=agg_jobs)
        self.partition_result_writer = ResultWriter(
            input=part_result_in,
            completed_jobs=completed,
            partition_dict=part_jobs)
        self.partition_result_processor = ResultProcessing(
            input=completed,
            output=self.output,
            partition_dict=part_jobs)

    def start(self):
        """Start all worker threads of the Virtual_Execution_Environment."""
        workers = (
            self.quantum_resource_mapper,
            self.aggregator,
            self.partitioner,
            self.execution_handler,
            self.result_analyzer,
            self.aggregation_result_processor,
            self.partition_result_writer,
            self.partition_result_processor,
        )
        for worker in workers:
            worker.start()
def execute_search(query: str, user_id: str) -> List[Raw_Document]:
    """Run a BM25 search for *query*, personalised by *user_id*'s history.

    Rebuilds the BM25 model from the database when it has not been
    initialised yet or an update is due, grades the hits by their
    distance to the user's document history, and returns the documents
    ordered by the normalised aggregate score.
    """
    bm25 = BM25Search()

    # (Re)build the index lazily: on first use or when flagged as stale.
    if not bm25.is_initialized() or _should_update():
        all_documents = fetch_all_documents_from_database()
        bm25.build_bm25_model(all_documents)

    hits: List[BM25_graded_document] = bm25.search_query(query)
    distances: List[BM25_Distance_Document] = (
        DistanceFetcher().fetch_document_distance_user_history(hits, user_id))

    # normalize_and_sort_search_results yields (score, document) pairs;
    # keep only the documents, already in ranked order.
    ranked = Aggregator().normalize_and_sort_search_results(distances)
    return [entry[1] for entry in ranked]
def runConfiguration(weights, subjects):
    """Evaluate one feature-weight configuration over all subjects.

    Runs the aggregator once per subject with the given weights, compares
    each resulting summary against its ideal summary, and reports the mean
    evaluation score.

    Args:
        weights: feature weights to test.
        subjects: mapping of subject identifiers to subject data.

    Returns:
        dict with keys "weights" (the input weights) and "evaluation"
        (mean evaluation score across all subjects).
    """
    # NOTE: converted from Python 2 `print` statements (a syntax error under
    # Python 3) to print() calls; message text is unchanged.
    print("Testing configuration: " + str(weights))
    aggregator = Aggregator()
    evaluator = Evaluator()
    aggregator.loadFeatures()
    aggregator.setWeights(weights)
    # (TO-DO: actually search for optimal values in n-weights space.)
    evaluations = list()
    # Iterate the subject values directly instead of key-then-lookup.
    for subject_data in subjects.values():
        aggregator.setSubject(subject_data)
        aggregator.run()
        resultingSummary = aggregator.getResultingSummary()
        idealSummary = aggregator.getIdealSummary()
        evaluator.setTest(resultingSummary, idealSummary)
        evaluations.append(evaluator.run())
    print("Resulting evaluations: " + str(evaluations))
    meanEvaluation = sum(evaluations) / float(len(evaluations))
    print("So that mean evaluation is: " + str(meanEvaluation) + "\n")
    localResult = dict()
    localResult["weights"] = weights
    localResult["evaluation"] = meanEvaluation
    return localResult
# Enqueue every circuit of the selected kind twice: once into the
# aggregation pipeline and once straight into the execution queue, so the
# aggregated run can be compared against the plain run.
# NOTE(review): `type` here indexes `circuits` and is presumably bound
# earlier in the script — it shadows the builtin `type`; confirm against
# the surrounding code.
for circ in circuits[type]:
    input_pipeline.put(
        QuantumExecutionJob(
            # measure_all(inplace=False) returns a measured copy,
            # leaving the original circuit untouched.
            circuit=circ.measure_all(inplace=False),
            shots=shots,
            backend_data=backend_data))
    input_exec.put(
        QuantumExecutionJob(
            circuit=circ.measure_all(inplace=False),
            shots=shots,
            backend_data=backend_data))

# Shared dict in which the aggregator records its job bookkeeping.
agg_job_dict = {}

# Merges jobs from input_pipeline and forwards batches for execution;
# flushes after a 10 s timeout even if a batch is not full.
aggregator = Aggregator(
    input=input_pipeline,
    output=input_exec,
    job_dict=agg_job_dict,
    timeout=10)
aggregator.start()

# Runs the queued jobs against the provider's backends in batches.
exec_handler = ExecutionHandler(
    provider,
    input=input_exec,
    output=output_exec,
    batch_timeout=5)
exec_handler.start()

# Routes finished results: aggregated jobs go to agg_results, everything
# else to output_pipline (sic — name presumably defined elsewhere with
# this spelling; confirm before renaming). No partition handling here.
result_analyzer = ResultAnalyzer(
    input=output_exec,
    output=output_pipline,
    output_agg=agg_results,
    output_part=None)
result_analyzer.start()
def get_aggregator(self):
    """Return the Aggregator, creating it lazily on first access."""
    if self._aggregator is None:
        # '0x0' is the placeholder authority public key.
        self._aggregator = Aggregator('0x0', self.get_prover())
    return self._aggregator