def init_params(cls, params, algo_input):
    """ Initialize via parameters dictionary and algorithm input instance.

    Args:
        params (dict): parameters dictionary
        algo_input (EnergyInput): EnergyInput instance

    Returns:
        VQE: vqe object
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    vqe_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    initial_point = vqe_params.get('initial_point')
    max_evals_grouped = vqe_params.get('max_evals_grouped')

    # Set up variational form, we need to add computed num qubits.
    # Pass all parameters so that Variational Form can create its dependents.
    var_form_params = params.get(Pluggable.SECTION_KEY_VAR_FORM)
    var_form_params['num_qubits'] = operator.num_qubits
    var_form = get_pluggable_class(PluggableType.VARIATIONAL_FORM,
                                   var_form_params['name']).init_params(params)

    # Set up optimizer
    opt_params = params.get(Pluggable.SECTION_KEY_OPTIMIZER)
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER,
                                    opt_params['name']).init_params(params)

    return cls(operator, var_form, optimizer,
               initial_point=initial_point,
               max_evals_grouped=max_evals_grouped,
               aux_operators=algo_input.aux_ops)
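# A minimal sketch (not from the source) of a params dictionary that could drive
# VQE.init_params above. The section keys reuse the Pluggable constants the method
# reads; the import path and the pluggable names 'RY' and 'COBYLA' are assumptions
# based on typical Aqua releases, not taken from this code.
from qiskit.aqua import Pluggable  # assumed import path

example_vqe_params = {
    Pluggable.SECTION_KEY_ALGORITHM: {
        'name': 'VQE',
        'initial_point': None,
        'max_evals_grouped': 1,
    },
    Pluggable.SECTION_KEY_VAR_FORM: {
        'name': 'RY',      # assumed variational form; 'num_qubits' is added by init_params itself
    },
    Pluggable.SECTION_KEY_OPTIMIZER: {
        'name': 'COBYLA',  # assumed optimizer name
    },
}
# vqe = VQE.init_params(example_vqe_params, energy_input)  # energy_input: an EnergyInput instance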
def init_params(cls, params, algo_input):
    """Constructor from params."""
    feature_dimension = get_feature_dimension(algo_input.training_dataset)
    fea_map_params = params.get(Pluggable.SECTION_KEY_FEATURE_MAP)
    fea_map_params['feature_dimension'] = feature_dimension
    feature_map = get_pluggable_class(
        PluggableType.FEATURE_MAP, fea_map_params['name']).init_params(params)

    multiclass_extension = None
    multiclass_extension_params = params.get(Pluggable.SECTION_KEY_MULTICLASS_EXT)
    if multiclass_extension_params is not None:
        multiclass_extension_params['params'] = [feature_map]
        multiclass_extension_params['estimator_cls'] = _QSVM_Estimator

        multiclass_extension = get_pluggable_class(
            PluggableType.MULTICLASS_EXTENSION,
            multiclass_extension_params['name']).init_params(params)
        logger.info("Multiclass classifier based on %s",
                    multiclass_extension_params['name'])

    return cls(feature_map, algo_input.training_dataset,
               algo_input.test_dataset, algo_input.datapoints,
               multiclass_extension)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params (dict): parameters dictionary
        algo_input (EnergyInput): EnergyInput instance
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    qaoa_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    p = qaoa_params.get('p')
    initial_point = qaoa_params.get('initial_point')
    max_evals_grouped = qaoa_params.get('max_evals_grouped')

    init_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    init_state_params['num_qubits'] = operator.num_qubits
    init_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                     init_state_params['name']).init_params(params)

    # Set up optimizer
    opt_params = params.get(Pluggable.SECTION_KEY_OPTIMIZER)
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER,
                                    opt_params['name']).init_params(params)

    return cls(operator, optimizer, p=p,
               initial_state=init_state,
               initial_point=initial_point,
               max_evals_grouped=max_evals_grouped,
               aux_operators=algo_input.aux_ops)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: Input instance
    """
    if algo_input is not None:
        raise AquaError('Input instance not supported.')

    ae_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    num_eval_qubits = ae_params.get('num_eval_qubits')

    # Set up uncertainty problem. The params can include an uncertainty model
    # whose type depends on the uncertainty problem, and it is the problem's
    # responsibility to create it for itself from the complete params set
    # that is passed to it.
    uncertainty_problem_params = params.get(Pluggable.SECTION_KEY_UNCERTAINTY_PROBLEM)
    uncertainty_problem = get_pluggable_class(
        PluggableType.UNCERTAINTY_PROBLEM,
        uncertainty_problem_params['name']).init_params(params)

    # Set up iqft, we need to add num qubits to params, which is our num_eval_qubits here
    iqft_params = params.get(Pluggable.SECTION_KEY_IQFT)
    iqft_params['num_qubits'] = num_eval_qubits
    iqft = get_pluggable_class(PluggableType.IQFT,
                               iqft_params['name']).init_params(params)

    return cls(num_eval_qubits, uncertainty_problem, q_factory=None, iqft=iqft)
def init_params(cls, params, algo_input):
    algo_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)
    override_spsa_params = algo_params.get('override_SPSA_params')
    batch_mode = algo_params.get('batch_mode')
    minibatch_size = algo_params.get('minibatch_size')

    # Set up optimizer
    opt_params = params.get(QuantumAlgorithm.SECTION_KEY_OPTIMIZER)
    # If SPSA then override SPSA params as reqd to our predetermined values
    if opt_params['name'] == 'SPSA' and override_spsa_params:
        opt_params['c0'] = 4.0
        opt_params['c1'] = 0.1
        opt_params['c2'] = 0.602
        opt_params['c3'] = 0.101
        opt_params['c4'] = 0.0
        opt_params['skip_calibration'] = True
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER,
                                    opt_params['name']).init_params(opt_params)

    # Set up feature map
    fea_map_params = params.get(QuantumAlgorithm.SECTION_KEY_FEATURE_MAP)
    num_qubits = get_feature_dimension(algo_input.training_dataset)
    fea_map_params['num_qubits'] = num_qubits
    feature_map = get_pluggable_class(PluggableType.FEATURE_MAP,
                                      fea_map_params['name']).init_params(fea_map_params)

    # Set up variational form
    var_form_params = params.get(QuantumAlgorithm.SECTION_KEY_VAR_FORM)
    var_form_params['num_qubits'] = num_qubits
    var_form = get_pluggable_class(PluggableType.VARIATIONAL_FORM,
                                   var_form_params['name']).init_params(var_form_params)

    return cls(optimizer, feature_map, var_form, algo_input.training_dataset,
               algo_input.test_dataset, algo_input.datapoints, batch_mode,
               minibatch_size)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: input instance
    """
    if algo_input is not None:
        raise AquaError("Unexpected Input instance.")

    grover_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    incremental = grover_params.get(Grover.PROP_INCREMENTAL)
    num_iterations = grover_params.get(Grover.PROP_NUM_ITERATIONS)
    mct_mode = grover_params.get(Grover.PROP_MCT_MODE)

    oracle_params = params.get(Pluggable.SECTION_KEY_ORACLE)
    oracle = get_pluggable_class(PluggableType.ORACLE,
                                 oracle_params['name']).init_params(params)

    # Set up initial state, we need to add computed num qubits to params
    init_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    init_state_params['num_qubits'] = len(oracle.variable_register)
    init_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                     init_state_params['name']).init_params(params)

    return cls(oracle, init_state=init_state, incremental=incremental,
               num_iterations=num_iterations, mct_mode=mct_mode)
def init_params(cls, params, algo_input):
    """Constructor from params."""
    num_qubits = get_feature_dimension(algo_input.training_dataset)
    fea_map_params = params.get(QuantumAlgorithm.SECTION_KEY_FEATURE_MAP)
    fea_map_params['num_qubits'] = num_qubits
    feature_map = get_pluggable_class(
        PluggableType.FEATURE_MAP,
        fea_map_params['name']).init_params(fea_map_params)

    multiclass_extension = None
    multiclass_extension_params = params.get(
        QuantumAlgorithm.SECTION_KEY_MULTICLASS_EXTENSION, None)
    if multiclass_extension_params is not None:
        multiclass_extension_params['params'] = [feature_map]
        multiclass_extension_params['estimator_cls'] = _QSVM_Kernel_Estimator

        multiclass_extension = get_pluggable_class(
            PluggableType.MULTICLASS_EXTENSION,
            multiclass_extension_params['name']).init_params(
                multiclass_extension_params)
        logger.info("Multiclass classifier based on {}".format(
            multiclass_extension_params['name']))

    return cls(feature_map, algo_input.training_dataset,
               algo_input.test_dataset, algo_input.datapoints,
               multiclass_extension)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: EnergyInput instance
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    qpe_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    num_time_slices = qpe_params.get(QPE.PROP_NUM_TIME_SLICES)
    expansion_mode = qpe_params.get(QPE.PROP_EXPANSION_MODE)
    expansion_order = qpe_params.get(QPE.PROP_EXPANSION_ORDER)
    num_ancillae = qpe_params.get(QPE.PROP_NUM_ANCILLAE)

    # Set up initial state, we need to add computed num qubits to params
    init_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    init_state_params['num_qubits'] = operator.num_qubits
    init_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                     init_state_params['name']).init_params(params)

    # Set up iqft, we need to add num qubits to params which is our num_ancillae bits here
    iqft_params = params.get(Pluggable.SECTION_KEY_IQFT)
    iqft_params['num_qubits'] = num_ancillae
    iqft = get_pluggable_class(PluggableType.IQFT,
                               iqft_params['name']).init_params(params)

    return cls(operator, init_state, iqft, num_time_slices, num_ancillae,
               expansion_mode=expansion_mode,
               expansion_order=expansion_order)
def init_params(cls, params, algo_input):
    """
    Initialize qGAN via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: Input instance

    Returns:
        QGAN: qgan object
    """
    if algo_input is None:
        raise AquaError("Input instance is required.")

    qgan_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    num_qubits = qgan_params.get('num_qubits')
    batch_size = qgan_params.get('batch_size')
    num_epochs = qgan_params.get('num_epochs')
    seed = qgan_params.get('seed')
    tol_rel_ent = qgan_params.get('tol_rel_ent')
    snapshot_dir = qgan_params.get('snapshot_dir')

    discriminator_params = params.get(Pluggable.SECTION_KEY_DISCRIMINATIVE_NETWORK)
    generator_params = params.get(Pluggable.SECTION_KEY_GENERATIVE_NETWORK)
    generator_params['num_qubits'] = num_qubits

    discriminator = get_pluggable_class(PluggableType.DISCRIMINATIVE_NETWORK,
                                        discriminator_params['name']).init_params(params)
    generator = get_pluggable_class(PluggableType.GENERATIVE_NETWORK,
                                    generator_params['name']).init_params(params)

    return cls(algo_input.data, algo_input.bounds, num_qubits, batch_size,
               num_epochs, seed, discriminator, generator, tol_rel_ent,
               snapshot_dir)
def init_params(cls, params, matrix):  # pylint: disable=arguments-differ
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params (dict): parameters dictionary
        matrix (numpy.ndarray): two dimensional array which represents the operator

    Returns:
        EigsQPE: instance of this class

    Raises:
        AquaError: Operator instance is required
    """
    if matrix is None:
        raise AquaError("Operator instance is required.")

    if not isinstance(matrix, np.ndarray):
        matrix = np.array(matrix)

    eigs_params = params.get(Pluggable.SECTION_KEY_EIGS)
    args = {k: v for k, v in eigs_params.items() if k != 'name'}
    num_ancillae = eigs_params['num_ancillae']
    negative_evals = eigs_params['negative_evals']

    # Adding an additional flag qubit for negative eigenvalues
    if negative_evals:
        num_ancillae += 1
        args['num_ancillae'] = num_ancillae

    args['operator'] = MatrixOperator(matrix=matrix)

    # Set up iqft, we need to add num qubits to params which is our num_ancillae bits here
    iqft_params = params.get(Pluggable.SECTION_KEY_IQFT)
    iqft_params['num_qubits'] = num_ancillae
    args['iqft'] = get_pluggable_class(PluggableType.IQFT,
                                       iqft_params['name']).init_params(params)

    # For converting the encoding of the negative eigenvalues, we need two
    # additional instances for QFT and IQFT
    if negative_evals:
        ne_params = params
        qft_num_qubits = iqft_params['num_qubits']
        ne_qft_params = params.get(Pluggable.SECTION_KEY_QFT)
        ne_qft_params['num_qubits'] = qft_num_qubits - 1
        ne_iqft_params = params.get(Pluggable.SECTION_KEY_IQFT)
        ne_iqft_params['num_qubits'] = qft_num_qubits - 1
        ne_params['qft'] = ne_qft_params
        ne_params['iqft'] = ne_iqft_params
        args['ne_qfts'] = [
            get_pluggable_class(PluggableType.QFT,
                                ne_qft_params['name']).init_params(ne_params),
            get_pluggable_class(PluggableType.IQFT,
                                ne_iqft_params['name']).init_params(ne_params)
        ]
    else:
        args['ne_qfts'] = [None, None]

    return cls(**args)
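# A minimal sketch (assumptions, not from the source) of how EigsQPE.init_params
# above might be driven. The 'num_ancillae' and 'negative_evals' keys are read by
# the code; the import paths, the pluggable names 'EigsQPE' and 'STANDARD', and
# the matrix itself are illustrative assumptions only.
import numpy as np
from qiskit.aqua import Pluggable  # assumed import path

example_matrix = np.array([[1.0, 0.0],
                           [0.0, 2.0]])  # small Hermitian operator

example_eigs_params = {
    Pluggable.SECTION_KEY_EIGS: {
        'name': 'EigsQPE',    # assumed registered eigenvalue-estimator name
        'num_ancillae': 3,
        'negative_evals': False,
    },
    Pluggable.SECTION_KEY_IQFT: {
        'name': 'STANDARD',   # assumed registered IQFT name
    },
}
# eigs = EigsQPE.init_params(example_eigs_params, example_matrix)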
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params (dict): parameters dictionary
        algo_input (EnergyInput): EnergyInput instance

    Returns:
        QEomVQE: Newly created instance

    Raises:
        AquaError: EnergyInput instance is required
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    q_eom_vqe_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)
    initial_point = q_eom_vqe_params.get('initial_point')
    max_evals_grouped = q_eom_vqe_params.get('max_evals_grouped')
    num_orbitals = q_eom_vqe_params.get('num_orbitals')
    num_particles = q_eom_vqe_params.get('num_particles')
    qubit_mapping = q_eom_vqe_params.get('qubit_mapping')
    two_qubit_reduction = q_eom_vqe_params.get('two_qubit_reduction')
    active_occupied = q_eom_vqe_params.get('active_occupied')
    active_unoccupied = q_eom_vqe_params.get('active_unoccupied')

    # Set up variational form, we need to add the computed num qubits to params
    var_form_params = params.get(Pluggable.SECTION_KEY_VAR_FORM)
    var_form_params['num_qubits'] = operator.num_qubits
    var_form = get_pluggable_class(PluggableType.VARIATIONAL_FORM,
                                   var_form_params['name']).init_params(params)

    # Set up optimizer
    opt_params = params.get(Pluggable.SECTION_KEY_OPTIMIZER)
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER,
                                    opt_params['name']).init_params(params)

    return cls(operator, var_form, optimizer,
               initial_point=initial_point,
               max_evals_grouped=max_evals_grouped,
               aux_operators=algo_input.aux_ops,
               num_orbitals=num_orbitals,
               num_particles=num_particles,
               qubit_mapping=qubit_mapping,
               two_qubit_reduction=two_qubit_reduction,
               active_occupied=active_occupied,
               active_unoccupied=active_unoccupied)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params (dict): parameters dictionary
        algo_input (EnergyInput): EnergyInput instance

    Returns:
        VQE: vqe object
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    vqe_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)
    operator_mode = vqe_params.get('operator_mode')
    initial_point = vqe_params.get('initial_point')
    batch_mode = vqe_params.get('batch_mode')

    # Set up initial state, we need to add computed num qubits to params
    init_state_params = params.get(QuantumAlgorithm.SECTION_KEY_INITIAL_STATE)
    init_state_params['num_qubits'] = operator.num_qubits
    init_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                     init_state_params['name']).init_params(init_state_params)

    # Set up variational form, we need to add computed num qubits, and initial state to params
    var_form_params = params.get(QuantumAlgorithm.SECTION_KEY_VAR_FORM)
    var_form_params['num_qubits'] = operator.num_qubits
    var_form_params['initial_state'] = init_state
    var_form = get_pluggable_class(PluggableType.VARIATIONAL_FORM,
                                   var_form_params['name']).init_params(var_form_params)

    # Set up optimizer
    opt_params = params.get(QuantumAlgorithm.SECTION_KEY_OPTIMIZER)
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER,
                                    opt_params['name']).init_params(opt_params)

    return cls(operator, var_form, optimizer, operator_mode=operator_mode,
               initial_point=initial_point, batch_mode=batch_mode,
               aux_operators=algo_input.aux_ops)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: EnergyInput instance
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    evolution_fidelity_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    expansion_order = evolution_fidelity_params.get(EvolutionFidelity.PROP_EXPANSION_ORDER)

    # Set up initial state, we need to add computed num qubits to params
    initial_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    initial_state_params['num_qubits'] = operator.num_qubits
    initial_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                        initial_state_params['name']).init_params(params)

    return cls(operator, initial_state, expansion_order)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params (dict): parameters dictionary
        algo_input (object): Input instance

    Returns:
        MaximumLikelihoodAmplitudeEstimation: instance of this class

    Raises:
        AquaError: input instance not supported
    """
    if algo_input is not None:
        raise AquaError("Input instance not supported.")

    ae_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    log_max_evals = ae_params.get('log_max_evals')

    # Set up uncertainty problem. The params can include an uncertainty model
    # whose type depends on the uncertainty problem, and it is the problem's
    # responsibility to create it for itself from the complete params set
    # that is passed to it.
    uncertainty_problem_params = params.get(Pluggable.SECTION_KEY_UNCERTAINTY_PROBLEM)
    uncertainty_problem = get_pluggable_class(
        PluggableType.UNCERTAINTY_PROBLEM,
        uncertainty_problem_params['name']).init_params(params)

    return cls(log_max_evals, uncertainty_problem, q_factory=None)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: EnergyInput instance
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    # For getting the extra operator, caller has to do something like:
    # algo_input.add_aux_op(evo_op)
    operator = algo_input.qubit_op
    aux_ops = algo_input.aux_ops
    if aux_ops is None or len(aux_ops) != 1:
        raise AquaError("EnergyInput, a single aux op is required for evaluation.")
    evo_operator = aux_ops[0]
    if evo_operator is None:
        raise AquaError("EnergyInput, invalid aux op.")

    dynamics_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    operator_mode = dynamics_params.get(EOH.PROP_OPERATOR_MODE)
    evo_time = dynamics_params.get(EOH.PROP_EVO_TIME)
    num_time_slices = dynamics_params.get(EOH.PROP_NUM_TIME_SLICES)
    expansion_mode = dynamics_params.get(EOH.PROP_EXPANSION_MODE)
    expansion_order = dynamics_params.get(EOH.PROP_EXPANSION_ORDER)

    # Set up initial state, we need to add computed num qubits to params
    initial_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    initial_state_params['num_qubits'] = operator.num_qubits
    initial_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                        initial_state_params['name']).init_params(params)

    return cls(operator, initial_state, evo_operator, operator_mode, evo_time,
               num_time_slices, expansion_mode=expansion_mode,
               expansion_order=expansion_order)
def init_params(cls, params):
    uncertainty_problem_params = params.get(Pluggable.SECTION_KEY_UNCERTAINTY_PROBLEM)
    args = {k: v for k, v in uncertainty_problem_params.items() if k != 'name'}

    # Uncertainty problems take an uncertainty model. Each can take a specific type as
    # a dependent. We currently have two known types and, to save having init_params in
    # each of the problems, a problem can use this base class method that tries to find
    # params for the set of known uncertainty model types.
    uncertainty_model_params = params.get(Pluggable.SECTION_KEY_UNIVARIATE_DISTRIBUTION)
    pluggable_type = PluggableType.UNIVARIATE_DISTRIBUTION
    if uncertainty_model_params is None:
        uncertainty_model_params = params.get(Pluggable.SECTION_KEY_MULTIVARIATE_DISTRIBUTION)
        pluggable_type = PluggableType.MULTIVARIATE_DISTRIBUTION
    if uncertainty_model_params is None:
        raise AquaError("No params for known uncertainty model types found")

    uncertainty_model = get_pluggable_class(
        pluggable_type, uncertainty_model_params['name']).init_params(params)

    return cls(uncertainty_model, **args)
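# A minimal sketch (assumptions, not from the source) of a params dictionary that
# exercises the fallback above: only one distribution section is present, so the
# univariate branch is taken. The import path and the pluggable names
# 'EuropeanCallExpectedValue' and 'LogNormalDistribution' are assumptions.
from qiskit.aqua import Pluggable  # assumed import path

example_problem_params = {
    Pluggable.SECTION_KEY_UNCERTAINTY_PROBLEM: {
        'name': 'EuropeanCallExpectedValue',  # assumed problem name
        # any further keys here are forwarded to the problem constructor as **args
    },
    Pluggable.SECTION_KEY_UNIVARIATE_DISTRIBUTION: {
        'name': 'LogNormalDistribution',      # assumed distribution name
        'num_target_qubits': 3,
    },
    # no multivariate distribution section, so the univariate pluggable type is used
}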
def _validate_defaults_against_schema(self, dependency_pluggable_type, default_name, defaults):
    cls = get_pluggable_class(dependency_pluggable_type, default_name)
    default_config = get_pluggable_configuration(dependency_pluggable_type, default_name)
    if not isinstance(default_config, dict):
        return ["{} configuration isn't a dictionary.".format(cls)]

    schema = default_config.get('input_schema')
    # Validate the schema itself here, not the configuration already checked above.
    if not isinstance(schema, dict):
        return ["{} configuration schema missing or isn't a dictionary.".format(cls)]

    properties = schema.get('properties')
    if not isinstance(properties, dict):
        return ["{} configuration schema '{}' missing or isn't a dictionary.".format(
            cls, 'properties')]

    err_msgs = []
    for default_property_name, default_property_value in defaults.items():
        prop = properties.get(default_property_name)
        if not isinstance(prop, dict):
            err_msgs.append("{} configuration schema '{}/{}' "
                            "missing or isn't a dictionary.".format(
                                cls, 'properties', default_property_name))

    return err_msgs
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: EnergyInput instance
    """
    if algo_input is None:
        raise AquaError("EnergyInput instance is required.")

    operator = algo_input.qubit_op

    iqpe_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)
    num_time_slices = iqpe_params.get(IQPE.PROP_NUM_TIME_SLICES)
    expansion_mode = iqpe_params.get(IQPE.PROP_EXPANSION_MODE)
    expansion_order = iqpe_params.get(IQPE.PROP_EXPANSION_ORDER)
    num_iterations = iqpe_params.get(IQPE.PROP_NUM_ITERATIONS)

    # Set up initial state, we need to add computed num qubits to params
    init_state_params = params.get(QuantumAlgorithm.SECTION_KEY_INITIAL_STATE)
    init_state_params['num_qubits'] = operator.num_qubits
    init_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                     init_state_params['name']).init_params(init_state_params)

    return cls(operator, init_state, num_time_slices=num_time_slices,
               num_iterations=num_iterations, expansion_mode=expansion_mode,
               expansion_order=expansion_order)
def test_pluggable_inputs(self):
    algorithm_problems = set()
    for pluggable_name in local_pluggables(PluggableType.ALGORITHM):
        configuration = get_pluggable_configuration(PluggableType.ALGORITHM, pluggable_name)
        if isinstance(configuration, dict):
            algorithm_problems.update(configuration.get('problems', []))

    err_msgs = []
    all_problems = set()
    for pluggable_name in local_pluggables(PluggableType.INPUT):
        cls = get_pluggable_class(PluggableType.INPUT, pluggable_name)
        configuration = get_pluggable_configuration(PluggableType.INPUT, pluggable_name)
        missing_problems = []
        if isinstance(configuration, dict):
            problem_names = configuration.get('problems', [])
            all_problems.update(problem_names)
            for problem_name in problem_names:
                if problem_name not in algorithm_problems:
                    missing_problems.append(problem_name)

        if len(missing_problems) > 0:
            err_msgs.append("{}: No algorithm declares the problems {}.".format(
                cls, missing_problems))

    invalid_problems = list(set(AlgorithmInput._PROBLEM_SET).difference(all_problems))
    if len(invalid_problems) > 0:
        err_msgs.append("Base Class AlgorithmInput contains problems {} that don't "
                        "belong to any Input class.".format(invalid_problems))

    if len(err_msgs) > 0:
        self.fail('\n'.join(err_msgs))
def init_params(cls, params, algo_input):
    """Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: LinearSystemInput instance
    """
    if algo_input is None:
        raise AquaError("LinearSystemInput instance is required.")

    matrix = algo_input.matrix
    vector = algo_input.vector
    if not isinstance(matrix, np.ndarray):
        matrix = np.asarray(matrix)
    if not isinstance(vector, np.ndarray):
        vector = np.asarray(vector)

    if matrix.shape[0] != len(vector):
        raise ValueError("Input vector dimension does not match input "
                         "matrix dimension!")
    if np.log2(matrix.shape[0]) % 1 != 0:
        # TODO: extend vector and matrix for nonhermitian/non 2**n size
        # matrices and prune dimensions of HHL solution
        raise ValueError("Matrix dimension must be 2**n!")

    # Initialize eigenvalue finding module
    eigs_params = params.get(Pluggable.SECTION_KEY_EIGS)
    eigs = get_pluggable_class(PluggableType.EIGENVALUES,
                               eigs_params['name']).init_params(params, matrix)
    num_q, num_a = eigs.get_register_sizes()

    # Initialize initial state module
    tmpvec = vector
    init_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    init_state_params["num_qubits"] = num_q
    init_state_params["state_vector"] = tmpvec
    init_state = get_pluggable_class(PluggableType.INITIAL_STATE,
                                     init_state_params['name']).init_params(params)

    # Initialize reciprocal rotation module
    reciprocal_params = params.get(Pluggable.SECTION_KEY_RECIPROCAL)
    reciprocal_params["negative_evals"] = eigs._negative_evals
    reciprocal_params["evo_time"] = eigs._evo_time
    reci = get_pluggable_class(PluggableType.RECIPROCAL,
                               reciprocal_params['name']).init_params(params)

    return cls(matrix, vector, eigs, init_state, reci, num_q, num_a)
def init_params(cls, params, algo_input):
    if algo_input is not None:
        raise AquaError("Input instance not supported.")

    oracle_params = params.get(Pluggable.SECTION_KEY_ORACLE)
    oracle = get_pluggable_class(PluggableType.ORACLE,
                                 oracle_params['name']).init_params(params)

    return cls(oracle)
def test_pluggable_configuration(self):
    """ pluggable configuration tests """
    err_msgs = []
    for pluggable_type in local_pluggables_types():
        for pluggable_name in local_pluggables(pluggable_type):
            cls = get_pluggable_class(pluggable_type, pluggable_name)
            configuration = get_pluggable_configuration(pluggable_type, pluggable_name)
            if not isinstance(configuration, dict):
                err_msgs.append("{} configuration isn't a dictionary.".format(cls))
                continue

            if pluggable_type in [PluggableType.ALGORITHM, PluggableType.INPUT]:
                if not configuration.get('problems', []):
                    err_msgs.append("{} missing or empty 'problems' section.".format(cls))

            schema_found = False
            for configuration_name, configuration_value in configuration.items():
                if configuration_name in ['problems', 'depends']:
                    if not isinstance(configuration_value, list):
                        err_msgs.append("{} configuration section:'{}' isn't a list.".format(
                            cls, configuration_name))
                        continue

                    if configuration_name == 'depends':
                        err_msgs.extend(self._validate_depends(cls, configuration_value))

                    continue

                if configuration_name == 'input_schema':
                    schema_found = True
                    if not isinstance(configuration_value, dict):
                        err_msgs.append("{} configuration section:'{}' isn't a dictionary."
                                        .format(cls, configuration_name))
                        continue

                    err_msgs.extend(self._validate_schema(cls, configuration_value))
                    continue

            if not schema_found:
                err_msgs.append("{} configuration missing schema.".format(cls))

    if err_msgs:
        self.fail('\n'.join(err_msgs))
def init_params(cls, params, algo_input):
    if algo_input is not None:
        raise AquaError("Unexpected Input instance.")

    dj_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)

    oracle_params = params.get(QuantumAlgorithm.SECTION_KEY_ORACLE)
    oracle = get_pluggable_class(PluggableType.ORACLE,
                                 oracle_params['name']).init_params(oracle_params)

    return cls(oracle)
def init_params(cls, params):
    var_form_params = params.get(Pluggable.SECTION_KEY_VAR_FORM)
    args = {k: v for k, v in var_form_params.items() if k != 'name'}

    # We pass on num_qubits to initial state since we know our dependent needs this
    init_state_params = params.get(Pluggable.SECTION_KEY_INITIAL_STATE)
    init_state_params['num_qubits'] = var_form_params['num_qubits']
    args['initial_state'] = get_pluggable_class(PluggableType.INITIAL_STATE,
                                                init_state_params['name']).init_params(params)

    return cls(**args)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: Input instance
    """
    if algo_input is not None:
        raise AquaError("Input instance not supported.")

    ae_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)
    num_eval_qubits = ae_params.get('num_eval_qubits')

    # Set up uncertainty model and problem
    uncertainty_model_params = params.get(QuantumAlgorithm.SECTION_KEY_UNCERTAINTY_MODEL)
    uncertainty_model_params['num_target_qubits'] = num_eval_qubits
    uncertainty_model = get_pluggable_class(
        PluggableType.UNCERTAINTY_MODEL,
        uncertainty_model_params['name']).init_params(uncertainty_model_params)

    uncertainty_problem_params = params.get(QuantumAlgorithm.SECTION_KEY_UNCERTAINTY_PROBLEM)
    uncertainty_problem_params['uncertainty_model'] = uncertainty_model
    uncertainty_problem = get_pluggable_class(
        PluggableType.UNCERTAINTY_PROBLEM,
        uncertainty_problem_params['name']).init_params(uncertainty_problem_params)

    # Set up iqft, we need to add num qubits to params, which is our num_eval_qubits here
    iqft_params = params.get(QuantumAlgorithm.SECTION_KEY_IQFT)
    iqft_params['num_qubits'] = num_eval_qubits
    iqft = get_pluggable_class(PluggableType.IQFT,
                               iqft_params['name']).init_params(iqft_params)

    return cls(num_eval_qubits, uncertainty_problem, q_factory=None, iqft=iqft)
def init_params(cls, params, algo_input):
    """ init params """
    algo_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    override_spsa_params = algo_params.get('override_SPSA_params')
    max_evals_grouped = algo_params.get('max_evals_grouped')
    minibatch_size = algo_params.get('minibatch_size')

    # Set up optimizer
    opt_params = params.get(Pluggable.SECTION_KEY_OPTIMIZER)
    # If SPSA then override SPSA params as reqd to our predetermined values
    if opt_params['name'] == 'SPSA' and override_spsa_params:
        opt_params['c0'] = 4.0
        opt_params['c1'] = 0.1
        opt_params['c2'] = 0.602
        opt_params['c3'] = 0.101
        opt_params['c4'] = 0.0
        opt_params['skip_calibration'] = True
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER,
                                    opt_params['name']).init_params(params)

    # Set up feature map
    fea_map_params = params.get(Pluggable.SECTION_KEY_FEATURE_MAP)
    feature_dimension = get_feature_dimension(algo_input.training_dataset)
    fea_map_params['feature_dimension'] = feature_dimension
    feature_map = get_pluggable_class(PluggableType.FEATURE_MAP,
                                      fea_map_params['name']).init_params(params)

    # Set up variational form, we need to add computed num qubits
    # Pass all parameters so that Variational Form can create its dependents
    var_form_params = params.get(Pluggable.SECTION_KEY_VAR_FORM)
    var_form_params['num_qubits'] = feature_map.num_qubits
    var_form = get_pluggable_class(PluggableType.VARIATIONAL_FORM,
                                   var_form_params['name']).init_params(params)

    return cls(optimizer, feature_map, var_form, algo_input.training_dataset,
               algo_input.test_dataset, algo_input.datapoints,
               max_evals_grouped, minibatch_size)
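# A minimal sketch (assumptions, not from the source) of the sections that trigger
# the SPSA override above: with an SPSA optimizer and 'override_SPSA_params' set,
# c0..c4 and skip_calibration are replaced by the predetermined values before the
# optimizer is built. The import path and the pluggable names ('VQC', 'SPSA',
# 'SecondOrderExpansion', 'RYRZ') are assumptions.
from qiskit.aqua import Pluggable  # assumed import path

example_vqc_params = {
    Pluggable.SECTION_KEY_ALGORITHM: {
        'name': 'VQC',                   # assumed algorithm name
        'override_SPSA_params': True,
        'max_evals_grouped': 1,
        'minibatch_size': -1,
    },
    Pluggable.SECTION_KEY_OPTIMIZER: {
        'name': 'SPSA',
        'max_trials': 200,               # assumed SPSA parameter
    },
    Pluggable.SECTION_KEY_FEATURE_MAP: {
        'name': 'SecondOrderExpansion',  # assumed feature map name
    },
    Pluggable.SECTION_KEY_VAR_FORM: {
        'name': 'RYRZ',                  # assumed variational form name
    },
}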
def init_params(cls, params, algo_input):
    svm_params = params.get(Pluggable.SECTION_KEY_ALGORITHM)
    gamma = svm_params.get('gamma', None)

    multiclass_extension = None
    multiclass_extension_params = params.get(Pluggable.SECTION_KEY_MULTICLASS_EXTENSION)
    if multiclass_extension_params is not None:
        multiclass_extension_params['estimator_cls'] = _RBF_SVC_Estimator

        multiclass_extension = get_pluggable_class(
            PluggableType.MULTICLASS_EXTENSION,
            multiclass_extension_params['name']).init_params(params)
        logger.info("Multiclass dataset with extension: {}".format(
            multiclass_extension_params['name']))

    return cls(algo_input.training_dataset, algo_input.test_dataset,
               algo_input.datapoints, gamma, multiclass_extension)
def init_params(cls, params):
    """
    Initialize via parameters dictionary.

    Args:
        params: parameters dictionary

    Returns:
        An object instance of this class
    """
    uni_var_params_params = params.get(Pluggable.SECTION_KEY_UNIVARIATE_DISTRIBUTION)
    num_qubits = uni_var_params_params.get('num_qubits')
    # Use a separate name here; re-using `params` would shadow the dictionary
    # that is still needed below to look up the variational form section.
    dist_params = uni_var_params_params.get('params')
    low = uni_var_params_params.get('low')
    high = uni_var_params_params.get('high')

    var_form_params = params.get(Pluggable.SECTION_KEY_VAR_FORM)
    var_form = get_pluggable_class(PluggableType.VARIATIONAL_FORM,
                                   var_form_params['name']).init_params(params)

    return cls(num_qubits, var_form, dist_params, low, high)
def init_params(cls, params, algo_input):
    """
    Initialize via parameters dictionary and algorithm input instance.

    Args:
        params: parameters dictionary
        algo_input: input instance
    """
    if algo_input is not None:
        raise AquaError("Unexpected Input instance.")

    grover_params = params.get(QuantumAlgorithm.SECTION_KEY_ALGORITHM)
    incremental = grover_params.get(Grover.PROP_INCREMENTAL)
    num_iterations = grover_params.get(Grover.PROP_NUM_ITERATIONS)
    mct_mode = grover_params.get(Grover.PROP_MCT_MODE)

    oracle_params = params.get(QuantumAlgorithm.SECTION_KEY_ORACLE)
    oracle = get_pluggable_class(PluggableType.ORACLE,
                                 oracle_params['name']).init_params(oracle_params)

    return cls(oracle, incremental=incremental,
               num_iterations=num_iterations, mct_mode=mct_mode)
def test_nlopt(self, name):
    """ NLopt test """
    optimizer = get_pluggable_class(PluggableType.OPTIMIZER, name)()
    optimizer.set_options(**{'max_evals': 50000})
    res = self._optimize(optimizer)
    self.assertLessEqual(res[2], 50000)