def run_calibration(
    self,
    job_id: Optional[str] = None,
    processor_ids: Sequence[str] = (),
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
) -> engine_job.EngineJob:
    """Schedules layered calibration routines on the Quantum Engine.

    Only valid for Program objects created with a `FocusedCalibration`.
    The call returns immediately; no results are available until every
    calibration routine has completed.

    Args:
        job_id: Optional job id to use. If this is not provided, a random id
            of the format 'calibration-################YYMMDD' will be
            generated, where # is alphanumeric and YYMMDD is the current
            year, month, and day.
        processor_ids: The engine processors that should be candidates to
            run the program. Only one of these will be scheduled for
            execution.
        description: An optional description to set on the job.
        labels: Optional set of labels to set on the job.

    Returns:
        An EngineJob. Results can be accessed with calibration_results().

    Raises:
        ValueError: If no processors were specified.
    """
    import cirq.google.engine.engine as engine_base

    if not processor_ids:
        raise ValueError('No processors specified')
    if not job_id:
        job_id = engine_base._make_random_id('calibration-')

    # Quantum Engine currently requires a run context with a valid type url
    # to pass validation, so pack an empty RunContext as a placeholder.
    packed_context = qtypes.any_pb2.Any()
    packed_context.Pack(v2.run_context_pb2.RunContext())

    new_job_id, new_job = self.context.client.create_job(
        project_id=self.project_id,
        program_id=self.program_id,
        job_id=job_id,
        processor_ids=processor_ids,
        run_context=packed_context,
        description=description,
        labels=labels,
    )
    # NOTE(review): result_type is Batch even though this is a calibration
    # job — confirm calibration results intentionally share batch handling.
    return engine_job.EngineJob(
        self.project_id,
        self.program_id,
        new_job_id,
        self.context,
        new_job,
        result_type=ResultType.Batch,
    )
def get_job(self, job_id: str) -> engine_job.EngineJob:
    """Returns an EngineJob for an existing Quantum Engine job.

    The handle is constructed locally from this program's stored
    identifiers together with the given job id.

    Args:
        job_id: Unique ID of the job within the parent program.

    Returns:
        A EngineJob for the job.
    """
    return engine_job.EngineJob(
        project_id=self.project_id,
        program_id=self.program_id,
        job_id=job_id,
        context=self.context,
    )
def list_jobs(
    self,
    created_before: Optional[Union[datetime.datetime, datetime.date]] = None,
    created_after: Optional[Union[datetime.datetime, datetime.date]] = None,
    has_labels: Optional[Dict[str, str]] = None,
    execution_states: Optional[Set[quantum.enums.ExecutionStatus.State]] = None,
):
    """Returns the list of jobs in the project.

    All historical jobs can be retrieved using this method and filtering
    options are available too, to narrow down the search based on:
      * creation time
      * job labels
      * execution states

    Args:
        created_after: retrieve jobs that were created after this date
            or time.
        created_before: retrieve jobs that were created before this date
            or time.
        has_labels: retrieve jobs that have labels on them specified by
            this dict. If the value is set to `*`, filters having the label
            regardless of the label value will be filtered. For example, to
            query programs that have the shape label and have the color
            label with value red can be queried using
            {'color': 'red', 'shape': '*'}
        execution_states: retrieve jobs that have an execution state that
            is contained in `execution_states`. See
            `quantum.enums.ExecutionStatus.State` enum for accepted values.

    Returns:
        A list of EngineJob objects, one per job returned by the server.
    """
    client = self.context.client
    # program_id=None queries across all programs in the project.
    response = client.list_jobs(
        self.project_id,
        None,
        created_before=created_before,
        created_after=created_after,
        has_labels=has_labels,
        execution_states=execution_states,
    )
    jobs = []
    for job in response:
        # Parse the job resource name once instead of three separate times.
        project_id, program_id, job_id = client._ids_from_job_name(job.name)
        jobs.append(
            engine_job.EngineJob(
                project_id=project_id,
                program_id=program_id,
                job_id=job_id,
                context=self.context,
                _job=job,
            ))
    return jobs
def run_sweep(
    self,
    job_id: Optional[str] = None,
    params: study.Sweepable = None,
    repetitions: int = 1,
    processor_ids: Sequence[str] = ('xmonsim',),
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
) -> engine_job.EngineJob:
    """Submits a parameter-sweep run of this program to the QuantumEngine.

    Unlike run, this supports multiple parameter sweeps and returns
    without blocking until a result is available.

    Args:
        job_id: Optional job id to use. If this is not provided, a random id
            of the format 'job-################YYMMDD' will be generated,
            where # is alphanumeric and YYMMDD is the current year, month,
            and day.
        params: Parameters to run with the program.
        repetitions: The number of circuit repetitions to run.
        processor_ids: The engine processors that should be candidates to
            run the program. Only one of these will be scheduled for
            execution.
        description: An optional description to set on the job.
        labels: Optional set of labels to set on the job.

    Returns:
        An EngineJob. If this is iterated over it returns a list of
        TrialResults, one for each parameter sweep.

    Raises:
        ValueError: If the program was created in batch mode.
    """
    import cirq.google.engine.engine as engine_base

    # Batch programs must be submitted through run_batch() instead.
    if self.result_type != ResultType.Program:
        raise ValueError('Please use run_batch() for batch mode.')
    if not job_id:
        job_id = engine_base._make_random_id('job-')

    resolved_sweeps = study.to_sweeps(params or study.ParamResolver({}))
    serialized_context = self._serialize_run_context(resolved_sweeps, repetitions)

    new_job_id, new_job = self.context.client.create_job(
        project_id=self.project_id,
        program_id=self.program_id,
        job_id=job_id,
        processor_ids=processor_ids,
        run_context=serialized_context,
        description=description,
        labels=labels,
    )
    return engine_job.EngineJob(
        self.project_id, self.program_id, new_job_id, self.context, new_job
    )
def create_job(
        self,
        *,  # Force keyword args.
        program_name: str,
        job_config: Optional[JobConfig] = None,
        params: study.Sweepable = None,
        repetitions: int = 1,
        priority: int = 500,
        processor_ids: Sequence[str] = ('xmonsim',),
        gate_set: serializable_gate_set.SerializableGateSet = None
) -> engine_job.EngineJob:
    """Creates and submits a job to run on the Quantum Engine.

    Args:
        program_name: Full resource name of the program the job runs
            ('.../programs/<id>').
        job_config: Optional job configuration; missing fields are filled
            in by implied_job_config().
        params: Parameters to run with the program.
        repetitions: The number of circuit repetitions to run.
        priority: Scheduling priority; must be >= 0 and < 1000.
        processor_ids: The engine processors that should be candidates to
            run the program.
        gate_set: Gate set used to serialize the run context; defaults to
            the XMON gate set.

    Returns:
        An EngineJob for the created job.

    Raises:
        ValueError: If priority is outside the range [0, 1000).
    """
    gate_set = gate_set or gate_sets.XMON

    # Check program to run and program parameters.
    # The upper bound is exclusive; the message matches the check.
    if not 0 <= priority < 1000:
        raise ValueError('priority must be >= 0 and < 1000')

    job_config = self.implied_job_config(job_config)
    sweeps = study.to_sweeps(params or study.ParamResolver({}))
    run_context = self._serialize_run_context(sweeps, repetitions)

    # Create job.
    request = {
        'name': '%s/jobs/%s' % (program_name, job_config.job_id),
        'output_config': {
            'gcs_results_location': {
                'uri': job_config.gcs_results
            }
        },
        'scheduling_config': {
            'priority': priority,
            'processor_selector': {
                'processor_names': [
                    'projects/%s/processors/%s' %
                    (self.project_id, processor_id)
                    for processor_id in processor_ids
                ]
            }
        },
        'run_context': run_context
    }
    response = self._make_request(
        self.service.projects().programs().jobs().create(
            parent=program_name, body=request))

    return engine_job.EngineJob(job_config, response, self)
def list_jobs(
    self,
    created_before: Optional[Union[datetime.datetime, datetime.date]] = None,
    created_after: Optional[Union[datetime.datetime, datetime.date]] = None,
    has_labels: Optional[Dict[str, str]] = None,
    execution_states: Optional[Set[quantum.enums.ExecutionStatus.State]] = None,
):
    """Returns the list of jobs for this program.

    Args:
        created_after: retrieve jobs that were created after this date
            or time.
        created_before: retrieve jobs that were created before this date
            or time.
        has_labels: retrieve jobs that have labels on them specified by
            this dict. If the value is set to `*`, filters having the label
            regardless of the label value will be filtered. For example, to
            query programs that have the shape label and have the color
            label with value red can be queried using
            {'color': 'red', 'shape': '*'}
        execution_states: retrieve jobs that have an execution state that
            is contained in `execution_states`. See
            `quantum.enums.ExecutionStatus.State` enum for accepted values.

    Returns:
        A list of EngineJob objects, one per job returned by the server.
    """
    client = self.context.client
    response = client.list_jobs(
        self.project_id,
        self.program_id,
        created_before=created_before,
        created_after=created_after,
        has_labels=has_labels,
        execution_states=execution_states,
    )
    jobs = []
    for job in response:
        # Parse the job resource name once instead of three separate times.
        project_id, program_id, job_id = client._ids_from_job_name(job.name)
        jobs.append(
            engine_job.EngineJob(
                project_id=project_id,
                program_id=program_id,
                job_id=job_id,
                context=self.context,
                _job=job,
            ))
    return jobs
def create_job(
        self,
        *,  # Force keyword args.
        program_name: str,
        job_config: Optional[JobConfig] = None,
        params: study.Sweepable = None,
        repetitions: int = 1,
        priority: int = 500,
        processor_ids: Sequence[str] = ('xmonsim',),
        gate_set: serializable_gate_set.SerializableGateSet = None
) -> engine_job.EngineJob:
    """Creates and submits a QuantumJob to run on the Quantum Engine.

    Args:
        program_name: Full resource name of the program the job runs
            ('.../programs/<id>').
        job_config: Optional job configuration; missing fields are filled
            in by implied_job_config().
        params: Parameters to run with the program.
        repetitions: The number of circuit repetitions to run.
        priority: Scheduling priority; must be >= 0 and < 1000.
        processor_ids: The engine processors that should be candidates to
            run the program.
        gate_set: Gate set used to serialize the run context; defaults to
            the XMON gate set.

    Returns:
        An EngineJob for the created job.

    Raises:
        ValueError: If priority is outside the range [0, 1000).
    """
    gate_set = gate_set or gate_sets.XMON

    # Check program to run and program parameters.
    # The upper bound is exclusive; the message matches the check.
    if not 0 <= priority < 1000:
        raise ValueError('priority must be >= 0 and < 1000')

    job_config = self.implied_job_config(job_config)
    sweeps = study.to_sweeps(params or study.ParamResolver({}))
    run_context = self._serialize_run_context(sweeps, repetitions)

    # Create job.
    request = qtypes.QuantumJob(
        name='%s/jobs/%s' % (program_name, job_config.job_id),
        scheduling_config=qtypes.SchedulingConfig(
            priority=priority,
            processor_selector=qtypes.SchedulingConfig.ProcessorSelector(
                processor_names=[
                    'projects/%s/processors/%s' %
                    (self.project_id, processor_id)
                    for processor_id in processor_ids
                ])),
        run_context=run_context)
    response = self._make_request(lambda: self.client.create_quantum_job(
        program_name, request, False))

    return engine_job.EngineJob(job_config, response, self)
def run_batch(
    self,
    job_id: Optional[str] = None,
    params_list: Optional[List[study.Sweepable]] = None,
    repetitions: int = 1,
    processor_ids: Sequence[str] = (),
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
) -> engine_job.EngineJob:
    """Runs a batch of circuits on the QuantumEngine.

    This method should only be used if the Program object was created
    with a BatchProgram.  The number of parameter sweeps should match
    the number of circuits within that BatchProgram.

    This method does not block until a result is returned.  However,
    no results will be available until the entire batch is complete.

    Args:
        job_id: Optional job id to use. If this is not provided, a random id
            of the format 'job-################YYMMDD' will be generated,
            where # is alphanumeric and YYMMDD is the current year, month,
            and day.
        params_list: Parameter sweeps to run with the program.  There must
            be one Sweepable object for each circuit in the batch. If this
            is None, it is assumed that the circuits are not parameterized
            and do not require sweeps.
        repetitions: The number of circuit repetitions to run.
        processor_ids: The engine processors that should be candidates
            to run the program. Only one of these will be scheduled for
            execution.
        description: An optional description to set on the job.
        labels: Optional set of labels to set on the job.

    Returns:
        An EngineJob. If this is iterated over it returns a list of
        TrialResults. All TrialResults for the first circuit are listed
        first, then the TrialResults for the second, etc. The TrialResults
        for a circuit are listed in the order imposed by the associated
        parameter sweep.

    Raises:
        ValueError: if the program was not a batch program or no processors
            were supplied.
    """
    import cirq.google.engine.engine as engine_base

    if self.result_type != ResultType.Batch:
        raise ValueError('Can only use run_batch() in batch mode.')
    if params_list is None:
        # Unparameterized circuits: one trivial sweep per circuit in the
        # batch (to_sweeps(None) below produces the unit sweep).
        params_list = [None] * self.batch_size()
    if not job_id:
        job_id = engine_base._make_random_id('job-')
    if not processor_ids:
        raise ValueError('No processors specified')

    # Pack the run contexts into batches, one run context per circuit,
    # each carrying its own parameter sweeps and the shared repetitions.
    batch = v2.batch_pb2.BatchRunContext()
    for param in params_list:
        sweeps = study.to_sweeps(param)
        current_context = batch.run_contexts.add()
        for sweep in sweeps:
            sweep_proto = current_context.parameter_sweeps.add()
            sweep_proto.repetitions = repetitions
            v2.sweep_to_proto(sweep, out=sweep_proto.sweep)
    batch_context = qtypes.any_pb2.Any()
    batch_context.Pack(batch)

    created_job_id, job = self.context.client.create_job(
        project_id=self.project_id,
        program_id=self.program_id,
        job_id=job_id,
        processor_ids=processor_ids,
        run_context=batch_context,
        description=description,
        labels=labels,
    )
    return engine_job.EngineJob(
        self.project_id,
        self.program_id,
        created_job_id,
        self.context,
        job,
        result_type=ResultType.Batch,
    )