Example #1
    def _format_data(self, datum: dict) -> dict:
        """
        Checks that the given data has a counts format.

        Args:
            datum: An instance of data that should be a dict with bit strings as keys
                and counts as values.

        Returns:
            The datum as given.

        Raises:
            DataProcessorError: if the data is not a counts dict.
        """
        if self._validate:
            if not isinstance(datum, dict):
                raise DataProcessorError(
                    f"Given counts datum {datum} to "
                    f"{self.__class__.__name__} is not a valid count format.")

            for bit_str, count in datum.items():
                if not isinstance(bit_str, str):
                    raise DataProcessorError(
                        f"Key {bit_str} is not a valid count key in{self.__class__.__name__}."
                    )

                if not isinstance(count, (int, float)):
                    raise DataProcessorError(
                        f"Count {bit_str} is not a valid count value in {self.__class__.__name__}."
                    )

        return datum
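
For reference, a minimal standalone sketch of the counts format this validator accepts (the dict below is made up):

counts = {"00": 490, "01": 8, "10": 12, "11": 490}  # hypothetical bit-string counts
assert all(
    isinstance(bit_str, str) and isinstance(count, (int, float))
    for bit_str, count in counts.items()
)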
Example #2
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Check that there are as many discriminators as there are slots."""
        self._n_shots = 0

        # identify shape
        try:
            # level1 single-shot data
            self._n_circs, self._n_shots, self._n_slots, self._n_iq = data.shape
        except ValueError as ex:
            raise DataProcessorError(
                f"The data given to {self.__class__.__name__} does not have the shape of "
                "single-shot IQ data; expecting a 4D array."
            ) from ex

        if self._validate:
            if data.shape[-1] != 2:
                raise DataProcessorError(
                    f"IQ data given to {self.__class__.__name__} must be a multi-dimensional array"
                    "of dimension [d0, d1, ..., 2] in which the last dimension "
                    "corresponds to IQ elements."
                    f"Input data contains element with length {data.shape[-1]} != 2."
                )

        if self._validate:
            if isinstance(self._discriminator, list):
                if self._n_slots != len(self._discriminator):
                    raise DataProcessorError(
                        f"The Discriminator node has {len(self._discriminator)} which does "
                        f"not match the {self._n_slots} slots in the data."
                    )

        return unp.nominal_values(data)
Example #3
    def _format_data(self, datum: Any, error: Optional[Any] = None) -> Tuple[Any, Any]:
        """Check that the IQ data is 2D and convert it to a numpy array.

        Args:
            datum: A single item of data which corresponds to single-shot IQ data.
            error: An optional error estimate on ``datum``.

        Returns:
            datum and any error estimate as a numpy array.

        Raises:
            DataProcessorError: If the datum does not have the correct format.
        """
        datum = np.asarray(datum, dtype=float)

        if error is not None:
            error = np.asarray(error, dtype=float)

        if self._validate:
            if len(datum.shape) not in {2, 3}:
                raise DataProcessorError(
                    f"IQ data given to {self.__class__.__name__} must be a 2D array. "
                    f"Instead, a {len(datum.shape)}D array was given."
                )

            if error is not None and len(error.shape) not in {2, 3}:
                raise DataProcessorError(
                    f"IQ data error given to {self.__class__.__name__} must be a 2D array."
                    f"Instead, a {len(error.shape)}D array was given."
                )

        return datum, error
Example #4
    def __init__(self, lda: "LinearDiscriminantAnalysis"):
        """
        Args:
            lda: The sklearn linear discriminant analysis. This may be a trained or an
                untrained discriminator.

        Raises:
            DataProcessorError: if SKlearn could not be imported.
        """
        if not HAS_SKLEARN:
            raise DataProcessorError(
                f"SKlearn is needed to initialize an {self.__class__.__name__}."
            )

        self._lda = lda
        self.attributes = [
            "coef_",
            "intercept_",
            "covariance_",
            "explained_variance_ratio_",
            "means_",
            "priors_",
            "scalings_",
            "xbar_",
            "classes_",
            "n_features_in_",
            "feature_names_in_",
        ]
Example #5
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Convert the data to an array.

        This node will also set all the attributes needed to process the data such as
        the number of shots and the number of circuits.

        Args:
            data: An array representing the memory.

        Returns:
            The data that has been processed.

        Raises:
            DataProcessorError: If the datum has the wrong shape.
        """

        self._n_shots = len(data[0])
        self._n_circuits = len(data)

        if self._validate:
            if data.shape[:2] != (self._n_circuits, self._n_shots):
                raise DataProcessorError(
                    f"The datum given to {self.__class__.__name__} does not convert "
                    "of an array with dimension (number of circuit, number of shots)."
                )

        return data
Example #6
    def __init__(
        self,
        outcome: str,
        alpha_prior: Union[float, Sequence[float]] = 0.5,
        validate: bool = True,
    ):
        """Initialize a counts to probability data conversion.

        Args:
            outcome: The bitstring for which to return the probability and variance.
            alpha_prior: A prior Beta distribution parameter ``[alpha0, alpha1]``.
                         If specified as a float this will use the same value for
                         ``alpha0`` and ``alpha1`` (Default: 0.5).
            validate: If set to False the DataAction will not validate its input.

        Raises:
            DataProcessorError: When the dimension of the prior and expected parameter vector
                do not match.
        """
        self._outcome = outcome
        if isinstance(alpha_prior, Number):
            self._alpha_prior = [alpha_prior, alpha_prior]
        else:
            if validate and len(alpha_prior) != 2:
                raise DataProcessorError(
                    "Prior for probability node must be a float or pair of floats."
                )
            self._alpha_prior = list(alpha_prior)

        super().__init__(validate)
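
A hedged sketch of the Beta-posterior estimate such a node plausibly computes (a standard Beta-Binomial update, not necessarily the exact library code; the function name is hypothetical):

def beta_posterior(count_1: int, shots: int, alpha_prior=(0.5, 0.5)):
    """Posterior mean and variance of the outcome probability."""
    # Beta(a, b) posterior from count_1 successes out of shots trials.
    a = count_1 + alpha_prior[0]
    b = (shots - count_1) + alpha_prior[1]
    mean = a / (a + b)
    var = mean * (1 - mean) / (a + b + 1)
    return mean, var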
Example #7
    def _t1_check(self, rep_delay: float) -> bool:
        """Check that repetition delay < T1 of the physical qubits in the experiment.

        Args:
            rep_delay: The repetition delay. This is the delay between a measurement
                    and the subsequent quantum circuit.

        Returns:
            True if the repetition delay is smaller than the qubit T1 times.

        Raises:
            DataProcessorError: if the T1 values are not defined for the qubits of
                the used backend.
        """

        try:
            t1_values = [
                self._backend.properties().qubit_property(physical_qubit)["T1"][0]
                for physical_qubit in self._physical_qubits
            ]

            if all(rep_delay / t1_value < 1.0 for t1_value in t1_values):
                return True
        except AttributeError as error:
            raise DataProcessorError(
                "The restless experiment can not be enabled since "
                "T1 values are not defined for the qubits of the used backend."
            ) from error

        return False
Example #8
def get_processor(
    meas_level: MeasLevel = MeasLevel.CLASSIFIED,
    meas_return: str = "avg",
    normalize: bool = True,
) -> DataProcessor:
    """Get a DataProcessor that produces a continuous signal given the options.

    Args:
        meas_level: The measurement level of the data to process.
        meas_return: The measurement return (single or avg) of the data to process.
        normalize: Add a data normalization node to the Kerneled data processor.

    Returns:
        An instance of DataProcessor capable of dealing with the given options.

    Raises:
        DataProcessorError: if the measurement level is not supported.
    """
    if meas_level == MeasLevel.CLASSIFIED:
        return DataProcessor("counts", [Probability("1")])

    if meas_level == MeasLevel.KERNELED:
        if meas_return == "single":
            processor = DataProcessor("memory", [AverageData(axis=1), SVD()])
        else:
            processor = DataProcessor("memory", [SVD()])

        if normalize:
            processor.append(MinMaxNormalize())

        return processor

    raise DataProcessorError(f"Unsupported measurement level {meas_level}.")
Example #9
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Validate the input data."""
        if self._validate:
            if len(data.shape) <= 1:
                raise DataProcessorError(
                    "The data should be an array with at least two dimensions."
                )

        return data
Example #10
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Check that the IQ data is 2D and convert it to a numpy array.

        Args:
            data: A data array to format. This is a single numpy array containing
                all circuit results input to the data processor.
                This data has different dimensions depending on whether
                single-shot or averaged data is being processed.
                Single-shot data is four dimensional, i.e., ``[#circuits, #shots, #slots, 2]``,
                while averaged IQ data is three dimensional, i.e., ``[#circuits, #slots, 2]``.
                Here, ``#slots`` is the number of classical registers used in the circuit.

        Returns:
            The input data, unchanged.

        Raises:
            DataProcessorError: If the datum does not have the correct format.
        """
        self._n_circs = 0
        self._n_shots = 0
        self._n_slots = 0
        self._n_iq = 0

        # identify shape
        try:
            # level1 single-shot data
            self._n_circs, self._n_shots, self._n_slots, self._n_iq = data.shape
        except ValueError:
            try:
                # level1 data averaged over shots
                self._n_circs, self._n_slots, self._n_iq = data.shape
            except ValueError as ex:
                raise DataProcessorError(
                    f"Data given to {self.__class__.__name__} is not likely level1 data."
                ) from ex

        if self._validate:
            if self._n_iq != 2:
                raise DataProcessorError(
                    f"IQ data given to {self.__class__.__name__} does not have two-dimensions "
                    f"(I and Q). Instead, {self._n_iq} dimensions were found."
                )

        return data
Example #11
    def _process(
        self, datum: np.ndarray, error: Optional[np.ndarray] = None
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Project the IQ data onto the axis defined by an SVD and scale it.

        Args:
            datum: A 2D array, indexed by qubit, of averaged IQ points given as
                [real, imaginary] pairs.
            error: An optional array of the same shape as ``datum`` containing the
                errors on the averaged IQ points.

        Returns:
            A Tuple of 1D arrays of the result of the SVD and the associated error. Each entry
            is the real part of the averaged IQ data of a qubit.

        Raises:
            DataProcessorError: If the SVD has not been previously trained on data.
        """

        if not self.is_trained:
            raise DataProcessorError("SVD must be trained on data before it can be used.")

        n_qubits = datum.shape[0] if len(datum.shape) == 2 else datum.shape[1]
        processed_data = []

        if error is not None:
            processed_error = []
        else:
            processed_error = None

        # process each averaged IQ point with its own axis.
        for idx in range(n_qubits):

            centered = np.array(
                [datum[..., idx, iq] - self.means(qubit=idx, iq_index=iq) for iq in [0, 1]]
            )

            processed_data.append((self._main_axes[idx] @ centered) / self.scales[idx])

            if error is not None:
                angle = np.arctan(self._main_axes[idx][1] / self._main_axes[idx][0])
                error_value = np.sqrt(
                    (error[..., idx, 0] * np.cos(angle)) ** 2
                    + (error[..., idx, 1] * np.sin(angle)) ** 2
                )
                processed_error.append(error_value / self.scales[idx])

        if len(processed_data) == 1:
            if error is None:
                return processed_data[0], None
            else:
                return processed_data[0], processed_error[0]

        if error is None:
            return np.array(processed_data), None
        else:
            return np.array(processed_data), np.array(processed_error)
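
A standalone numpy sketch of the per-qubit projection above (axis, means, and scale values are made up; assumes the trained main axis is a unit vector):

import numpy as np

main_axis = np.array([0.8, 0.6])  # hypothetical trained SVD axis (unit norm)
means = np.array([0.1, -0.2])     # hypothetical trained (I, Q) means
scale = 2.0                       # hypothetical trained scale
iq_point = np.array([0.5, 0.3])   # one averaged IQ point as [real, imaginary]

projected = (main_axis @ (iq_point - means)) / scale  # scalar signal for this qubit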
Example #12
    def _format_data(self, datum: dict, error: Optional[Any] = None) -> Tuple[dict, Any]:
        """
        Checks that the given data has a counts format.

        Args:
            datum: An instance of data that should be a dict with bit strings as keys
                and counts as values.

        Returns:
            The datum as given.

        Raises:
            DataProcessorError: if the data is not a counts dict or a list of counts dicts.
        """
        if self._validate:

            if isinstance(datum, dict):
                data = [datum]
            elif isinstance(datum, list):
                data = datum
            else:
                raise DataProcessorError(f"Datum must be dict or list, received {type(datum)}.")

            for datum_ in data:
                if not isinstance(datum_, dict):
                    raise DataProcessorError(
                        f"Given counts datum {datum_} to "
                        f"{self.__class__.__name__} is not a valid count format."
                    )

                for bit_str, count in datum_.items():
                    if not isinstance(bit_str, str):
                        raise DataProcessorError(
                            f"Key {bit_str} is not a valid count key in{self.__class__.__name__}."
                        )

                    if not isinstance(count, (int, float, np.integer)):
                        raise DataProcessorError(
                            f"Count {bit_str} is not a valid count value in {self.__class__.__name__}."
                        )

        return datum, None
Example #13
    def run(self, run_input, **options):
        """Run the restless backend."""

        self.options.update_options(**options)
        shots = self.options.get("shots")
        meas_level = self.options.get("meas_level")

        result = {
            "backend_name": f"{self.__class__.__name__}",
            "backend_version": "0",
            "qobj_id": 0,
            "job_id": 0,
            "success": True,
            "results": [],
        }

        self._compute_outcome_probabilities(run_input)

        if run_input[0].num_qubits != 2:
            raise DataProcessorError(f"{self.__class__.__name__} is a two qubit mock device.")

        prev_outcome, state_strings = "00", self._get_state_strings(2)

        # Setup the list of dicts where each dict corresponds to a circuit.
        sorted_memory = [{"memory": [], "metadata": circ.metadata} for circ in run_input]

        for _ in range(shots):
            for circ_idx, _ in enumerate(run_input):
                probs = self._precomputed_probabilities[(circ_idx, prev_outcome)]
                # Generate the next shot dependent on the pre-computed probabilities.
                outcome = self._rng.choice(state_strings, p=probs)
                # Append the single shot to the memory of the corresponding circuit.
                sorted_memory[circ_idx]["memory"].append(hex(int(outcome, 2)))

                prev_outcome = outcome

        for idx, circ in enumerate(run_input):
            counts = {}
            for key1, key2 in zip(["00", "01", "10", "11"], ["0x0", "0x1", "0x2", "0x3"]):
                counts[key1] = sorted_memory[idx]["memory"].count(key2)
            run_result = {
                "shots": shots,
                "success": True,
                "header": {"metadata": circ.metadata},
                "meas_level": meas_level,
                "data": {
                    "counts": counts,
                    "memory": sorted_memory[idx]["memory"],
                },
            }

            result["results"].append(run_result)

        return FakeJob(self, Result.from_dict(result))
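
A minimal standalone sketch of the hex-memory-to-counts conversion performed above (the memory list is made up):

memory = ["0x0", "0x1", "0x0", "0x3"]
counts = {f"{i:02b}": memory.count(hex(i)) for i in range(4)}
# counts == {"00": 2, "01": 1, "10": 0, "11": 1}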
Example #14
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """
        Checks that the given data has a counts format.

        Args:
            data: A data array to format. This is a single numpy array containing
                all circuit results input to the data processor.
                This is usually an object data type containing Python dictionaries of
                count data keyed on the measured bitstring.
                A count value is a discrete quantity representing the frequency of an event.
                Therefore, count values do not have an uncertainty.

        Returns:
            The ``data`` as given.

        Raises:
            DataProcessorError: If the data is not a counts dict or a list of counts dicts.
        """
        valid_count_type = int, np.integer

        if self._validate:
            for datum in data:
                if not isinstance(datum, dict):
                    raise DataProcessorError(
                        f"Data entry must be dictionary of counts, received {type(datum)}."
                    )
                for bit_str, count in datum.items():
                    if not isinstance(bit_str, str):
                        raise DataProcessorError(
                            f"Key {bit_str} is not a valid count key in {self.__class__.__name__}."
                        )
                    if not isinstance(count, valid_count_type):
                        raise DataProcessorError(
                            f"Count {bit_str} is not a valid count for {self.__class__.__name__}. "
                            "The uncertainty of probability is computed based on sampling error, "
                            "thus the count should be an error-free discrete quantity "
                            "representing the frequency of event."
                        )

        return data
Example #15
    def _format_data(self, datum: Any, error: Optional[Any] = None) -> Tuple[Any, Any]:
        """Check that the IQ data has the correct format and convert to numpy array.

        Args:
            datum: A single item of data which corresponds to single-shot IQ data. Its
                dimension will depend on whether it is single-shot IQ data (three-dimensional)
                or averaged IQ data (two-dimensional).

        Returns:
            datum and any error estimate as a numpy array.

        Raises:
            DataProcessorError: If the datum does not have the correct format.
        """
        datum = np.asarray(datum, dtype=float)

        if error is not None:
            error = np.asarray(error, dtype=float)

        if self._validate:
            if len(datum.shape) not in {2, 3, 4}:
                raise DataProcessorError(
                    f"IQ data given to {self.__class__.__name__} must be an N dimensional"
                    f"array with N in (2, 3, 4). Instead, a {len(datum.shape)}D array was given."
                )

            if error is not None and len(error.shape) not in {2, 3, 4}:
                raise DataProcessorError(
                    f"IQ data error given to {self.__class__.__name__} must be an N dimensional"
                    f"array with N in (2, 3, 4). Instead, a {len(error.shape)}D array was given."
                )

            if error is not None and len(error.shape) != len(datum.shape):
                raise DataProcessorError(
                    "Datum and error do not have the same shape: "
                    f"{len(datum.shape)} != {len(error.shape)}."
                )

        return datum, error
Example #16
    def _process(self, data: np.ndarray) -> np.ndarray:
        """Project the IQ data onto the axis defined by an SVD and scale it.

        Args:
            data: A data array to process. This is a single numpy array containing
                all circuit results input to the data processor.

        Returns:
            An array of SVD projection results. Each entry is the real part of the
            averaged IQ data of a qubit. The data has the shape n_circuits x n_slots
            for averaged data and n_circuits x n_shots x n_slots for single-shot data.

        Raises:
            DataProcessorError: If the SVD has not been previously trained on data.
        """
        if not self.is_trained:
            raise DataProcessorError("SVD must be trained on data before it can be used.")

        # IQ axis is reduced by projection
        if self._n_shots == 0:
            # level1 average mode
            dims = self._n_circs, self._n_slots
        else:
            # level1 single mode
            dims = self._n_circs, self._n_shots, self._n_slots

        projected_data = np.zeros(dims, dtype=object)

        for idx in range(self._n_slots):
            scale = self.parameters.scales[idx]
            axis = self.parameters.main_axes[idx]
            mean_i = self.parameters.i_means[idx]
            mean_q = self.parameters.q_means[idx]

            if self._n_shots != 0:
                # Single shot
                for circ_idx in range(self._n_circs):
                    centered = [
                        data[circ_idx, :, idx, 0] - mean_i,
                        data[circ_idx, :, idx, 1] - mean_q,
                    ]
                    projected_data[circ_idx, :, idx] = axis @ np.array(centered) / scale
            else:
                # Averaged
                centered = [data[:, idx, 0] - mean_i, data[:, idx, 1] - mean_q]
                projected_data[:, idx] = axis @ np.array(centered) / scale

        return projected_data
Example #17
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Format and validate the input.

        Args:
            data: A data array to format. This is a single numpy array containing
                all circuit results input to the data processor.

        Returns:
            The data that has been validated and formatted.

        Raises:
            DataProcessorError: When input data is not likely IQ data.
        """
        self._n_shots = 0

        # identify shape
        try:
            # level1 single-shot data
            self._n_circs, self._n_shots, self._n_slots, self._n_iq = data.shape
        except ValueError:
            try:
                # level1 data averaged over shots
                self._n_circs, self._n_slots, self._n_iq = data.shape
            except ValueError as ex:
                raise DataProcessorError(
                    f"Data given to {self.__class__.__name__} is not likely level1 data."
                ) from ex

        if self._validate:
            if data.shape[-1] != 2:
                raise DataProcessorError(
                    f"IQ data given to {self.__class__.__name__} must be a multi-dimensional array"
                    "of dimension [d0, d1, ..., 2] in which the last dimension "
                    "corresponds to IQ elements."
                    f"Input data contains element with length {data.shape[-1]} != 2."
                )

        return data
Example #18
    def _run_data_processing(
        self,
        raw_data: List[Dict],
        series: List[SeriesDef],
    ) -> CurveData:
        """Perform data processing from the experiment result payload.

        Args:
            raw_data: Payload in the experiment data.
            series: A list of series definitions that define the filtering conditions.

        Returns:
            Processed data that will be sent to the formatter method.

        Raises:
            DataProcessorError: When key for x values is not found in the metadata.
        """
        x_key = self.options.x_key

        try:
            xdata = np.asarray([datum["metadata"][x_key] for datum in raw_data], dtype=float)
        except KeyError as ex:
            raise DataProcessorError(
                f"X value key {x_key} is not defined in circuit metadata."
            ) from ex

        ydata = self.options.data_processor(raw_data)
        shots = np.asarray([datum.get("shots", np.nan) for datum in raw_data])

        def _matched(metadata, **filters):
            try:
                return all(metadata[key] == val for key, val in filters.items())
            except KeyError:
                return False

        data_allocation = np.full(xdata.size, -1, dtype=int)
        for sind, series_def in enumerate(series):
            matched_inds = np.asarray(
                [_matched(d["metadata"], **series_def.filter_kwargs) for d in raw_data], dtype=bool
            )
            data_allocation[matched_inds] = sind

        return CurveData(
            x=xdata,
            y=unp.nominal_values(ydata),
            y_err=unp.std_devs(ydata),
            shots=shots,
            data_allocation=data_allocation,
            labels=[s.name for s in series],
        )
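
A standalone sketch of the metadata matching used above to allocate data points to series (the payload and filter are made up; ``.get`` stands in for the KeyError handling above):

raw_data = [{"metadata": {"series": "x"}}, {"metadata": {"series": "y"}}]
filters = {"series": "x"}
matched = [
    all(d["metadata"].get(key) == val for key, val in filters.items()) for d in raw_data
]
# matched == [True, False]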
Example #19
    def from_config(cls, config: Dict[str, Any]) -> "SkLDA":
        """Deserialize from an object."""

        if not HAS_SKLEARN:
            raise DataProcessorError(
                f"SKlearn is needed to initialize an {cls.__name__}.")

        lda = LinearDiscriminantAnalysis()
        lda.set_params(**config["params"])

        for name, value in config["attributes"].items():
            if value is not None:
                setattr(lda, name, value)

        return SkLDA(lda)
Example #20
    def _data_extraction(self, data: Union[Dict, List[Dict]]) -> List:
        """Extracts the data on which to run the nodes.

        If the datum is a list of dicts then the data under self._input_key is extracted
        from each dict and appended to a list which therefore contains all the data. If the
        data processor has to_array set to True then the list will be converted to a numpy
        array.

        Args:
            data: A dict or a list of dicts where the data is contained under the key self._input_key.

        Returns:
            The data formatted in such a way that it is ready to be processed by the nodes.

        Raises:
            DataProcessorError:
                - If the input datum is not a list or a dict.
                - If the data processor received a single datum but requires all the data to
                  process it properly.
                - If the input key of the data processor is not contained in the data.
        """
        if isinstance(data, dict):
            data = [data]

        try:
            data_ = [_datum[self._input_key] for _datum in iter(data)]
        except KeyError as error:
            raise DataProcessorError(
                f"The input key {self._input_key} was not found in the input datum."
            ) from error
        except TypeError as error:
            raise DataProcessorError(
                f"{self.__class__.__name__} only extracts data from "
                f"lists or dicts, received {type(data)}.") from error

        return data_
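
A minimal standalone sketch of the key extraction above (the input key and payload are made up):

input_key = "counts"
data = [{"counts": {"0": 1}}, {"counts": {"1": 2}}]
extracted = [datum[input_key] for datum in data]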
Example #21
    def _format_data(self, datum: Any, error: Optional[Any] = None):
        """Format the data into numpy arrays."""
        datum = np.asarray(datum, dtype=float)

        if self._validate:
            if len(datum.shape) <= self._axis:
                raise DataProcessorError(
                    f"Cannot average the {len(datum.shape)} dimensional "
                    f"array along axis {self._axis}."
                )

        if error is not None:
            error = np.asarray(error, dtype=float)

        return datum, error
Example #22
def get_kerneled_processor(
    dimensionality_reduction: Union[ProjectorType, str],
    meas_return: str,
    normalize: bool,
    pre_nodes: Optional[List[DataAction]] = None,
) -> DataProcessor:
    """Get a DataProcessor for `meas_level=1` data that returns a one-dimensional signal.

    Args:
        dimensionality_reduction: Type of the node that will reduce the two-dimensional data to
            one dimension.
        meas_return: Type of data returned by the backend, i.e., averaged data or single-shot data.
        normalize: If True then normalize the output data to the interval ``[0, 1]``.
        pre_nodes: Any nodes to be applied first in the data processing chain, such as
            restless nodes.

    Returns:
        An instance of DataProcessor capable of processing `meas_level=MeasLevel.KERNELED` data for
        the corresponding job.

    Raises:
        DataProcessorError: If the wrong dimensionality reduction for kerneled data
            is specified.
    """

    try:
        if isinstance(dimensionality_reduction, ProjectorType):
            projector_name = dimensionality_reduction.name
        else:
            projector_name = dimensionality_reduction

        projector = ProjectorType[projector_name].value

    except KeyError as error:
        raise DataProcessorError(
            f"Invalid dimensionality reduction: {dimensionality_reduction}."
        ) from error

    processor_nodes = pre_nodes or []

    if meas_return == "single":
        processor_nodes.append(nodes.AverageData(axis=1))

    processor_nodes.append(projector())

    if normalize:
        processor_nodes.append(nodes.MinMaxNormalize())

    return DataProcessor("memory", processor_nodes)
Example #23
    def _call_internal(
            self,
            datum: Dict[str, Any],
            with_history: bool,
            history_nodes: Set = None) -> Union[Any, Tuple[Any, List]]:
        """
        Internal function to process the data with or without storing the history of the computation.

        Args:
            datum: A single item of data, typically from an ExperimentData instance, that
                needs to be processed.
            with_history: if True the history is returned otherwise it is not.
            history_nodes: The nodes, specified by index in the data processing chain, to
                include in the history. If None is given then all nodes will be included
                in the history.

        Returns:
            datum_ and history if with_history is True or datum_ if with_history is False.

        Raises:
            DataProcessorError: If the input key of the data processor is not contained in datum.
        """

        if self._input_key not in datum:
            raise DataProcessorError(
                f"The input key {self._input_key} was not found in the input datum."
            )

        datum_ = datum[self._input_key]

        history = []
        for index, node in enumerate(self._nodes):
            datum_ = node(datum_)

            if with_history and (history_nodes is None or index in history_nodes):
                history.append((node.__class__.__name__, datum_, index))

        if with_history:
            return datum_, history
        else:
            return datum_
Example #24
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Format and validate the input.

        Args:
            data: A data array to format. This is a single numpy array containing
                all circuit results input to the data processor.

        Returns:
            The data that has been validated and formatted.

        Raises:
            DataProcessorError: When input value is not in [0, 1]
        """
        if self._validate:
            if not all(0.0 <= p <= 1.0 for p in data):
                raise DataProcessorError(
                    f"Input data for node {self.__class__.__name__} is not likely probability."
                )

        return data
Example #25
    def _format_data(
        self, datum: np.ndarray, error: Optional[np.ndarray] = None
    ) -> Tuple[Any, Any]:
        """Check that the input data are probabilities.

        Args:
            datum: An array representing probabilities.
            error: An array representing error.

        Returns:
            Arrays of probability and its error

        Raises:
            DataProcessorError: When input value is not in [0, 1]
        """
        if not all(0.0 <= p <= 1.0 for p in datum):
            raise DataProcessorError(
                f"Input data for node {self.__class__.__name__} is not likely probability."
            )
        return datum, error
Example #26
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Format the data into numpy arrays.

        Args:
            data: A data array to format. This is a single numpy array containing
                all circuit results input to the data processor.

        Returns:
            The data that has been validated and formatted.

        Raises:
            DataProcessorError: When the specified axis does not exist in given array.
        """
        if self._validate:
            if len(data.shape) <= self._axis:
                raise DataProcessorError(
                    f"Cannot average the {len(data.shape)} dimensional "
                    f"array along axis {self._axis}.")

        return data
Example #27
    def _format_data(self, datum: Any) -> Any:
        """Check that the IQ data has the correct format and convert to numpy array.

        Args:
            datum: A single item of data which corresponds to single-shot IQ data. It should
                have dimension three: shots, qubits, iq-point as [real, imaginary].

        Returns:
            datum as a numpy array.

        Raises:
            DataProcessorError: If the datum does not have the correct format.
        """
        datum = np.asarray(datum, dtype=float)

        if self._validate and len(datum.shape) != 3:
            raise DataProcessorError(
                f"Single-shot data given to {self.__class__.__name__} "
                f"must be a 3D array. Instead, a {len(datum.shape)}D "
                f"array was given.")

        return datum
Example #28
    def _process(self, data: np.ndarray) -> np.ndarray:
        """Project the IQ data onto the axis defined by an SVD and scale it.

        Args:
            data: A data array to process. This is a single numpy array containing
                all circuit results input to the data processor.

        Returns:
            An array of SVD projection results. Each entry is the real part of the
            averaged IQ data of a qubit.

        Raises:
            DataProcessorError: If the SVD has not been previously trained on data.
        """
        if not self.is_trained:
            raise DataProcessorError("SVD must be trained on data before it can be used.")

        # IQ axis is reduced by projection
        if self._n_shots == 0:
            # level1 average mode
            dims = self._n_circs, self._n_slots
        else:
            # level1 single mode
            dims = self._n_circs, self._n_shots, self._n_slots

        projected_data = np.zeros(dims, dtype=object)

        for idx in range(self._n_slots):
            scale = self.parameters.scales[idx]
            # error propagation is computed from data if any std error exists
            centered = np.array(
                [
                    data[..., idx, 0] - self.parameters.i_means[idx],
                    data[..., idx, 1] - self.parameters.q_means[idx],
                ]
            )
            projected_data[..., idx] = (self.parameters.main_axes[idx] @ centered) / scale

        return projected_data
Example #29
    def _format_data(self, data: np.ndarray) -> np.ndarray:
        """Format and validate the input.

        Args:
            data: A data array to format. This is a single numpy array containing
                all circuit results input to the data processor.

        Returns:
            The data that has been validated and formatted.

        Raises:
            DataProcessorError: When input data is not likely IQ data.
        """
        if self._validate:
            if data.shape[-1] != 2:
                raise DataProcessorError(
                    f"IQ data given to {self.__class__.__name__} must be a multi-dimensional array"
                    "of dimension [d0, d1, ..., 2] in which the last dimension "
                    "corresponds to IQ elements."
                    f"Input data contains element with length {data.shape[-1]} != 2."
                )

        return data
Example #30
def get_processor(experiment_data: ExperimentData,
                  analysis_options: Options) -> DataProcessor:
    """Get a DataProcessor that produces a continuous signal given the options.

    Args:
        experiment_data: The experiment data that holds all the data and metadata needed
            to determine the data processor to use to process the data for analysis.
        analysis_options: The analysis options with which to analyze the data. The options that
            are relevant for the configuration of a data processor are:
            - normalization (bool): A boolean to specify if the data should be normalized to
              the interval [0, 1]. The default is True. This option is only relevant if
              kerneled data is used.
            - dimensionality_reduction: An optional string or instance of :class:`ProjectorType`
              to represent the dimensionality reduction node for Kerneled data. For the
              supported nodes, see :class:`ProjectorType`. Typically, these nodes convert
              complex IQ data to real data, for example by performing a singular-value
              decomposition. This argument is only needed for Kerneled data (i.e. level 1)
              and can thus be ignored if Classified data (the default) is used.
            - outcome (string): The measurement outcome that will be passed to a Probability node.
              The default value is a string of 1's where the length of the string is the number of
              qubits, e.g. '111' for three qubits.

    Returns:
        An instance of DataProcessor capable of processing the data for the corresponding job.

    Notes:
        The `physical_qubits` argument is extracted from the `experiment_data`
        metadata and is used to determine the default `outcome` to extract from
        classified data if it was not given in the analysis options.

    Raises:
        DataProcessorError: if the measurement level is not supported.
    """
    metadata = experiment_data.metadata
    if "job_metadata" in metadata:
        # Backwards compatibility for old experiment data
        # remove job metadata and add required fields to new location in metadata
        job_meta = metadata.pop("job_metadata")
        run_options = job_meta[-1].get("run_options", {})
        for opt in ["meas_level", "meas_return"]:
            if opt in run_options:
                metadata[opt] = run_options[opt]
        warnings.warn(
            "The analyzed ExperimentData contains deprecated data processor "
            " job_metadata which has been been updated to current metadat format. "
            "If this data was loaded from a database servide you should re-save it "
            "to update the metadata in the database.",
            DeprecationWarning,
        )

    meas_level = metadata.get("meas_level", MeasLevel.CLASSIFIED)
    meas_return = metadata.get("meas_return", MeasReturnType.AVERAGE)
    normalize = analysis_options.get("normalization", True)
    dimensionality_reduction = analysis_options.get("dimensionality_reduction",
                                                    ProjectorType.SVD)

    if meas_level == MeasLevel.CLASSIFIED:
        num_qubits = len(metadata.get("physical_qubits", [0]))
        outcome = analysis_options.get("outcome", "1" * num_qubits)
        return DataProcessor("counts", [nodes.Probability(outcome)])

    if meas_level == MeasLevel.KERNELED:
        return get_kerneled_processor(dimensionality_reduction, meas_return,
                                      normalize)

    raise DataProcessorError(f"Unsupported measurement level {meas_level}.")