def test_invalid_n_mean(self):
    """Check that a negative mean photon number is rejected with a
    ``ValueError``."""
    graph = nx.complete_graph(10)
    with pytest.raises(ValueError, match="Mean photon number must be non-negative"):
        similarity.prob_event_mc(graph, 2, 2, n_mean=-1)
 def test_invalid_samples(self):
     """Check that requesting fewer than one sample triggers a
     ``ValueError``."""
     graph = nx.complete_graph(10)
     with pytest.raises(ValueError, match="Number of samples must be at least one"):
         similarity.prob_event_mc(graph, 2, 2, samples=0)
 def test_invalid_loss(self):
     """Check that a loss parameter outside the allowed range triggers a
     ``ValueError``."""
     graph = nx.complete_graph(10)
     expected_msg = "Loss parameter must take a value between zero and"
     with pytest.raises(ValueError, match=expected_msg):
         similarity.prob_event_mc(graph, 2, 2, loss=2)
    def test_loss(self, monkeypatch):
        """Check that a nonzero ``loss`` inserts a ``LossChannel`` into the
        Strawberry Fields program handed to the engine."""
        g = nx.complete_graph(5)
        run_mock = mock.MagicMock()

        with monkeypatch.context() as ctx:
            # Intercept the engine run so we can inspect the program it receives.
            ctx.setattr(sf.LocalEngine, "run", run_mock)
            similarity.prob_event_mc(g, 6, 3, samples=1, loss=0.5)
            program = run_mock.call_args[0][0]

        assert isinstance(program.circuit[1].op, sf.ops.LossChannel)
    def test_all_loss(self, monkeypatch):
        """Check that maximum loss (``loss=1``) leaves the device in the vacuum
        state: zero displacement and covariance equal to (hbar/2) * identity."""
        n_modes = 5
        g = nx.complete_graph(n_modes)
        run_mock = mock.MagicMock()

        with monkeypatch.context() as ctx:
            # Capture the program instead of running it, so it can be executed
            # on a fresh Gaussian engine below.
            ctx.setattr(sf.LocalEngine, "run", run_mock)
            similarity.prob_event_mc(g, 6, 3, samples=1, loss=1)
            program = run_mock.call_args[0][0]

        engine = sf.LocalEngine(backend="gaussian")
        state = engine.run(program).state

        assert np.allclose(state.cov(), 0.5 * state.hbar * np.eye(2 * n_modes))
        assert np.allclose(state.displacement(), np.zeros(n_modes))
# Example #6
# 0
    def test_mean_event(self, monkeypatch):
        """Tests if the calculation of the sample mean is performed correctly. The test
        monkeypatches the fock_prob function so that the probability is the same for each sample and
        is equal to 1/336, i.e., one over the number of samples in the event with 6 modes,
        6 photons, and max 3 photons per mode."""
        graph = nx.complete_graph(6)
        with monkeypatch.context() as m:
            m.setattr(
                "strawberryfields.backends.gaussianbackend.GaussianState.fock_prob",
                lambda *args, **kwargs: 1.0 / 336,
            )

            assert np.allclose(similarity.prob_event_mc(graph, 6, 3), 1.0)
##############################################################################
# To avoid calculating a large number of sample probabilities, an alternative is to perform a
# Monte Carlo approximation. Here, samples within an event are selected uniformly at random and
# their resultant probabilities are calculated. If :math:`N` samples :math:`\{S_{1}, S_{2},
# \ldots , S_{N}\}` are generated, then the event probability can be approximated as
#
# .. math::
#     p(E_{k, n_{\max}}) \approx \frac{1}{N}\sum_{i=1}^N p(S_i) |E_{k, n_{\max}}|,
#
# with :math:`|E_{k, n_{\max}}|` denoting the cardinality of the event.
#
# This method can be accessed using the :func:`~.prob_event_mc` function. The 4-photon event is
# approximated as:

# Monte Carlo estimate of the 4-photon event probability for the graph m0_a.
event_probability = similarity.prob_event_mc(
    nx.Graph(m0_a), 4, max_count_per_mode=2, n_mean=6
)
print(event_probability)

##############################################################################
# The feature vector can then be calculated through Monte Carlo sampling using
# :func:`~.feature_vector_mc`.
#
# .. note::
#     The results of :func:`~.prob_event_mc` and :func:`~.feature_vector_mc` are probabilistic and
#     may vary between runs. Increasing the optional ``samples`` parameter will increase accuracy
#     but slow down calculation.
#
# The second method of Monte Carlo approximation is intended for use in scenarios where it is
# computationally intensive to pre-calculate a statistically significant dataset of samples from
# GBS.
#
# Machine learning with GBS graph kernels
# Example #8
# 0
    def test_prob_vacuum_event(self):
        """Check that the zero-photon event has probability one when the GBS
        device is configured with zero mean photon number."""
        g = nx.complete_graph(10)

        prob = similarity.prob_event_mc(g, 0, 0, 0)
        assert prob == 1.0
 def test_low_count(self):
     """Check that a negative ``max_count_per_mode`` triggers a
     ``ValueError``."""
     graph = nx.complete_graph(10)
     with pytest.raises(ValueError, match="Maximum number of photons"):
         similarity.prob_event_mc(graph, 2, -1)
 def test_invalid_photon_number(self):
     """Check that a negative total photon number triggers a ``ValueError``."""
     graph = nx.complete_graph(10)
     with pytest.raises(ValueError, match="Photon number must not be below zero"):
         similarity.prob_event_mc(graph, -1, 2)