Example #1
import unittest

from abcpy.backends import BackendDummy as Backend
from abcpy.continuousmodels import Normal, Uniform
from abcpy.statistics import Identity
from abcpy.statisticslearning import SemiautomaticNN

# guard for the optional torch dependency required by the NN-based learning
try:
    import torch
    has_torch = True
except ImportError:
    has_torch = False


class SemiautomaticNNTests(unittest.TestCase):
    def setUp(self):
        # define prior and model
        sigma = Uniform([[10], [20]])
        mu = Normal([0, 1])
        self.Y = Normal([mu, sigma])

        # define backend
        self.backend = Backend()

        # define statistics
        self.statistics_cal = Identity(degree=3, cross=False)

        if has_torch:
            # Initialize statistics learning
            self.statisticslearning = SemiautomaticNN([self.Y],
                                                      self.statistics_cal,
                                                      self.backend,
                                                      n_samples=100,
                                                      n_samples_per_param=1,
                                                      seed=1,
                                                      n_epochs=10,
                                                      scale_samples=False)
            # with sample scaler:
            self.statisticslearning_with_scaler = SemiautomaticNN(
                [self.Y],
                self.statistics_cal,
                self.backend,
                n_samples=100,
                n_samples_per_param=1,
                seed=1,
                n_epochs=10,
                scale_samples=True)
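A minimal harness to run this fragment as a unit test, given the imports sketched above:

if __name__ == "__main__":
    unittest.main()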
Example #2
class SemiautomaticNNTests(unittest.TestCase):
    def setUp(self):
        # define prior and model
        sigma = Uniform([[10], [20]])
        mu = Normal([0, 1])
        self.Y = Normal([mu, sigma])

        # define backend
        self.backend = Backend()

        # define statistics
        self.statistics_cal = Identity(degree=3, cross=False)

        if has_torch:
            # Initialize statistics learning
            self.statisticslearning = SemiautomaticNN([self.Y],
                                                      self.statistics_cal,
                                                      self.backend,
                                                      n_samples=100,
                                                      n_samples_per_param=1,
                                                      seed=1,
                                                      n_epochs=10)

    def test_initialization(self):
        if not has_torch:
            self.assertRaises(ImportError, SemiautomaticNN, [self.Y],
                              self.statistics_cal, self.backend)

    def test_transformation(self):
        if has_torch:
            # Transform statistics extraction
            self.new_statistics_calculator = self.statisticslearning.get_statistics()
            # Simulate observed data
            Obs = Normal([2, 4])
            y_obs = Obs.forward_simulate(Obs.get_input_values(), 1)[0].tolist()

            extracted_statistics = self.new_statistics_calculator.statistics(
                y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))

            self.assertRaises(RuntimeError,
                              self.new_statistics_calculator.statistics,
                              [np.array([1, 2])])
Example #3
import logging

import numpy as np


def infer_parameters(steps=2, n_sample=50, n_samples_per_param=1, logging_level=logging.WARN):
    """Perform inference for this example.

    Parameters
    ----------
    steps : integer, optional
        Number of iterations in the sequential PMCABC algorithm ("generations"). The default value is 2.
    n_sample : integer, optional
        Number of posterior samples to generate. The default value is 50.
    n_samples_per_param : integer, optional
        Number of data points in each simulated data set. The default value is 1.
    logging_level : logging level, optional
        Logging level for the logging module. The default is logging.WARN.

    Returns
    -------
    abcpy.output.Journal
        A journal containing simulation results, metadata and optionally intermediate results.
    """
    logging.basicConfig(level=logging_level)
    # define backend
    # Note, the dummy backend does not parallelize the code!
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()

    # define observation for true parameters mean=170, std=15
    height_obs = [160.82499176, 167.24266737, 185.71695756, 153.7045709, 163.40568812, 140.70658699, 169.59102084,
                  172.81041696, 187.38782738, 179.66358934, 176.63417241, 189.16082803, 181.98288443, 170.18565017,
                  183.78493886, 166.58387299, 161.9521899, 155.69213073, 156.17867343, 144.51580379, 170.29847515,
                  197.96767899, 153.36646527, 162.22710198, 158.70012047, 178.53470703, 170.77697743, 164.31392633,
                  165.88595994, 177.38083686, 146.67058471763457, 179.41946565658628, 238.02751620619537,
                  206.22458790620766, 220.89530574344568, 221.04082532837026, 142.25301427453394, 261.37656571434275,
                  171.63761180867033, 210.28121820385866, 237.29130237612236, 175.75558340169619, 224.54340549862235,
                  197.42448680731226, 165.88273684581381, 166.55094082844519, 229.54308602661584, 222.99844054358519,
                  185.30223966014586, 152.69149367593846, 206.94372818527413, 256.35498655339154, 165.43140916577741,
                  250.19273595481803, 148.87781549665536, 223.05547559193792, 230.03418198709608, 146.13611923127021,
                  138.24716809523139, 179.26755740864527, 141.21704876815426, 170.89587081800852, 222.96391329259626,
                  188.27229523693822, 202.67075179617672, 211.75963110985992, 217.45423324370509]

    # define prior
    from abcpy.continuousmodels import Uniform
    mu = Uniform([[150], [200]], name="mu")
    sigma = Uniform([[5], [25]], name="sigma")

    # define the model
    from abcpy.continuousmodels import Normal
    height = Normal([mu, sigma])

    # 1) generate simulations from prior
    from abcpy.inferences import DrawFromPrior
    draw_from_prior = DrawFromPrior([height], backend=backend)

    # notice the use of the `.sample_par_sim_pairs` method rather than `.sample` to obtain data suitably formatted
    # for the summary statistics learning routines
    parameters, simulations = draw_from_prior.sample_par_sim_pairs(100, n_samples_per_param=1)
    # if you want to use the test loss to do early stopping in the training:
    parameters_val, simulations_val = draw_from_prior.sample_par_sim_pairs(100, n_samples_per_param=1)
    # discard the middle dimension (n_samples_per_param), as the StatisticsLearning classes require it to be 1
    simulations = simulations.reshape(simulations.shape[0], simulations.shape[2])
    simulations_val = simulations_val.reshape(simulations_val.shape[0], simulations_val.shape[2])

    # 2) now train the NNs with the different methods with the generated data
    from abcpy.statistics import Identity
    identity = Identity()  # to apply before computing the statistics

    logging.info("semiNN")
    from abcpy.statisticslearning import SemiautomaticNN, TripletDistanceLearning
    semiNN = SemiautomaticNN([height], identity, backend=backend, parameters=parameters,
                             simulations=simulations, parameters_val=parameters_val, simulations_val=simulations_val,
                             early_stopping=True,  # early stopping
                             seed=1, n_epochs=10, scale_samples=False, use_tqdm=False)
    logging.info("triplet")
    triplet = TripletDistanceLearning([height], identity, backend=backend, parameters=parameters,
                                      simulations=simulations, parameters_val=parameters_val,
                                      simulations_val=simulations_val,
                                      early_stopping=True,  # early stopping
                                      seed=1, n_epochs=10, scale_samples=True, use_tqdm=False)

    # 3) save and re-load NNs:
    # get the statistics from the already fit StatisticsLearning object 'semiNN':
    learned_seminn_stat = semiNN.get_statistics()
    learned_triplet_stat = triplet.get_statistics()

    # this has a save net method:
    learned_seminn_stat.save_net("seminn_net.pth")
    # if you used `scale_samples=True` when learning the NNs, you also need to provide a path where pickle
    # stores the scaler:
    learned_triplet_stat.save_net("triplet_net.pth", path_to_scaler="scaler.pkl")

    # to reload, you need to use the NeuralEmbedding statistics' fromFile method; this needs to know which kind of
    # NN it is using, so you must pass either the input/output sizes (i.e., the data size and the number of
    # parameters) or the network class, if that was specified explicitly in the StatisticsLearning class. Check the
    # docstring of NeuralEmbedding.fromFile for more details.
    from abcpy.statistics import NeuralEmbedding
    learned_seminn_stat_loaded = NeuralEmbedding.fromFile("seminn_net.pth", input_size=1, output_size=2)
    learned_triplet_stat_loaded = NeuralEmbedding.fromFile("triplet_net.pth", input_size=1, output_size=2,
                                                           path_to_scaler="scaler.pkl")

    # 4) you can optionally rescale the different summary statistics by their standard deviation on a reference
    # dataset of simulations. To do this, it is enough to pass the reference dataset at initialization; the rescaling
    # will then be applied every time the statistics are computed on some simulation or observation.
    learned_triplet_stat_loaded = NeuralEmbedding.fromFile("triplet_net.pth", input_size=1, output_size=2,
                                                           path_to_scaler="scaler.pkl",
                                                           reference_simulations=simulations_val)

    # 5) perform inference
    # define distance
    from abcpy.distances import Euclidean
    distance_calculator = Euclidean(learned_seminn_stat_loaded)

    # define kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([mu, sigma])

    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC([height], [distance_calculator], backend, kernel, seed=1)

    eps_arr = np.array([500])  # starting value of epsilon; the smaller, the slower the algorithm.
    # at each iteration, take as epsilon the epsilon_percentile of the distances obtained by simulations at previous
    # iteration from the observation
    epsilon_percentile = 10
    journal = sampler.sample([height_obs], steps, eps_arr, n_sample, n_samples_per_param, epsilon_percentile)

    return journal
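The returned journal can then be queried and persisted; below is a minimal usage sketch, assuming the standard abcpy Journal API (get_parameters returns the accepted samples keyed by parameter name, save pickles the journal to disk):

if __name__ == "__main__":
    journal = infer_parameters(logging_level=logging.INFO)
    # posterior samples from the last generation, keyed by parameter name
    params = journal.get_parameters()
    print("posterior mean of mu:", np.mean(np.asarray(params["mu"])))
    print("posterior mean of sigma:", np.mean(np.asarray(params["sigma"])))
    journal.save("seminn_journal.jnl")  # the file name is arbitrary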
Example #4
import logging

import numpy as np


def infer_parameters(steps=3,
                     n_sample=250,
                     n_samples_per_param=10,
                     logging_level=logging.WARN):
    """Perform inference for this example.
    Parameters
    ----------
    steps : integer, optional
        Number of iterations in the sequential PMCABC algoritm ("generations"). The default value is 3
    n_samples : integer, optional
        Number of posterior samples to generate. The default value is 250.
    n_samples_per_param : integer, optional
        Number of data points in each simulated data set. The default value is 10.
    Returns
    -------
    abcpy.output.Journal
        A journal containing simulation results, metadata and optionally intermediate results.
    """
    logging.basicConfig(level=logging_level)
    # define backend
    # Note, the dummy backend does not parallelize the code!
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()

    # define observation for true parameters mean=170, std=15
    height_obs = [
        160.82499176, 167.24266737, 185.71695756, 153.7045709, 163.40568812,
        140.70658699, 169.59102084, 172.81041696, 187.38782738, 179.66358934,
        176.63417241, 189.16082803, 181.98288443, 170.18565017, 183.78493886,
        166.58387299, 161.9521899, 155.69213073, 156.17867343, 144.51580379,
        170.29847515, 197.96767899, 153.36646527, 162.22710198, 158.70012047,
        178.53470703, 170.77697743, 164.31392633, 165.88595994, 177.38083686,
        146.67058471763457, 179.41946565658628, 238.02751620619537,
        206.22458790620766, 220.89530574344568, 221.04082532837026,
        142.25301427453394, 261.37656571434275, 171.63761180867033,
        210.28121820385866, 237.29130237612236, 175.75558340169619,
        224.54340549862235, 197.42448680731226, 165.88273684581381,
        166.55094082844519, 229.54308602661584, 222.99844054358519,
        185.30223966014586, 152.69149367593846, 206.94372818527413,
        256.35498655339154, 165.43140916577741, 250.19273595481803,
        148.87781549665536, 223.05547559193792, 230.03418198709608,
        146.13611923127021, 138.24716809523139, 179.26755740864527,
        141.21704876815426, 170.89587081800852, 222.96391329259626,
        188.27229523693822, 202.67075179617672, 211.75963110985992,
        217.45423324370509
    ]

    # define prior
    from abcpy.continuousmodels import Uniform
    mu = Uniform([[150], [200]], name="mu")
    sigma = Uniform([[5], [25]], name="sigma")

    # define the model
    from abcpy.continuousmodels import Normal
    height = Normal([mu, sigma])

    # define statistics
    from abcpy.statistics import Identity
    statistics_calculator = Identity(degree=3, cross=True)

    # Learn the optimal summary statistics using Semiautomatic summary selection
    from abcpy.statisticslearning import Semiautomatic
    statistics_learning = Semiautomatic([height],
                                        statistics_calculator,
                                        backend,
                                        n_samples=1000,
                                        n_samples_per_param=1,
                                        seed=1)

    # Redefine the statistics function; note that this calculator is overwritten below
    # by the NN-based one, which is the one actually used for the distance
    new_statistics_calculator = statistics_learning.get_statistics()

    # Learn the optimal summary statistics using SemiautomaticNN summary selection;
    # we use 200 samples as a validation set for early stopping:
    from abcpy.statisticslearning import SemiautomaticNN
    statistics_learning = SemiautomaticNN([height],
                                          statistics_calculator,
                                          backend,
                                          n_samples=1000,
                                          n_samples_val=200,
                                          n_samples_per_param=1,
                                          seed=1,
                                          early_stopping=True)

    # Redefine the statistics function
    new_statistics_calculator = statistics_learning.get_statistics()

    # define distance
    from abcpy.distances import Euclidean
    distance_calculator = Euclidean(new_statistics_calculator)

    # define kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([mu, sigma])

    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC([height], [distance_calculator], backend, kernel, seed=1)

    eps_arr = np.array([500])  # starting value of epsilon; the smaller, the slower the algorithm.
    # at each iteration, take as epsilon the epsilon_percentile of the distances obtained by simulations at previous
    # iteration from the observation
    epsilon_percentile = 10
    journal = sampler.sample([height_obs], steps, eps_arr, n_sample,
                             n_samples_per_param, epsilon_percentile)

    return journal
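Journals can also be written to disk and reloaded in a later session; a short sketch, assuming Journal.fromFile from abcpy.output:

if __name__ == "__main__":
    journal = infer_parameters()
    journal.save("semiautomatic_journal.jnl")
    # reload the journal, e.g. in a separate analysis script
    from abcpy.output import Journal
    journal_reloaded = Journal.fromFile("semiautomatic_journal.jnl")
    print(list(journal_reloaded.get_parameters().keys()))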
Example #5
 def test_errors(self):
     if has_torch:
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations=np.ones((100, 1, 3)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters=np.ones((100, 1, 2)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations=np.ones((100, 1)),
                 parameters=np.zeros((99, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters_val=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations_val=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations_val=np.ones((100, 1, 3)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters_val=np.ones((100, 1, 2)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations_val=np.ones((100, 1)),
                 parameters_val=np.zeros((99, 1)))
         with self.assertRaises(TypeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters=[i for i in range(10)],
                 simulations=[i for i in range(10)])
         with self.assertRaises(TypeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters_val=[i for i in range(10)],
                 simulations_val=[i for i in range(10)])
Example #6
import numpy as np


def infer_parameters():
    # define backend
    # Note, the dummy backend does not parallelize the code!
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()

    # define observation for true parameters mean=170, std=15
    height_obs = [
        160.82499176, 167.24266737, 185.71695756, 153.7045709, 163.40568812,
        140.70658699, 169.59102084, 172.81041696, 187.38782738, 179.66358934,
        176.63417241, 189.16082803, 181.98288443, 170.18565017, 183.78493886,
        166.58387299, 161.9521899, 155.69213073, 156.17867343, 144.51580379,
        170.29847515, 197.96767899, 153.36646527, 162.22710198, 158.70012047,
        178.53470703, 170.77697743, 164.31392633, 165.88595994, 177.38083686,
        146.67058471763457, 179.41946565658628, 238.02751620619537,
        206.22458790620766, 220.89530574344568, 221.04082532837026,
        142.25301427453394, 261.37656571434275, 171.63761180867033,
        210.28121820385866, 237.29130237612236, 175.75558340169619,
        224.54340549862235, 197.42448680731226, 165.88273684581381,
        166.55094082844519, 229.54308602661584, 222.99844054358519,
        185.30223966014586, 152.69149367593846, 206.94372818527413,
        256.35498655339154, 165.43140916577741, 250.19273595481803,
        148.87781549665536, 223.05547559193792, 230.03418198709608,
        146.13611923127021, 138.24716809523139, 179.26755740864527,
        141.21704876815426, 170.89587081800852, 222.96391329259626,
        188.27229523693822, 202.67075179617672, 211.75963110985992,
        217.45423324370509
    ]

    # define prior
    from abcpy.continuousmodels import Uniform
    mu = Uniform([[150], [200]])
    sigma = Uniform([[5], [25]])

    # define the model
    from abcpy.continuousmodels import Normal
    height = Normal([mu, sigma])

    # define statistics
    from abcpy.statistics import Identity
    statistics_calculator = Identity(degree=3, cross=True)

    # Learn the optimal summary statistics using Semiautomatic summary selection
    from abcpy.statisticslearning import Semiautomatic
    statistics_learning = Semiautomatic([height],
                                        statistics_calculator,
                                        backend,
                                        n_samples=1000,
                                        n_samples_per_param=1,
                                        seed=1)

    # Redefine the statistics function; note that this calculator is overwritten below
    # by the NN-based one, which is the one actually used for the distance
    new_statistics_calculator = statistics_learning.get_statistics()

    # Learn the optimal summary statistics using SemiautomaticNN summary selection
    from abcpy.statisticslearning import SemiautomaticNN
    statistics_learning = SemiautomaticNN([height],
                                          statistics_calculator,
                                          backend,
                                          n_samples=1000,
                                          n_samples_per_param=1,
                                          seed=1)

    # Redefine the statistics function
    new_statistics_calculator = statistics_learning.get_statistics()

    # define distance
    from abcpy.distances import Euclidean
    distance_calculator = Euclidean(new_statistics_calculator)

    # define kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([mu, sigma])

    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC([height], [distance_calculator], backend, kernel, seed=1)

    # sample from scheme
    T, n_sample, n_samples_per_param = 3, 10, 10
    eps_arr = np.array([500])
    epsilon_percentile = 10
    journal = sampler.sample([height_obs], T, eps_arr, n_sample,
                             n_samples_per_param, epsilon_percentile)

    return journal
Example #7
class SemiautomaticNNTests(unittest.TestCase):
    def setUp(self):
        # define prior and model
        sigma = Uniform([[10], [20]])
        mu = Normal([0, 1])
        self.Y = Normal([mu, sigma])

        # define backend
        self.backend = Backend()

        # define statistics
        self.statistics_cal = Identity(degree=3, cross=False)

        if has_torch:
            # Initialize statistics learning
            self.statisticslearning = SemiautomaticNN([self.Y],
                                                      self.statistics_cal,
                                                      self.backend,
                                                      n_samples=100,
                                                      n_samples_val=100,
                                                      n_samples_per_param=1,
                                                      seed=1,
                                                      n_epochs=2,
                                                      scale_samples=False,
                                                      use_tqdm=False)
            self.statisticslearning2 = SemiautomaticNN([self.Y],
                                                       self.statistics_cal,
                                                       self.backend,
                                                       n_samples=10,
                                                       n_samples_val=10,
                                                       n_samples_per_param=1,
                                                       seed=1,
                                                       n_epochs=5,
                                                       scale_samples=False,
                                                       use_tqdm=False)
            # with sample scaler:
            self.statisticslearning_with_scaler = SemiautomaticNN(
                [self.Y],
                self.statistics_cal,
                self.backend,
                n_samples=100,
                n_samples_per_param=1,
                seed=1,
                n_epochs=2,
                scale_samples=True,
                use_tqdm=False)

    def test_initialization(self):
        if not has_torch:
            self.assertRaises(ImportError, SemiautomaticNN, [self.Y],
                              self.statistics_cal, self.backend)

    def test_transformation(self):
        if has_torch:
            # Transform statistics extraction
            self.new_statistics_calculator = self.statisticslearning.get_statistics()
            self.new_statistics_calculator_with_scaler = self.statisticslearning_with_scaler.get_statistics()
            # Simulate observed data
            Obs = Normal([2, 4])
            y_obs = Obs.forward_simulate(Obs.get_input_values(), 1)[0].tolist()

            extracted_statistics = self.new_statistics_calculator.statistics(
                y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))

            self.assertRaises(RuntimeError,
                              self.new_statistics_calculator.statistics,
                              [np.array([1, 2])])

            extracted_statistics = self.new_statistics_calculator_with_scaler.statistics(
                y_obs)
            self.assertEqual(np.shape(extracted_statistics), (1, 2))

            self.assertRaises(
                RuntimeError,
                self.new_statistics_calculator_with_scaler.statistics,
                [np.array([1, 2])])

    def test_errors(self):
        if has_torch:
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    parameters=np.ones((100, 1)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    embedding_net=createDefaultNN(1, 2))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    simulations=np.ones((100, 1)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    simulations=np.ones((100, 1, 3)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    parameters=np.ones((100, 1, 2)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    simulations=np.ones((100, 1)),
                    parameters=np.zeros((99, 1)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    parameters_val=np.ones((100, 1)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    simulations_val=np.ones((100, 1)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    simulations_val=np.ones((100, 1, 3)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    parameters_val=np.ones((100, 1, 2)))
            with self.assertRaises(RuntimeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    simulations_val=np.ones((100, 1)),
                    parameters_val=np.zeros((99, 1)))
            with self.assertRaises(TypeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    parameters=[i for i in range(10)],
                    simulations=[i for i in range(10)])
            with self.assertRaises(TypeError):
                self.statisticslearning = SemiautomaticNN(
                    [self.Y],
                    self.statistics_cal,
                    self.backend,
                    n_samples=1000,
                    n_samples_per_param=1,
                    seed=1,
                    parameters_val=[i for i in range(10)],
                    simulations_val=[i for i in range(10)])
            with self.assertRaises(RuntimeError):
                self.statisticslearning2.test_losses = [4, 2, 1]
                self.statisticslearning2.plot_losses()
            with self.assertRaises(NotImplementedError):
                self.statisticslearning.plot_losses(which_losses="foo")

    def test_plots(self):
        if has_torch:
            self.statisticslearning.plot_losses()
            self.statisticslearning.plot_losses(which_losses="train")
            self.statisticslearning.plot_losses(which_losses="test")
Example #8
 def test_errors(self):
     if has_torch:
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 embedding_net=createDefaultNN(1, 2))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations=np.ones((100, 1, 3)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters=np.ones((100, 1, 2)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations=np.ones((100, 1)),
                 parameters=np.zeros((99, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters_val=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations_val=np.ones((100, 1)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations_val=np.ones((100, 1, 3)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters_val=np.ones((100, 1, 2)))
         with self.assertRaises(RuntimeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 simulations_val=np.ones((100, 1)),
                 parameters_val=np.zeros((99, 1)))
         with self.assertRaises(TypeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters=[i for i in range(10)],
                 simulations=[i for i in range(10)])
         with self.assertRaises(TypeError):
             self.statisticslearning = SemiautomaticNN(
                 [self.Y],
                 self.statistics_cal,
                 self.backend,
                 n_samples=1000,
                 n_samples_per_param=1,
                 seed=1,
                 parameters_val=[i for i in range(10)],
                 simulations_val=[i for i in range(10)])
         with self.assertRaises(RuntimeError):
             self.statisticslearning2.test_losses = [4, 2, 1]
             self.statisticslearning2.plot_losses()
         with self.assertRaises(NotImplementedError):
             self.statisticslearning.plot_losses(which_losses="foo")
Example #9
    if train_save_NN:
        # # Define backend
        # from abcpy.backends import BackendDummy
        # backend = BackendDummy()
        if fake:
            parameters = np.load('Data/Pilots/simulation_pilot_fake.npz')['parameters']
            simulations = np.load('Data/Pilots/simulation_pilot_fake.npz')['simulations']
        else:
            parameters = np.load('Data/Pilots/simulation_pilot_'+str(whichobs)+'.npz')['parameters']
            simulations = np.load('Data/Pilots/simulation_pilot_'+str(whichobs)+'.npz')['simulations']
        print(parameters.shape, simulations.shape)
        # now train the NNs with the different methods on the generated data
        print("semiNN")
        semiNN = SemiautomaticNN([PD], identity, backend=backend, parameters=parameters, simulations=simulations,
                                 early_stopping=False, batch_size=32,  # no early stopping here
                                 seed=1, n_epochs=1000, lr=1e-4, scale_samples=False)
        print("triplet")
        triplet = TripletDistanceLearning([PD], identity, backend=backend, parameters=parameters,
                                          simulations=simulations,
                                          early_stopping=False, batch_size=32,  # no early stopping here
                                          seed=1, n_epochs=2000, lr=1e-3, scale_samples=False)
        # get the statistics from the StatisticsLearning object:
        learned_seminn_stat = semiNN.get_statistics()
        learned_triplet_stat = triplet.get_statistics()
        # saving the learned net
        if fake:
            learned_seminn_stat.save_net("Data/Pilots/seminn_net_fake.pth")
            learned_triplet_stat.save_net("Data/Pilots/triplet_net_fake.pth")
        else:
            learned_seminn_stat.save_net("Data/Pilots/seminn_net_"+str(whichobs)+".pth")
            learned_triplet_stat.save_net("Data/Pilots/triplet_net_"+str(whichobs)+".pth")