def go(self):
    """
    Compute the TS (Test Statistic) map over the grid of (R.A., Dec) points.

    The per-point likelihood worker runs either on a parallel cluster (when a
    parallel computation is active) or serially with a progress bar. The
    resulting TS values are reshaped into the map, and the maximum is stored
    in ``self._max_ts``.

    :return: the TS map as a 2-d array of shape (n_decs, n_ras)
    """
    if is_parallel_computation_active():

        client = ParallelClient()

        if self._n_decs % client.get_number_of_engines() != 0:

            # BUG FIX: logging.Logger.warning() takes no warnings category;
            # the stray RuntimeWarning argument (a warnings.warn leftover)
            # was passed as an unused %-format argument.
            log.warning(
                "The number of Dec bands is not a multiple of the number of engine. Make it so for optimal performances.")

        res = client.execute_with_progress_bar(
            self.worker, list(range(len(self._points))), chunk_size=self._n_ras)

    else:

        n_points = len(self._points)

        p = tqdm(total=n_points)

        res = np.zeros(n_points)

        # Serial evaluation: the worker only needs the point index
        for i in range(n_points):

            res[i] = self.worker(i)
            p.update(1)

    # The worker returns -log(like); TS = 2 * (log(like) - log(like0))
    TS = 2 * (-np.array(res) - self._like0)

    # Get maximum of TS
    idx = TS.argmax()
    self._max_ts = (TS[idx], self._points[idx])

    log.info("Maximum TS is %.2f at (R.A., Dec) = (%.3f, %.3f)" %
             (self._max_ts[0], self._max_ts[1][0], self._max_ts[1][1]))

    self._ts_map = TS.reshape(self._n_decs, self._n_ras)

    return self._ts_map
def get_simulated_dataset(self, name):
    """
    Return a simulation of this dataset using the current model with
    its current parameters.

    :param name: new name for the new plugin instance
    :return: a HAL instance
    """

    # Compute (and cache) the expected counts per analysis bin under the
    # current model the first time this method is called
    if self._clone is None:

        n_pts = self._likelihood_model.get_number_of_point_sources()
        n_ext = self._likelihood_model.get_number_of_extended_sources()

        expectations = collections.OrderedDict()

        for bin_id in self._maptree:

            if bin_id in self._active_planes:

                analysis_bin = self._maptree[bin_id]

                expectations[bin_id] = (
                    self._get_expectation(analysis_bin, bin_id, n_pts, n_ext)
                    + analysis_bin.background_map.as_partial())

            else:

                # Inactive planes carry no expectation
                expectations[bin_id] = None

        if parallel_client.is_parallel_computation_active():

            # Do not clone, as the parallel environment already makes clones
            clone = self

        else:

            clone = copy.deepcopy(self)

        self._clone = (clone, expectations)

    clone_instance, clone_expectations = self._clone

    # Draw a new Poisson realization for every active plane of the clone
    for bin_id in clone_instance._maptree:

        if bin_id not in self._active_planes:

            continue

        expectation = clone_expectations[bin_id]

        new_data = np.random.poisson(expectation,
                                     size=(1, expectation.shape[0])).flatten()

        clone_instance._maptree[bin_id].observation_map.set_new_values(new_data)

    # Rename the clone and its nuisance parameter accordingly
    clone_instance._name = name

    old_name = list(clone_instance._nuisance_parameters.keys())[0]
    new_name = old_name.replace(self.name, name)
    clone_instance._nuisance_parameters[new_name] = \
        clone_instance._nuisance_parameters.pop(old_name)

    # Biases depend on the data, so recompute them for the new realization
    clone_instance._compute_likelihood_biases()

    return clone_instance
def _minimize(self):
    """
    Minimize the objective function with pygmo (PAGMO).

    Evolves ``islands`` populations either in parallel (ipyparallel
    archipelago) or serially, and returns the champion of the best island.

    :return: tuple (best_fit_values as np.array, best function value)
    """
    # Gather the setup
    islands = self._setup_dict['islands']
    pop_size = self._setup_dict['population_size']
    evolution_cycles = self._setup_dict['evolution_cycles']

    # Print some info
    print("\nPAGMO setup:")
    print("------------")
    print("- Number of islands: %i" % islands)
    print("- Population size per island: %i" % pop_size)
    print("- Evolutions cycles per island: %i\n" % evolution_cycles)

    Npar = len(self._internal_parameters)

    if is_parallel_computation_active():

        wrapper = PAGMOWrapper(function=self.function,
                               parameters=self._internal_parameters,
                               dim=Npar)

        # use the archipelago, which uses the ipyparallel computation
        archi = pg.archipelago(udi=pg.ipyparallel_island(), n=islands,
                               algo=self._setup_dict['algorithm'],
                               prob=wrapper, pop_size=pop_size)
        archi.wait()

        # Display some info
        print("\nSetup before parallel execution:")
        print("--------------------------------\n")
        print(archi)

        # Evolve populations on islands
        print("Evolving... (progress not available for parallel execution)")

        # For some weird reason, ipyparallel looks for _winreg on Linux (where it does
        # not exist, being a Windows module). Let's mock it with an empty module
        mocked = False
        if os.path.exists("_winreg.py") is False:

            with open("_winreg.py", "w+") as f:

                f.write("pass")

            mocked = True

        archi.evolve()

        # Wait for completion (evolve() is async)
        archi.wait_check()

        # Now remove _winreg.py if needed
        if mocked:

            os.remove("_winreg.py")

        # Find best and worst islands
        # BUG FIX: under Python 3, map() returns an iterator, so
        # np.array(map(...)) built a useless 0-d object array and broke
        # fOpts.argmin()/argmax() below. Materialize the values explicitly.
        fOpts = np.array([x[0] for x in archi.get_champions_f()])
        xOpts = archi.get_champions_x()

    else:

        # do not use ipyparallel. Evolve populations on islands serially
        wrapper = PAGMOWrapper(function=self.function,
                               parameters=self._internal_parameters,
                               dim=Npar)

        xOpts = []
        fOpts = np.zeros(islands)

        with progress_bar(iterations=islands, title="pygmo minimization") as p:

            for island_id in range(islands):

                pop = pg.population(prob=wrapper, size=pop_size)

                for i in range(evolution_cycles):

                    pop = self._setup_dict['algorithm'].evolve(pop)

                # Gather results
                xOpts.append(pop.champion_x)
                fOpts[island_id] = pop.champion_f[0]

                p.increase()

    # Find best and worst islands
    min_idx = fOpts.argmin()
    max_idx = fOpts.argmax()

    fOpt = fOpts[min_idx]
    fWorse = fOpts[max_idx]
    xOpt = np.array(xOpts)[min_idx]

    # Some information
    print("\nSummary of evolution:")
    print("---------------------")
    print("Best population has minimum %.3f" % (fOpt))
    print("Worst population has minimum %.3f" % (fWorse))
    print("")

    # Transform to numpy.array
    best_fit_values = np.array(xOpt)

    return best_fit_values, fOpt
def get_simulated_dataset(self, name):
    """
    Return a simulation of this dataset using the current model with
    its current parameters.

    :param name: new name for the new plugin instance
    :return: a simulated plugin instance
    """

    # First get expectation under the current model and store them, if we didn't do it yet
    if self._clone is None:

        n_point_sources = self._likelihood_model.get_number_of_point_sources()
        n_ext_sources = self._likelihood_model.get_number_of_extended_sources()

        expectations = []

        for i, data_analysis_bin in enumerate(self._maptree):

            if i not in self._active_planes:

                # Inactive planes carry no expectation
                expectations.append(None)

            else:

                expectations.append(
                    self._get_expectation(data_analysis_bin, i,
                                          n_point_sources, n_ext_sources)
                    + data_analysis_bin.background_map.as_partial())

        if parallel_client.is_parallel_computation_active():

            # Do not clone, as the parallel environment already makes clones
            clone = self

        else:

            clone = copy.deepcopy(self)

        self._clone = (clone, expectations)

    # Substitute the observation for each active data analysis bin.
    # (The original also zipped in self._maptree but never used it; dropped.)
    for i, data_analysis_bin in enumerate(self._clone[0]._maptree):

        if i not in self._active_planes:

            continue

        else:

            # Active plane. Generate new data
            expectation = self._clone[1][i]

            new_data = np.random.poisson(
                expectation, size=(1, expectation.shape[0])).flatten()

            # Substitute data
            data_analysis_bin.observation_map.set_new_values(new_data)

    # Now change name and return
    self._clone[0]._name = name

    # Adjust the name of the nuisance parameter.
    # BUG FIX: on Python 3 dict.keys() returns a non-subscriptable view,
    # so .keys()[0] raised TypeError; wrap in list() first.
    old_name = list(self._clone[0]._nuisance_parameters.keys())[0]
    new_name = old_name.replace(self.name, name)
    self._clone[0]._nuisance_parameters[new_name] = \
        self._clone[0]._nuisance_parameters.pop(old_name)

    # Recompute biases
    self._clone[0]._compute_likelihood_biases()

    return self._clone[0]
def _minimize(self):
    """
    Minimize the objective function with pygmo (PAGMO).

    Evolves ``islands`` populations either in parallel (ipyparallel
    archipelago) or serially, and returns the champion of the best island.

    :return: tuple (best_fit_values as np.array, best function value)
    """
    # Gather the setup
    islands = self._setup_dict['islands']
    pop_size = self._setup_dict['population_size']
    evolution_cycles = self._setup_dict['evolution_cycles']

    # Print some info
    print("\nPAGMO setup:")
    print("------------")
    print("- Number of islands: %i" % islands)
    print("- Population size per island: %i" % pop_size)
    print("- Evolutions cycles per island: %i\n" % evolution_cycles)

    Npar = len(self._internal_parameters)

    if is_parallel_computation_active():

        wrapper = PAGMOWrapper(function=self.function,
                               parameters=self._internal_parameters,
                               dim=Npar)

        # use the archipelago, which uses the ipyparallel computation
        archi = pg.archipelago(udi=pg.ipyparallel_island(), n=islands,
                               algo=self._setup_dict['algorithm'],
                               prob=wrapper, pop_size=pop_size)
        archi.wait()

        # Display some info
        print("\nSetup before parallel execution:")
        print("--------------------------------\n")
        print(archi)

        # Evolve populations on islands
        print("Evolving... (progress not available for parallel execution)")

        # For some weird reason, ipyparallel looks for _winreg on Linux (where it does
        # not exist, being a Windows module). Let's mock it with an empty module
        mocked = False
        if os.path.exists("_winreg.py") is False:

            with open("_winreg.py", "w+") as f:

                f.write("pass")

            mocked = True

        archi.evolve()

        # Wait for completion (evolve() is async)
        archi.wait_check()

        # Now remove _winreg.py if needed
        if mocked:

            os.remove("_winreg.py")

        # Find best and worst islands
        # BUG FIX: under Python 3, map() returns an iterator, so
        # np.array(map(...)) built a useless 0-d object array and broke
        # fOpts.argmin()/argmax() below. Materialize the values explicitly.
        fOpts = np.array([x[0] for x in archi.get_champions_f()])
        xOpts = archi.get_champions_x()

    else:

        # do not use ipyparallel. Evolve populations on islands serially
        wrapper = PAGMOWrapper(function=self.function,
                               parameters=self._internal_parameters,
                               dim=Npar)

        xOpts = []
        fOpts = np.zeros(islands)

        with progress_bar(iterations=islands, title="pygmo minimization") as p:

            for island_id in range(islands):

                pop = pg.population(prob=wrapper, size=pop_size)

                for i in range(evolution_cycles):

                    pop = self._setup_dict['algorithm'].evolve(pop)

                # Gather results
                xOpts.append(pop.champion_x)
                fOpts[island_id] = pop.champion_f[0]

                p.increase()

    # Find best and worst islands
    min_idx = fOpts.argmin()
    max_idx = fOpts.argmax()

    fOpt = fOpts[min_idx]
    fWorse = fOpts[max_idx]
    xOpt = np.array(xOpts)[min_idx]

    # Some information
    print("\nSummary of evolution:")
    print("---------------------")
    print("Best population has minimum %.3f" % (fOpt))
    print("Worst population has minimum %.3f" % (fWorse))
    print("")

    # Transform to numpy.array
    best_fit_values = np.array(xOpt)

    return best_fit_values, fOpt