def runLikelihood(subdir, tpl_file):
    """Run the likelihood code on a set of pixels in a subdirectory.

    Reads its configuration from a pickle file (par.pck) and the pixel
    locations from another pickle file (pixel.pck), both expected in the
    current working directory.  Builds an overall unbinned likelihood
    object, does a quick global fit to fix the null-hypothesis
    log-likelihood, then loops over the pixels: at each one a test point
    source is placed, refit, its TS computed, and the result appended to
    ts_results.dat.  Already-recorded pixels are skipped, so an
    interrupted job can be resumed.

    Parameters
    ----------
    subdir : str
        Subdirectory being processed.  Kept for interface compatibility;
        the function assumes the CWD already is that subdirectory (all
        paths used here are relative).
    tpl_file : str
        Template counts map; passed to PixelCoords to convert pixel
        indices (i, j) to sky coordinates (ra, dec).
    """
    # Pickle streams are binary: 'rb' is required under Python 3 and
    # harmless under Python 2.  Context managers guarantee the handles
    # are closed even if loading raises.
    with open("par.pck", "rb") as parfile:
        pars = pickle.load(parfile)
    with open("pixel.pck", "rb") as pixelfile:
        pixels = pickle.load(pixelfile)

    pixel_coords = PixelCoords(tpl_file)

    obs = UnbinnedObs(resolve_fits_files(pars['evfile']),
                      resolve_fits_files(pars['scfile']),
                      expMap='../' + pars['expmap'],
                      expCube='../' + pars['expcube'],
                      irfs=pars['irfs'])
    like = UnbinnedAnalysis(obs, '../' + pars['srcmdl'], pars['optimizer'])
    like.setFitTolType(pars['toltype'])

    # Quick global fit; its value is the null-hypothesis reference.
    like.optimize(0)
    loglike0 = like()

    test_src = getPointSource(like)
    target_name = 'testSource'
    test_src.setName(target_name)

    outfile = 'ts_results.dat'

    # Resume support: collect pixels already written by a previous run.
    # A set gives O(1) membership tests inside the pixel loop.
    finished_pixels = set()
    if os.path.isfile(outfile):
        with open(outfile, 'r') as results:  # don't shadow builtin input()
            for line in results:
                tokens = line.strip().split()
                finished_pixels.add((int(tokens[0]), int(tokens[1])))

    with open(outfile, 'a') as output:
        for indx, i, j in pixels:
            if (i, j) in finished_pixels:
                continue
            ra, dec = pixel_coords(i, j)
            test_src.setDir(ra, dec, True, False)
            like.addSource(test_src)
            like.optimize(0)
            # TS = -2 * delta(objective); assumes like() returns
            # -log(likelihood) as in pyLikelihood -- TODO confirm.
            ts = -2 * (like() - loglike0)
            output.write("%3i %3i %.3f %.3f %.5f\n" % (i, j, ra, dec, ts))
            output.flush()  # keep the file current for resumability
            # Remove the test source so the next pixel starts clean.
            like.deleteSource(target_name)
def runLikelihood(subdir, tpl_file):
    """Run the pixel-by-pixel TS likelihood scan for one subdirectory.

    NOTE(review): this is a duplicate definition of runLikelihood that
    appears earlier in this file; the two should be deduplicated.

    Configuration is read from par.pck and the pixel list from pixel.pck,
    both expected in the current working directory.  An unbinned
    likelihood is built and globally fit once to get the null-hypothesis
    log-likelihood; then, for each pixel not already present in
    ts_results.dat, a test point source is placed, refit, and its TS
    written out, so an interrupted job can be resumed.

    Parameters
    ----------
    subdir : str
        Subdirectory being processed.  Kept for interface compatibility;
        the body relies on the CWD already being that subdirectory (all
        paths here are relative).
    tpl_file : str
        Template counts map handed to PixelCoords for (i, j) -> (ra, dec)
        conversion.
    """
    # 'rb' is mandatory for pickle under Python 3 (binary format) and
    # harmless under Python 2; 'with' guarantees the handles close.
    with open("par.pck", "rb") as par_fh:
        pars = pickle.load(par_fh)
    with open("pixel.pck", "rb") as pix_fh:
        pixels = pickle.load(pix_fh)

    pixel_coords = PixelCoords(tpl_file)

    obs = UnbinnedObs(resolve_fits_files(pars['evfile']),
                      resolve_fits_files(pars['scfile']),
                      expMap='../' + pars['expmap'],
                      expCube='../' + pars['expcube'],
                      irfs=pars['irfs'])
    like = UnbinnedAnalysis(obs, '../' + pars['srcmdl'], pars['optimizer'])
    like.setFitTolType(pars['toltype'])

    # One quick global fit establishes the reference objective value.
    like.optimize(0)
    loglike0 = like()

    test_src = getPointSource(like)
    target_name = 'testSource'
    test_src.setName(target_name)

    outfile = 'ts_results.dat'

    # Gather pixels finished in a prior run; set membership is O(1) in
    # the loop below.  Avoid shadowing the builtin input().
    finished_pixels = set()
    if os.path.isfile(outfile):
        with open(outfile, 'r') as done_fh:
            for line in done_fh:
                tokens = line.strip().split()
                finished_pixels.add((int(tokens[0]), int(tokens[1])))

    with open(outfile, 'a') as output:
        for indx, i, j in pixels:
            if (i, j) in finished_pixels:
                continue
            ra, dec = pixel_coords(i, j)
            test_src.setDir(ra, dec, True, False)
            like.addSource(test_src)
            like.optimize(0)
            # TS = -2 * delta(objective); assumes like() returns
            # -log(likelihood) as in pyLikelihood -- TODO confirm.
            ts = -2 * (like() - loglike0)
            output.write("%3i %3i %.3f %.3f %.5f\n" % (i, j, ra, dec, ts))
            output.flush()  # keep results on disk for resumability
            # Drop the test source before moving to the next pixel.
            like.deleteSource(target_name)