def create(self):
    """Overloaded method."""
    self.xref = self.get('xref')
    self.yref = self.get('yref')
    self.nxpix = self.get('nxpix')
    self.binsz = self.get('binsz') # Dimension of the pimap (degrees)
    self.proj = self.get('proj')
    sidex = self.nxpix*self.binsz
    logger.info('Output image dimensions are %.1f x %.1f arcmin.' %\
                (sidex*60, sidex*60))
    logger.info('Center of the image is in R.A.=%.3f Dec.=%.3f' %
                (self.xref, self.yref))
    # Build the WCS object.
    self.w = wcs.WCS(naxis=2)
    self.w.wcs.crpix = [(self.nxpix + 1)/2, (self.nxpix + 1)/2]
    self.w.wcs.cdelt = [-self.binsz, self.binsz]
    self.w.wcs.crval = [self.xref, self.yref]
    self.w.wcs.ctype = ['RA---%s' % self.proj, 'DEC--%s' % self.proj]
    self.w.wcs.equinox = 2000.0
    #w.wcs.radesys = 'ICRS'
    self.header = self.w.to_header()
    self.pol_x = numpy.zeros((self.nxpix, self.nxpix))
    self.pol_y = numpy.zeros((self.nxpix, self.nxpix))
def __init__(self, file_path):
    """Constructor."""
    assert file_path.endswith('.fits')
    logger.info('Opening input binned file %s...' % file_path)
    self.hdu_list = fits.open(file_path)
    self.hdu_list.info()
def __init__(self, psf_file_path):
    """Constructor."""
    logger.info("Reading PSF data from %s..." % psf_file_path)
    self.hdu_list = fits.open(psf_file_path)
    self.hdu_list.info()
    _data = self.hdu_list["PSF"].data
    W = _data["W"]
    sigma = _data["SIGMA"]
    N = _data["N"]
    r_c = _data["R_C"]
    eta = _data["ETA"]
    self.__params = (W, sigma, N, r_c, eta)
    # Tabulate the actual PSF values.
    _r = numpy.linspace(0, self.MAX_RADIUS, 250)
    _y = gauss_king(_r, *self.__params)
    fmt = dict(xname="r", xunits="arcsec", yname="PSF", yunits="sr$^{-1}$")
    xInterpolatedUnivariateSpline.__init__(self, _r, _y, k=2, **fmt)
    # Include the solid angle for the actual underlying random generator.
    _y *= 2*numpy.pi*_r
    fmt = dict(rvname="r", rvunits="arcsec",
               pdfname="$2 \\pi r \\times$ PSF", pdfunits="")
    self.generator = xUnivariateGenerator(_r, _y, k=1, **fmt)
    # Finally, calculate the encircled energy fraction and the HEW.
    self.eef, self.hew = self.build_eef()
    logger.info(self)
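# The constructor above tabulates gauss_king(r, W, sigma, N, r_c, eta) on a
# radial grid. For illustration only, here is a minimal sketch of a
# Gaussian-plus-King profile consistent with those parameter names; the exact
# functional form is an assumption based on the common parametrization and
# should be checked against the package's own definition of gauss_king.
import numpy

def gauss_king_sketch(r, W, sigma, N, r_c, eta):
    """Gaussian core plus King-profile wings (illustrative parametrization)."""
    gauss = W*numpy.exp(-(r**2)/(2.*sigma**2))
    king = N*(1. + (r/r_c)**2)**(-eta)
    return gauss + king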
def add_circle(self, ra, dec, radius, pmax, type='radial'):
    logger.info('add a circle at ra=%f, dec=%f, radius=%f. PMAX=%f' %
                (ra, dec, radius, pmax))
    for i in range(self.nxpix):
        for j in range(self.nxpix):
            # radial:
            #posx = nxpix/2.-i
            #posy = -nxpix/2.+j
            # circular:
            #p_x = i #-nxpix+j
            #p_y = j #-nxpix+i
            world = self.w.wcs_pix2world([[i, j]], 0)
            w_ra = world[0][0]
            w_dec = world[0][1]
            dx = (w_ra - ra)*numpy.cos(numpy.deg2rad(dec)) # Effect of the projection
            dy = w_dec - dec
            p_x = -dy
            p_y = +dx
            dist = numpy.sqrt(dx*dx + dy*dy)
            #print w_ra, w_dec, ra, dec, dist, radius
            if dist > radius:
                p_x = 0
                p_y = 0
            self.pol_x[i, j] = p_x
            self.pol_y[i, j] = p_y
    pol_deg = numpy.sqrt(self.pol_x*self.pol_x + self.pol_y*self.pol_y)
    self.pol_x *= pmax/pol_deg.max()
    self.pol_y *= pmax/pol_deg.max()
def bin_(self):
    """Overloaded method."""
    evt_header = self.event_file.hdu_list['PRIMARY'].header
    num_chans = evt_header['DETCHANS']
    total_time = self.event_file.total_good_time()
    binning = numpy.linspace(-0.5, num_chans - 0.5, num_chans + 1)
    n, bins = numpy.histogram(self.event_data['PHA'], bins=binning)
    primary_hdu = self.build_primary_hdu()
    data = [numpy.arange(num_chans),
            n/total_time,
            numpy.sqrt(n)/total_time]
    spec_hdu = xBinTableHDUPHA1(data)
    spec_hdu.setup_header(self.event_file.primary_keywords())
    irf_name = evt_header['IRFNAME']
    keywords = [('EXPOSURE', total_time, 'exposure time'),
                ('RESPFILE', irf_file_path(irf_name, 'rmf')),
                ('ANCRFILE', irf_file_path(irf_name, 'arf'))]
    spec_hdu.setup_header(keywords)
    hdu_list = fits.HDUList([primary_hdu, spec_hdu])
    hdu_list.info()
    logger.info('Writing binned PHA1 data to %s...' % self.get('outfile'))
    hdu_list.writeto(self.get('outfile'), clobber=True)
    logger.info('Done.')
def analyze():
    """Analyze the data."""
    logger.info('Opening output file %s...' % ANALYSIS_FILE_PATH)
    analysis_file = open(ANALYSIS_FILE_PATH, 'w')
    for i, (_min, _max) in enumerate(zip(TIME_BINNING[:-1], TIME_BINNING[1:])):
        _mcube = xBinnedModulationCube(_mcube_file_path(i))
        _mcube.fit()
        _fit_results = _mcube.fit_results[0]
        _sel_file = xEventFile(_sel_file_path(i))
        _time = numpy.average(_sel_file.event_data['TIME'])
        _sel_file.close()
        _time_errp = _max - _time
        _time_errm = _time - _min
        _pol_deg = _fit_results.polarization_degree
        _pol_deg_err = _fit_results.polarization_degree_error
        _pol_angle = _fit_results.phase
        _pol_angle_err = _fit_results.phase_error
        _spec_fitter = PIPELINE.xpxspec(_pha1_file_path(i), plot=False)
        (_index, _index_err), (_norm, _norm_err) = _spec_fitter.fit_parameters()
        # The division by the phase interval is a workaround and we should
        # keep track of that in xpselect.
        _norm /= (_max - _min)
        _norm_err /= (_max - _min)
        _data = (_time, _time_errp, _time_errm, _pol_deg, _pol_deg_err,
                 _pol_angle, _pol_angle_err, _index, _index_err, _norm,
                 _norm_err)
        _fmt = ('%.4e ' * len(_data)).strip()
        _fmt = '%s\n' % _fmt
        _line = _fmt % _data
        analysis_file.write(_line)
    analysis_file.close()
def add_circle(self, ra, dec, radius, pmax, ptype='circular', angle=0.0):
    '''This implements the case of a circular shape.'''
    logger.info('add a circle at ra=%f, dec=%f, radius=%f. PMAX=%f' %
                (ra, dec, radius, pmax))
    for i in range(self.nxpix):
        for j in range(self.nxpix):
            world = self.w.wcs_pix2world([[i, j]], 0)
            w_ra = world[0][0]
            w_dec = world[0][1]
            dx = (w_ra - ra)*numpy.cos(numpy.deg2rad(dec)) # Effect of the projection
            dy = w_dec - dec
            if ptype == 'linear':
                p_x = numpy.cos(numpy.deg2rad(angle))
                p_y = numpy.sin(numpy.deg2rad(angle))
            elif ptype == 'circular':
                p_x = -dy
                p_y = +dx
            elif ptype == 'radial':
                p_x = dx
                p_y = dy
            dist = numpy.sqrt(dx*dx + dy*dy)
            #print w_ra, w_dec, ra, dec, dist, radius
            if dist > radius:
                p_x = 0
                p_y = 0
            self.pol_x[i, j] = self.pol_x[i, j] + p_x
            self.pol_y[i, j] = self.pol_y[i, j] + p_y
    pol_deg = numpy.sqrt(self.pol_x*self.pol_x + self.pol_y*self.pol_y)
    self.pol_x *= pmax/pol_deg.max()
    self.pol_y *= pmax/pol_deg.max()
def plot_swift_lc(grb_list, show=True):
    """Plot the Swift GRB light curves."""
    plt.figure(figsize=(10, 8), dpi=80)
    plt.title('Swift XRT light curves')
    num_grb = 0
    for grb_name in grb_list:
        flux_outfile = download_swift_grb_lc_file(grb_name, min_obs_time=21600)
        if flux_outfile is not None:
            integral_flux_spline = parse_light_curve(flux_outfile)
            if integral_flux_spline is not None:
                if grb_name == 'GRB 130427A':
                    integral_flux_spline.plot(num_points=1000, logx=True,
                                              logy=True, show=False,
                                              color='red', linewidth=1.0)
                    num_grb += 1
                else:
                    c = random.uniform(0.4, 0.8)
                    integral_flux_spline.plot(num_points=1000, logx=True,
                                              logy=True, show=False,
                                              color='%f' % c, linewidth=1.0)
                    num_grb += 1
        else:
            continue
    logger.info('%i GRBs included in the plot.' % num_grb)
    if show:
        plt.show()
def parse_blazar_list(PRIORITY_ONLY):
    """Parse the blazar list file."""
    logger.info("Parsing input file %s..." % BLAZAR_LIST_PATH)
    src_list = []
    input_file = open(BLAZAR_LIST_PATH)
    # Skip the header lines.
    for i in range(6):
        next(input_file)
    for line in input_file:
        if line.startswith("-"):
            if PRIORITY_ONLY is False:
                for i in range(3):
                    line = next(input_file)
                PRIORITY_ONLY = True
            else:
                return src_list
        line = line.replace("BL Lac", "BL_Lac")
        name, line, notes = line[:12].strip(), line[12:95], line[95:]
        ra, dec, opt_class, sed_class, flux_max, flux_min, p_opt_max, \
            p_opt_min = line.split()
        flux_max = float(flux_max)
        flux_min = float(flux_min)
        p_opt_max = float(p_opt_max)
        p_opt_min = float(p_opt_min)
        if p_opt_min < 0.5:
            p_opt_min = 0.5
        if p_opt_max > 0.5:
            src = {
                "name": name,
                "flux_min": flux_min,
                "flux_max": flux_max,
                "p_opt_max": p_opt_max,
                "p_opt_min": p_opt_min,
            }
            src_list.append(src)
def __init__(self, file_path):
    """Constructor."""
    assert file_path.endswith('.fits')
    logger.info('Opening input binned file %s...' % file_path)
    self.hdu_list = fits.open(file_path)
    self.hdu_list.info()
def create(self):
    """Overloaded method."""
    self.xref = self.get('xref')
    self.yref = self.get('yref')
    self.nxpix = self.get('nxpix')
    self.binsz = self.get('binsz') # Dimension of the pimap (degrees)
    self.proj = self.get('proj')
    sidex = self.nxpix*self.binsz
    logger.info('Output image dimensions are %.1f x %.1f arcmin.' %\
                (sidex*60, sidex*60))
    logger.info('Center of the image is in R.A.=%.3f Dec.=%.3f' %
                (self.xref, self.yref))
    # Build the WCS object.
    self.w = wcs.WCS(naxis=2)
    self.w.wcs.crpix = [(self.nxpix + 1)/2, (self.nxpix + 1)/2]
    self.w.wcs.cdelt = [-self.binsz, self.binsz]
    self.w.wcs.crval = [self.xref, self.yref]
    self.w.wcs.ctype = ['RA---%s' % self.proj, 'DEC--%s' % self.proj]
    self.w.wcs.equinox = 2000.0
    #w.wcs.radesys = 'ICRS'
    self.header = self.w.to_header()
    self.pol_x = numpy.zeros((self.nxpix, self.nxpix))
    self.pol_y = numpy.zeros((self.nxpix, self.nxpix))
def analyze():
    """Analyze the data."""
    if os.path.exists(ANALYSIS_FILE_PATH):
        logger.info('%s exists, delete it if you want to recreate it.' %\
                    ANALYSIS_FILE_PATH)
        return
    logger.info('Opening output file %s...' % ANALYSIS_FILE_PATH)
    analysis_file = open(ANALYSIS_FILE_PATH, 'w')
    for i, (_min, _max) in enumerate(PHASE_BINNING):
        _mcube = xBinnedModulationCube(_mcube_file_path(i))
        _mcube.fit()
        _fit_results = _mcube.fit_results[-1]
        print(_fit_results)
        _phase = 0.5*(_min + _max)
        _phase_err = 0.5*(_max - _min)
        _pol_deg = _fit_results.polarization_degree
        _pol_deg_err = _fit_results.polarization_degree_error
        _pol_angle = _fit_results.phase
        _pol_angle_err = _fit_results.phase_error
        _data = (_phase, _phase_err, _pol_deg, _pol_deg_err, _pol_angle,
                 _pol_angle_err)
        _fmt = ('%.4e ' * len(_data)).strip()
        _fmt = '%s\n' % _fmt
        _line = _fmt % _data
        analysis_file.write(_line)
    analysis_file.close()
def add_circle(self, ra, dec, radius, pmax, type='radial'):
    logger.info('add a circle at ra=%f, dec=%f, radius=%f. PMAX=%f' %
                (ra, dec, radius, pmax))
    for i in range(self.nxpix):
        for j in range(self.nxpix):
            # radial:
            #posx = nxpix/2.-i
            #posy = -nxpix/2.+j
            # circular:
            #p_x = i #-nxpix+j
            #p_y = j #-nxpix+i
            world = self.w.wcs_pix2world([[i, j]], 0)
            w_ra = world[0][0]
            w_dec = world[0][1]
            dx = (w_ra - ra)*numpy.cos(numpy.deg2rad(dec)) # Effect of the projection
            dy = w_dec - dec
            p_x = -dy
            p_y = +dx
            dist = numpy.sqrt(dx*dx + dy*dy)
            #print w_ra, w_dec, ra, dec, dist, radius
            if dist > radius:
                p_x = 0
                p_y = 0
            self.pol_x[i, j] = p_x
            self.pol_y[i, j] = p_y
    pol_deg = numpy.sqrt(self.pol_x*self.pol_x + self.pol_y*self.pol_y)
    self.pol_x *= pmax/pol_deg.max()
    self.pol_y *= pmax/pol_deg.max()
def rvs_event_list(self, aeff, psf, modf, edisp, **kwargs):
    """Extract an event list for the full ROI.

    Arguments
    ---------
    aeff : :py:class:`ximpol.irf.arf.xEffectiveArea` object.
        The effective area to be used.

    psf : :py:class:`ximpol.irf.psf.xPointSpreadFunction` object.
        The PSF to be used.

    modf : :py:class:`ximpol.irf.mrf.xModulationFactor` object.
        The modulation factor to be used.

    edisp : :py:class:`ximpol.irf.rmf.xEnergyDispersion` object.
        The energy dispersion to be used.

    sampling_time : array
        The array to sample the source light curve.

    Warning
    -------
    The sampling_time should not be the same for all sources, and each
    source should be able to decide its own in a sensible way.
    (See issue #44.)
    """
    event_list = xMonteCarloEventList()
    for source in self.values():
        logger.info('Generating event list for source "%s"...' % source.name)
        event_list += source.rvs_event_list(aeff, psf, modf, edisp, **kwargs)
    event_list.apply_vignetting(aeff, self.ra, self.dec)
    event_list.sort()
    return event_list
def bin_(self):
    """Overloaded method."""
    evt_header = self.event_file.hdu_list['PRIMARY'].header
    if self.get('mc'):
        energy = self.event_data['MC_ENERGY']
    else:
        energy = self.event_data['ENERGY']
    phi = self.event_data['PE_ANGLE']
    counts, xedges, yedges = numpy.histogram2d(energy, phi,
                                               bins=self.make_binning())
    primary_hdu = self.build_primary_hdu()
    emin, emax = xedges[:-1], xedges[1:]
    emean = []
    for _emin, _emax in zip(emin, emax):
        emean.append(numpy.mean(energy[(energy > _emin)*(energy < _emax)]))
    data = [emin, emax, emean, counts]
    xBinTableHDUMCUBE.set_phi_spec(self.get('phibins'))
    mcube_hdu = xBinTableHDUMCUBE(data)
    mcube_hdu.setup_header(self.event_file.primary_keywords())
    gti_hdu = self.event_file.hdu_list['GTI']
    hdu_list = fits.HDUList([primary_hdu, mcube_hdu, gti_hdu])
    hdu_list.info()
    logger.info('Writing binned MCUBE data to %s...' % self.get('outfile'))
    hdu_list.writeto(self.get('outfile'), clobber=True)
    logger.info('Done.')
def rvs_event_list(self, aeff, psf, modf, edisp, **kwargs):
    """Extract a random event list for the model component.

    TODO: here we should pass the sampling phase, instead?

    TODO: properly take into account the derivatives in the ephemeris.
    """
    # Create the event list and the count spectrum.
    event_list = xMonteCarloEventList()
    # Mind the count spectrum is made in phase!
    sampling_phase = numpy.linspace(0., 1., 100)
    count_spectrum = xCountSpectrum(self.energy_spectrum, aeff, sampling_phase,
                                    self.column_density, self.redshift)
    # All this is not properly taking into account the ephemeris.
    min_time = kwargs['tstart']
    max_time = kwargs['tstop']
    #min_time = sampling_time[0]
    #max_time = sampling_time[-1]
    delta_time = (max_time - min_time)
    period = self.ephemeris.period(min_time)
    # This is not accurate, as we are effectively discarding the last
    # fractional period. Need to think about it.
    num_periods = int(delta_time/period)
    num_expected_events = delta_time*count_spectrum.light_curve.norm()
    # Extract the number of events to be generated based on the integral
    # of the light curve over the simulation time.
    num_events = numpy.random.poisson(num_expected_events)
    logger.info('About to generate %d events...' % num_events)
    # Extract the event phases and sort them.
    col_phase = count_spectrum.light_curve.rvs(num_events)
    event_list.set_column('PHASE', col_phase)
    col_period = numpy.random.randint(0, num_periods, num_events)
    col_time = (col_period + col_phase)*period
    event_list.set_column('TIME', col_time)
    # Extract the MC energies and smear them with the energy dispersion.
    col_mc_energy = count_spectrum.rvs(col_phase)
    event_list.set_column('MC_ENERGY', col_mc_energy)
    col_pha = edisp.matrix.rvs(col_mc_energy)
    event_list.set_column('PHA', col_pha)
    event_list.set_column('ENERGY', edisp.ebounds(col_pha))
    # Extract the MC sky positions and smear them with the PSF.
    col_mc_ra, col_mc_dec = self.rvs_sky_coordinates(num_events)
    event_list.set_column('MC_RA', col_mc_ra)
    event_list.set_column('MC_DEC', col_mc_dec)
    col_ra, col_dec = psf.smear(col_mc_ra, col_mc_dec)
    event_list.set_column('RA', col_ra)
    event_list.set_column('DEC', col_dec)
    # Extract the photoelectron emission directions.
    pol_degree = self.polarization_degree(col_mc_energy, col_phase,
                                          col_mc_ra, col_mc_dec)
    pol_angle = self.polarization_angle(col_mc_energy, col_phase,
                                        col_mc_ra, col_mc_dec)
    col_pe_angle = modf.rvs_phi(col_mc_energy, pol_degree, pol_angle)
    event_list.set_column('PE_ANGLE', col_pe_angle)
    # Set the source ID.
    event_list.set_column('MC_SRC_ID', self.identifier)
    event_list.sort()
    return event_list
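# A small self-contained sketch of the phase-to-time folding used above: each
# event is assigned a pulse phase and a uniformly chosen period index, and the
# two are combined into an absolute event time. The period and the number of
# events below are purely illustrative.
import numpy

period = 0.0335       # s (illustrative, roughly Crab-like)
num_periods = 1000
num_events = 5
col_phase = numpy.random.random(num_events)
col_period = numpy.random.randint(0, num_periods, num_events)
col_time = (col_period + col_phase)*period
print(col_time)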
def xpsrccoords(source_name):
    """Resolve the coordinates of a source by name."""
    logger.info('Querying CDS name resolver for "%s"...' % source_name)
    coords = SkyCoord.from_name(source_name)
    print(coords.icrs)
    print(coords.galactic)
    logger.info('Done, bye!')
def parse(file_path, emin=1., emax=15., flux_scale=6.0067e-2):
    """Parse the input file with the complete spectral and polarization model.
    """
    logger.info('Parsing input file %s...' % file_path)
    energy, flux, angle, degree = numpy.loadtxt(file_path, unpack=True)
    flux *= flux_scale
    _mask = (energy >= emin)*(energy <= emax)
    return energy[_mask], flux[_mask], angle[_mask], degree[_mask]
def __init__(self, mrf_file_path):
    """Constructor."""
    logger.info('Reading energy dispersion data from %s...' % mrf_file_path)
    self.hdu_list = fits.open(mrf_file_path)
    self.hdu_list.info()
    self.matrix = xEnergyDispersionMatrix(self.hdu_list['MATRIX'])
    self.ebounds = xEnergyDispersionBounds(self.hdu_list['EBOUNDS'])
def build_grb_fits_file(data, outfile):
    primary_hdu = xPrimaryHDU()
    grb_info_hdu = xBinTableGRBmain(data)
    hdu_list = fits.HDUList([primary_hdu, grb_info_hdu])
    hdu_list.info()
    logger.info('Writing GRB main info table to %s...' % outfile)
    hdu_list.writeto(outfile, clobber=True)
    logger.info('Done.')
def parse(file_path, emin=1., emax=15., flux_scale=4.3576):
    """Parse the input file with the complete spectral and polarization model.
    """
    logger.info('Parsing input file %s...' % file_path)
    energy, flux, angle, degree = numpy.loadtxt(file_path, unpack=True)
    flux *= flux_scale
    _mask = (energy >= emin)*(energy <= emax)
    return energy[_mask], flux[_mask], angle[_mask], degree[_mask]
def __init__(self, file_path):
    """Constructor."""
    assert file_path.endswith('.fits')
    logger.info('Opening input event file %s...' % file_path)
    self.hdu_list = fits.open(file_path)
    self.hdu_list.info()
    self.event_data = self.hdu_list['EVENTS'].data
    self.roi_table = self.build_roi_table()
def parse(file_path, emin=1., emax=10.):
    """Parse the input file with the complete spectral and polarization model.
    """
    logger.info('Parsing input file %s...' % file_path)
    energy, flux, degree, angle = numpy.loadtxt(file_path, unpack=True,
                                                usecols=(0, 1, 5, 6))
    angle += 90.
    _mask = (energy >= emin)*(energy <= emax)
    return energy[_mask], flux[_mask], degree[_mask], angle[_mask]
def __init__(self, file_path):
    """Constructor."""
    assert file_path.endswith('.fits')
    logger.info('Opening input event file %s...' % file_path)
    self.hdu_list = fits.open(file_path)
    self.hdu_list.info()
    self.event_data = self.hdu_list['EVENTS'].data
    self.roi_table = self.build_roi_table()
def fit_bin(self, i):
    """Fit the azimuthal distribution for the i-th energy slice."""
    hist = (self.phi_y[i], self.phi_binning, None)
    _fit_results = xAzimuthalResponseGenerator.fit_histogram(hist)
    _fit_results.set_polarization(self.modf(self.emean[i]))
    logger.info(_fit_results)
    self.fit_results.append(_fit_results)
    return _fit_results
def cleanup_dist():
    """Cleanup the distribution folder."""
    if os.path.exists(XIMPOL_DIST):
        logger.info('Removing %s altogether...' % XIMPOL_DIST)
        shutil.rmtree(XIMPOL_DIST)
    filePath = os.path.join(XIMPOL_ROOT, 'MANIFEST')
    if os.path.exists(filePath):
        logger.info('Removing %s...' % filePath)
        os.remove(filePath)
def cleanup(folder_path, patterns=['*~', '*.pyc', '*.pyo']):
    """Cleanup a folder."""
    logger.info('Cleaning up folder %s...' % folder_path)
    fileList = []
    for pattern in patterns:
        fileList += glob.glob(os.path.join(folder_path, pattern))
    for filePath in fileList:
        logger.info('Removing %s...' % filePath)
        os.remove(filePath)
def run():
    PIPELINE.xpobssim(configfile=CFG_FILE, duration=SIM_DURATION,
                      outfile=EVT_FILE_PATH)
    pha1_file_path = PIPELINE.xpbin(EVT_FILE_PATH, algorithm='PHA1')
    spec_fitter = PIPELINE.xpxspec(pha1_file_path)
    (index, index_err), (norm, norm_err) = spec_fitter.fit_parameters()
    logger.info('Fitted PL norm = %.4f +- %.4f (input = %.4f)' %
                (norm, norm_err, PL_NORM))
    logger.info('Fitted PL index = %.4f +- %.4f (input = %.4f)' %
                (index, index_err, PL_INDEX))
def run(save_plots=False):
    """Run all the tasks."""
    if os.path.exists(ANALYSIS_FILE_PATH):
        logger.info('%s exists, delete it if you want to recreate it.' %\
                    ANALYSIS_FILE_PATH)
    else:
        generate()
        prepare()
        analyze()
    plot(save_plots)
def rmdir(dir_path):
    """Remove an entire (empty or non-empty) folder."""
    logger.info('About to remove folder %s...' % dir_path)
    try:
        shutil.rmtree(dir_path)
        logger.info('Folder successfully removed.')
        status = 0
    except Exception as e:
        logger.error('Could not remove folder (%s)' % e)
        status = 1
    return status
def run(save_plots=False):
    """Run all the tasks."""
    if os.path.exists(ANALYSIS_FILE_PATH):
        logger.info('%s exists, delete it if you want to recreate it.' %\
                    ANALYSIS_FILE_PATH)
    else:
        calculate_mdp()
        generate()
        prepare()
        analyze()
    plot(save_plots)
def __init__(self, file_path, build_cdf=True):
    """Constructor."""
    logger.info('Reading FITS image from %s...' % file_path)
    self.hdu_list = fits.open(file_path)
    self.hdu_list.info()
    self.wcs = wcs.WCS(self.hdu_list['PRIMARY'].header)
    self.data = self.hdu_list['PRIMARY'].data.transpose()
    self.vmin = None
    self.vmax = None
    if build_cdf:
        self.cdf = self.build_cdf()
def __init__(self, file_path, build_cdf=True):
    """Constructor."""
    logger.info('Reading FITS image from %s...' % file_path)
    self.hdu_list = fits.open(file_path)
    self.hdu_list.info()
    self.wcs = wcs.WCS(self.hdu_list['PRIMARY'].header)
    self.data = self.hdu_list['PRIMARY'].data.transpose()
    self.vmin = None
    self.vmax = None
    if build_cdf:
        self.build_cdf()
def save_plot(self, outfile, arg_list=[], device='/cps', title=None,
              xaxis='keV', logx=True):
    """Save the plot to the outfile directory. The default format is '.ps'.
    """
    xspec.Plot.device = outfile + device
    xspec.Plot.xAxis = xaxis
    xspec.Plot.xLog = logx
    if title is not None:
        xspec.Plot.addCommand('label top ' + title)
    args = ', '.join(x for x in arg_list)
    logger.info('Saving the plot to %s...' % outfile)
    xspec.Plot(args)
def parse_spectral_model(file_name, emin=0.9, emax=11.):
    """Parse the input file with the spectral points."""
    file_path = os.path.join(XIMPOL_CONFIG, 'ascii', file_name)
    logger.info('Parsing input file %s...' % file_path)
    _energy, _flux, _fluxerr = numpy.loadtxt(file_path, unpack=True)
    _mask = (_energy >= emin)*(_energy <= emax)
    _energy = _energy[_mask]
    _flux = _flux[_mask]
    fmt = dict(xname='Energy', xunits='keV', yname='Flux',
               yunits='cm$^{-2}$ s$^{-1}$ keV$^{-1}$')
    return xInterpolatedUnivariateSplineLinear(_energy, _flux, **fmt)
def select_and_bin(radius=RADIUS):
    for i, ra in enumerate(_ra):
        for j, dec in enumerate(_dec):
            logger.info('Analyzing region at ra = %s, dec = %s' % (ra, dec))
            sel_file_path = get_sel_file_path(i, j)
            mcube_file_path = get_mcube_file_path(i, j)
            logger.info('Going to use %s and %s for the output files...' %
                        (sel_file_path, mcube_file_path))
            pipeline.xpselect(evt_file_path, ra=ra, dec=dec, rad=radius,
                              outfile=sel_file_path)
            pipeline.xpbin(sel_file_path, algorithm='MCUBE', ebinalg='LIST',
                           ebinning=E_BINNING, outfile=mcube_file_path)
def __init__(self, mrf_file_path):
    """Constructor."""
    logger.info('Reading modulation factor data from %s...' % mrf_file_path)
    self.hdu_list = fits.open(mrf_file_path)
    self.hdu_list.info()
    _data = self.hdu_list['MODFRESP'].data
    _x = 0.5*(_data.field('ENERG_LO') + _data.field('ENERG_HI'))
    _y = _data.field('MODFRESP')
    fmt = dict(xname='Energy', xunits='keV', yname='Modulation factor',
               optimize=True, tolerance=1e-4)
    xInterpolatedUnivariateSplineLinear.__init__(self, _x, _y, **fmt)
    self.generator = xAzimuthalResponseGenerator()
def run(save_plots=False):
    """Run all the tasks."""
    if os.path.exists(ANALYSIS_FILE_PATH):
        logger.info('%s exists, delete it if you want to recreate it.' %\
                    ANALYSIS_FILE_PATH)
    else:
        generate()
        global PHASE_BINNING
        PHASE_BINNING = _phase_binning()
        prepare()
        analyze()
    plot(save_plots)
def parse_spectral_model(file_name, emin=0.5, emax=15.):
    """Parse the input file with the spectral points."""
    file_path = os.path.join(XIMPOL_CONFIG, 'ascii', file_name)
    logger.info('Parsing input file %s...' % file_path)
    _energy, _flux = numpy.loadtxt(file_path, delimiter=',', unpack=True)
    _mask = (_energy >= emin)*(_energy <= emax)
    _energy = _energy[_mask]
    _flux = _flux[_mask]
    _flux /= _energy**2.
    fmt = dict(xname='Energy', xunits='keV', yname='Flux',
               yunits='cm$^{-2}$ s$^{-1}$ keV$^{-1}$')
    return xInterpolatedUnivariateSplineLinear(_energy, _flux, **fmt)
def mv(source, dest):
    """Move a file.

    Return 0 upon successful operation, 1 otherwise.
    """
    logger.info('About to move %s to %s...' % (source, dest))
    try:
        shutil.move(source, dest)
        logger.info('File successfully moved.')
        status = 0
    except Exception as e:
        logger.error('Could not move file (%s)' % e)
        status = 1
    return status
def select_and_bin():
    """Run the region selection and the binning."""
    logger.info('Creating the mapcube for the entire source...')
    pipeline.xpbin(evt_file_path, algorithm='MCUBE', ebinalg='LIST',
                   ebinning=E_BINNING)
    logger.info('Opening region file %s...' % reg_file_path)
    regions = pyregion.open(reg_file_path)
    logger.info('Found %d regions...' % len(regions))
    for i, region in enumerate(regions):
        ra, dec, rad = region.coord_list
        rad *= 60.
        logger.info('Analyzing region at ra = %s, dec = %s' % (ra, dec))
        sel_file_path = get_sel_file_path(i)
        mcube_file_path = get_mcube_file_path(i)
        pipeline.xpselect(evt_file_path, ra=ra, dec=dec, rad=rad,
                          outfile=sel_file_path)
        pipeline.xpbin(sel_file_path, algorithm='MCUBE', ebinalg='LIST',
                       ebinning=E_BINNING, outfile=mcube_file_path)
def optimize_grid_linear(x, y, tolerance=1e-4):
    """Optimize a pair of (x, y) arrays for the corresponding spline
    definition.

    This loops over the input arrays and removes unnecessary data points to
    minimize the length of the arrays necessary to the spline definition.

    Args
    ----
    x : array
        The input x-array.

    y : array
        The input y-array.

    tolerance : float
        The maximum relative difference between the generic yi value and the
        extrapolation of the two previous optimized data points for the point
        i to be removed.
    """
    assert len(x) == len(y)
    logger.info('Optimizing grid with %d starting points...' % len(x))
    # Start a new series with the first two points of the input arrays.
    _x = [x[0], x[1]]
    _y = [y[0], y[1]]
    # Loop over the points 3 ... (N - 1).
    for i, (_xi, _yi) in enumerate(list(zip(x, y))[2:-1]):
        # Extrapolate the last two points of the new series to xi and
        # see how far we are from the actual yi.
        delta = interpolate(_x[-2], _y[-2], _x[-1], _y[-1], _xi) - _yi
        if abs(delta/_yi) > tolerance:
            # If the difference is larger than the tolerance, add a point.
            # (This has the drawback that we tend to add pairs of points at
            # each change of slope.)
            _x.append(_xi)
            _y.append(_yi)
            # Interpolate the points last and (last - 2) to (last - 1).
            delta = interpolate(_x[-3], _y[-3], _x[-1], _y[-1], _x[-2]) - _y[-2]
            if abs(delta/_y[-2]) < tolerance:
                # If the penultimate point was not necessary, remove it.
                _x.remove(_x[-2])
                _y.remove(_y[-2])
    # Append the last point of the original array to the list.
    _x.append(x[-1])
    _y.append(y[-1])
    _x, _y = numpy.array(_x), numpy.array(_y)
    logger.info('Done, %d points remaining.' % len(_x))
    return _x, _y
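# A minimal usage sketch for optimize_grid_linear: start from a densely
# sampled, smooth curve and keep only the points needed to reproduce it by
# linear interpolation within the given tolerance. The sampled function and
# the tolerance below are illustrative.
import numpy

x = numpy.linspace(0., 10., 1001)
y = numpy.exp(-x) + 0.05*x
_x, _y = optimize_grid_linear(x, y, tolerance=1e-3)
print('%d -> %d points' % (len(x), len(_x)))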
def mkdir(dir_path):
    """Create a directory (unless it already exists).

    Return 0 upon successful operation, 1 otherwise.
    """
    if not os.path.exists(dir_path):
        logger.info('About to create folder %s...' % dir_path)
        try:
            os.makedirs(dir_path)
            logger.info('Folder successfully created.')
            status = 0
        except Exception as e:
            logger.error('Could not create folder (%s)' % e)
            status = 1
        return status
def xpselect(file_path, **kwargs):
    """Application for data subselection.

    We want to (loosely) model this on
    http://fermi.gsfc.nasa.gov/ssc/data/analysis/scitools/help/gtselect.txt
    """
    assert file_path.endswith('.fits')
    event_select = xEventSelect(file_path, **kwargs)
    outfile = event_select.get('outfile')
    if os.path.exists(outfile) and not event_select.get('clobber'):
        logger.info('Output file %s already exists.' % outfile)
        logger.info('Remove the file or set "clobber = True" to overwrite it.')
    else:
        event_select.select()
    return outfile
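# A minimal usage sketch for xpselect, assuming an event file produced by the
# simulation already exists on disk; the file names and the selection region
# are illustrative (ra, dec, rad and outfile are the keyword arguments used
# elsewhere in the package, and clobber controls overwriting as above).
sel_file_path = xpselect('sim_events.fits', ra=83.633, dec=22.015, rad=0.25,
                         outfile='sim_events_select.fits', clobber=True)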
def distsrc():
    """Create a plain source distribution."""
    tag, buildDate = version_info()
    logger.info('Creating plain source distribution...')
    distDir = os.path.join(XIMPOL_DIST, 'src')
    srcLogFilePath = 'src.log'
    # Create the distribution.
    cmd('python setup.py sdist --dist-dir=%s --prune' % distDir,
        verbose=False, logFilePath=srcLogFilePath)
    # Cleanup.
    rm(srcLogFilePath)
    rm(os.path.join(XIMPOL_ROOT, 'MANIFEST'))
    logger.info('Done.')
def xpbin(file_path, **kwargs):
    """Application to bin the data.

    We want to (loosely) model this on
    http://fermi.gsfc.nasa.gov/ssc/data/analysis/scitools/help/gtbin.txt
    """
    assert file_path.endswith('.fits')
    event_binning = BIN_ALG_DICT[kwargs['algorithm']](file_path, **kwargs)
    outfile = event_binning.get('outfile')
    if os.path.exists(outfile) and not event_binning.get('clobber'):
        logger.info('Output file %s already exists.' % outfile)
        logger.info('Remove the file or set "clobber = True" to overwrite it.')
    else:
        event_binning.bin_()
    return outfile
def write_fits(self, file_path, simulation_info):
    """Write the event list and associated ancillary information to file.

    Arguments
    ---------
    file_path : str
        The path to the output file.

    simulation_info :
        A generic container with all the relevant information about the
        simulation.

    Warning
    -------
    The information about the detector and telescope should be in the
    primary header of the IRF tables, and that's where we should be
    retrieving it from. (See issue #49.)
    """
    primary_hdu = xPrimaryHDU()
    roi_model = simulation_info.roi_model
    irf_name = simulation_info.irf_name
    ebounds_header = simulation_info.edisp.hdu_list['EBOUNDS'].header
    gti_list = simulation_info.gti_list
    keywords = [('ROIRA', roi_model.ra, 'right ascension of the ROI center'),
                ('ROIDEC', roi_model.dec, 'declination of the ROI center'),
                ('EQUINOX', 2000., 'equinox for RA and DEC'),
                ('IRFNAME', irf_name, 'name of the IRFs used for the MC'),
                ('TELESCOP', ebounds_header['TELESCOP']),
                ('INSTRUME', ebounds_header['INSTRUME']),
                ('DETNAM', ebounds_header['DETNAM']),
                ('DETCHANS', ebounds_header['DETCHANS'])]
    primary_hdu.setup_header(keywords)
    data = [self[name] for name in\
            xBinTableHDUMonteCarloEvents.spec_names()]
    event_hdu = xBinTableHDUMonteCarloEvents(data)
    _start = numpy.array([gti[0] for gti in gti_list])
    _stop = numpy.array([gti[1] for gti in gti_list])
    gti_hdu = xBinTableHDUGTI([_start, _stop])
    _src_id = numpy.array([src.identifier for src in roi_model.values()])
    _src_name = numpy.array([src.name for src in roi_model.values()])
    roi_hdu = xBinTableHDURoiTable([_src_id, _src_name])
    hdu_list = fits.HDUList([primary_hdu, event_hdu, gti_hdu, roi_hdu])
    hdu_list.info()
    hdu_list.writeto(file_path, clobber=True)
    logger.info('Event list written to %s...' % file_path)
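# A minimal sketch of how a file written as above can be inspected with
# astropy; the file name is illustrative, while the EVENTS and GTI extension
# names match those used throughout the package.
from astropy.io import fits

with fits.open('sim_events.fits') as hdu_list:
    hdu_list.info()
    events = hdu_list['EVENTS'].data
    gti = hdu_list['GTI'].data
    print('%d events in %d GTI(s)' % (len(events), len(gti)))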
def bin_(self):
    """Overloaded method."""
    from ximpol.irf import load_mrf
    from ximpol.irf.mrf import mdp99
    modf = load_mrf(self.event_file.irf_name())
    evt_header = self.event_file.hdu_list['PRIMARY'].header
    if self.get('mc'):
        energy = self.event_data['MC_ENERGY']
    else:
        energy = self.event_data['ENERGY']
    phi = self.event_data['PE_ANGLE']
    phi_hist, xedges, yedges = numpy.histogram2d(energy, phi,
                                                 bins=self.make_binning())
    primary_hdu = self.build_primary_hdu()
    emin, emax = xedges[:-1], xedges[1:]
    emean = []
    effmu = []
    ncounts = []
    mdp = []
    for _emin, _emax in zip(emin, emax):
        _mask = (energy > _emin)*(energy < _emax)
        _energy = energy[_mask]
        _emean = numpy.mean(_energy)
        _effmu = modf.weighted_average(_energy)
        _ncounts = len(_energy)
        _mdp = mdp99(_effmu, _ncounts)
        emean.append(_emean)
        effmu.append(_effmu)
        ncounts.append(_ncounts)
        mdp.append(_mdp)
    data = [emin, emax, emean, effmu, ncounts, mdp, phi_hist]
    xBinTableHDUMCUBE.set_phi_spec(self.get('phibins'))
    mcube_hdu = xBinTableHDUMCUBE(data)
    mcube_hdu.setup_header(self.event_file.primary_keywords())
    gti_hdu = self.event_file.hdu_list['GTI']
    hdu_list = fits.HDUList([primary_hdu, mcube_hdu, gti_hdu])
    hdu_list.info()
    logger.info('Writing binned MCUBE data to %s...' % self.get('outfile'))
    hdu_list.writeto(self.get('outfile'), clobber=True)
    logger.info('Done.')
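# For reference: the minimum detectable polarization at the 99% confidence
# level computed in the loop above is, in the standard background-free
# formulation, MDP_99 = 4.29/(mu*sqrt(N)), with mu the effective modulation
# factor and N the number of counts. A minimal sketch assuming that
# convention (the actual mdp99 helper in ximpol.irf.mrf may differ in the
# handling of edge cases):
import numpy

def mdp99_sketch(eff_mu, num_counts):
    """Return the 99% confidence MDP for a source-dominated measurement."""
    if num_counts <= 0:
        return numpy.inf
    return 4.29/(eff_mu*numpy.sqrt(num_counts))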
def bin_(self):
    """Overloaded method."""
    evt_header = self.event_file.hdu_list['PRIMARY'].header
    counts, edges = numpy.histogram(self.event_data['PHASE'],
                                    bins=self.make_binning())
    primary_hdu = self.build_primary_hdu()
    data = [self.bin_centers(edges),
            self.bin_widths(edges),
            counts,
            numpy.sqrt(counts)]
    rate_hdu = xBinTableHDUPHASG(data)
    rate_hdu.setup_header(self.event_file.primary_keywords())
    gti_hdu = self.event_file.hdu_list['GTI']
    hdu_list = fits.HDUList([primary_hdu, rate_hdu, gti_hdu])
    hdu_list.info()
    logger.info('Writing binned PHASG data to %s...' % self.get('outfile'))
    hdu_list.writeto(self.get('outfile'), clobber=True)
    logger.info('Done.')
def __init__(self, x, y, xname=None, xunits=None, yname=None, yunits=None,
             optimize=False, tolerance=1e-4):
    """Constructor."""
    if optimize:
        oldx, oldy = x, y
        x, y = optimize_grid_linear(x, y, tolerance)
    xInterpolatedUnivariateSpline.__init__(self, x, y, None, [None, None], 1,
                                           xname, xunits, yname, yunits)
    if optimize:
        dist = self.dist(oldx, oldy)
        logger.info('Relative (max/ave) dist. to original array: %e/%e' %\
                    (dist.max(), dist.sum()/len(dist)))
def cp(source, dest, create_tree=False):
    """Copy a file.

    Return 0 upon successful operation, 1 otherwise.
    """
    logger.info('About to copy %s to %s...' % (source, dest))
    destFolder = os.path.dirname(dest)
    if not os.path.exists(destFolder) and create_tree:
        mkdir(destFolder)
    try:
        if os.path.isdir(source):
            shutil.copytree(source, dest)
        else:
            shutil.copy(source, dest)
        logger.info('File successfully copied.')
        status = 0
    except Exception as e:
        logger.error('Could not copy file (%s)' % e)
        status = 1
    return status
def make_psf(irf_name):
    """Write the XIPE PSF parameters."""
    logger.info('Creating XIPE PSF fits file...')
    output_file_name = '%s.psf' % irf_name
    output_file_path = os.path.join(XIMPOL_IRF, 'fits', output_file_name)
    if os.path.exists(output_file_path):
        rm(output_file_path)
    logger.info('Creating PRIMARY HDU...')
    primary_hdu = xPrimaryHDU('ximpol', XIPE_KEYWORDS, XIPE_COMMENTS)
    print(repr(primary_hdu.header))
    logger.info('Creating PSF HDU...')
    data = PSF_PARAMETERS
    psf_hdu = xBinTableHDUPSF(data, [], XIPE_COMMENTS)
    print(repr(psf_hdu.header))
    logger.info('Writing output file %s...' % output_file_path)
    hdulist = fits.HDUList([primary_hdu, psf_hdu])
    hdulist.info()
    hdulist.writeto(output_file_path)
    logger.info('Done.')
def save_current_figure(file_name, folder=XIMPOL_DOC_FIGURES, clear=True,
                        show=False):
    """Save the current matplotlib figure in `XIMPOL_DOC_FIGURES`.

    Arguments
    ---------
    file_name : string
        The name of the output file.

    clear : bool
        If `True`, the current image is cleared after the fact.
    """
    file_path = os.path.join(folder, file_name)
    logger.info('Saving current figure to %s...' % file_path)
    pyplot.savefig(file_path, transparent=True)
    if show:
        pyplot.show()
    if clear:
        pyplot.clf()