def computeLOG_SSDCslots(self, tmin, tmax):
    """
    Given tmin and tmax, it creates one-day slots (tmax is included).

    Example: tmin 16/06/2018, tmax 20/06/2018 returns:
        16/06/2018 17/06/2018
        17/06/2018 18/06/2018
        18/06/2018 19/06/2018
        19/06/2018 20/06/2018
        20/06/2018 21/06/2018
    """
    dt1 = datetime.timedelta(days=1)
    tmin = AstroUtils.time_mjd_to_fits(tmin)
    tmax = AstroUtils.time_mjd_to_fits(tmax)
    tmin = datetime.datetime.strptime(tmin, "%Y-%m-%dT%H:%M:%S.%f")
    tmax = datetime.datetime.strptime(tmax, "%Y-%m-%dT%H:%M:%S.%f")
    slots = []
    while tmin <= tmax:
        slot = [tmin, tmin + dt1]
        tmin = tmin + dt1
        slots.append(slot)
    return pd.DataFrame(slots, columns=["tmin", "tmax"])
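# Usage sketch (assumes an AGDataset-like instance `agdataset` exposing
# computeLOG_SSDCslots): each row of the returned DataFrame is a one-day
# LOG slot, matching the docstring example above (MJD 58285 = 16/06/2018).
#
#   slots = agdataset.computeLOG_SSDCslots(58285.0, 58289.0)
#   print(slots)
#   #         tmin        tmax
#   # 0 2018-06-16  2018-06-17
#   # ...
#   # 4 2018-06-20  2018-06-21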
def _setTime(confDict):
    if confDict["selection"]["timetype"] == "MJD":
        confDict["selection"]["tmax"] = AstroUtils.time_mjd_to_tt(
            confDict["selection"]["tmax"])
        confDict["selection"]["tmin"] = AstroUtils.time_mjd_to_tt(
            confDict["selection"]["tmin"])
        confDict["selection"]["timetype"] = "TT"
def load_and_plot(self, agile, fermi, tstart, tstop, path, lines=[], plotrate=False):
    """Main function: it loads and plots the data """
    # ---- Loading data ----
    agile_data = pd.read_csv(agile, header=0, sep=" ")
    fermi_data = pd.read_csv(fermi, header=0, sep=" ")
    # ---- Converting times ----
    tstart_tt = AstroUtils.time_mjd_to_agile_seconds(tstart)
    tstop_tt = AstroUtils.time_mjd_to_agile_seconds(tstop)
    # ---- Selecting data ----
    agile_data = agile_data[agile_data.tstart >= tstart_tt]
    agile_data = agile_data[agile_data.tstop <= tstop_tt]
    fermi_data = fermi_data[fermi_data.tstart >= tstart_tt]
    fermi_data = fermi_data[fermi_data.tstop <= tstop_tt]
    # ---- Plotting data ----
    f, (ax1, ax2) = plt.subplots(2, figsize=(12.18, 10))
    self.plot_offaxis(ax1, ax2, path, tstart, tstop, 60, 1, 0, lines)
    self.plot(ax2, agile_data, fermi_data, lines, plotrate)
    plt.show()
    f.savefig(f"merged_plot_{tstart}_{tstop}.pdf", format="pdf")
def checkSignificance(self, fermi, tstart, tstop):
    fermi_data = pd.read_csv(fermi, header=0, sep=" ")
    ntrials = 0
    nsig = 0
    for time in range(int(tstart), int(tstop)):
        tstart_tt = AstroUtils.time_mjd_to_agile_seconds(time)
        tstop_tt = AstroUtils.time_mjd_to_agile_seconds(time + 1)
        fermi_data2 = fermi_data[fermi_data.tstart >= tstart_tt]
        fermi_data2 = fermi_data2[fermi_data2.tstop <= tstop_tt]
        fermimean = fermi_data2["cts"].mean()
        fermistd = fermi_data2["cts"].std()
        for cts in fermi_data2["cts"]:
            ntrials = ntrials + 1
            if cts >= (fermimean + 5 * fermistd):
                self.logger.info(self, "####")
                self.logger.info(self, f"{fermi_data2['tstart']}")
                nsig = nsig + 1
                break
    self.logger.info(self, f"ntrials {ntrials}")
    self.logger.info(self, f"nsig {nsig}")
def test_mjd_conversion(input_date, expected):
    assert AstroUtils.time_mjd_to_agile_seconds(input_date) == expected["agile_seconds"]
    assert AstroUtils.time_mjd_to_jd(input_date) == pytest.approx(expected["jd"], 0.00001)
    assert AstroUtils.time_mjd_to_unix(input_date) == expected["unix"]
    assert AstroUtils.time_mjd_to_fits(input_date) == expected["fits"]
    assert AstroUtils.time_mjd_to_iso(input_date) == expected["iso"]
def dataIsMissing(self, tmin, tmax, queryFilepath):
    """ This method can be extended to handle the case of partial missing data """
    if not queryFilepath.exists():
        self.logger.warning(self, f"Query file {queryFilepath} does not exist")
        return DataStatus.MISSING

    tminUtc = AstroUtils.time_mjd_to_fits(tmin)  # YYYY-MM-DDTHH:mm:ss
    tmaxUtc = AstroUtils.time_mjd_to_fits(tmax)
    tminUtc = datetime.datetime.strptime(tminUtc, "%Y-%m-%dT%H:%M:%S.%f")
    tmaxUtc = datetime.datetime.strptime(tmaxUtc, "%Y-%m-%dT%H:%M:%S.%f")
    self.logger.debug(self, f"({tmin}, {tmax}) => ({tminUtc}, {tmaxUtc})")

    datesDF = pd.read_csv(queryFilepath, header=None, sep=" ",
                          names=["ssdctmin", "ssdctmax"],
                          parse_dates=["ssdctmin", "ssdctmax"])

    # check the interval containing tmin
    intervalIndexTmin = self.getInterval(datesDF, tminUtc)
    # if tmin is not included in any interval:
    if intervalIndexTmin == -1:
        self.logger.debug(self, f"tminUtc {tminUtc} not present in any interval!")
        return DataStatus.MISSING

    # check the interval containing tmax
    intervalIndexTmax = self.getInterval(datesDF, tmaxUtc)
    # if tmax is not included in any interval:
    if intervalIndexTmax == -1:
        self.logger.debug(self, f"tmaxUtc {tmaxUtc} not present in any interval!")
        return DataStatus.MISSING

    self.logger.debug(self, f"intervalIndexTmin: {intervalIndexTmin}")
    self.logger.debug(self, f"intervalIndexTmax: {intervalIndexTmax}")

    # check if there's missing data between the two intervals
    if self.gotHole(datesDF, intervalIndexTmin, intervalIndexTmax):
        self.logger.debug(self, "Missing data between the two intervals!")
        return DataStatus.MISSING

    return DataStatus.OK
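# A minimal sketch of the interval lookup that dataIsMissing relies on.
# The real getInterval is defined elsewhere in this class; the name and
# semantics below are assumptions inferred from the -1 checks above and
# from test_getInterval: return the positional index of the first
# [ssdctmin, ssdctmax] row that contains t, or -1 if none does.
def _getInterval_sketch(self, datesDF, t):
    # datesDF carries the parsed "ssdctmin"/"ssdctmax" datetime columns
    mask = (datesDF["ssdctmin"] <= t) & (t <= datesDF["ssdctmax"])
    matches = datesDF.index[mask]
    return int(matches[0]) if len(matches) > 0 else -1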
def gridFiles(self, tmin, tmax):
    """
    https://tools.ssdc.asi.it/AgileData/rest/GRIDFiles/2009-10-20T00:00:00/2009-11-10T00:00:00

    The actual data being downloaded could correspond to a bigger interval
    than [tmin, tmax]: this is because the SSDC REST service uses the
    following conventions:
    * the EVT file always contains 15 days of data
    * the LOG file always contains 1 day of data
    * the mapping between tmin,tmax and the actual time span of the data
      being downloaded can be inferred from the next examples:
        * tmin=03/01/21 tmax=05/01/21
            * 1 evt file: 01/01/21 to 15/01/21
            * 3 log files: 03/01/21, 04/01/21, 05/01/21
        * tmin=14/01/21 tmax=18/01/21
            * 2 evt files: 01/01/21 to 15/01/21 and 15/01/21 to 31/01/21
            * 5 log files: 14/01/21, 15/01/21, 16/01/21, 17/01/21, 18/01/21
    """
    tmin_utc = AstroUtils.time_mjd_to_fits(tmin)
    tmax_utc = AstroUtils.time_mjd_to_fits(tmax)
    api_url = f"https://tools.ssdc.asi.it/AgileData/rest/GRIDFiles/{tmin_utc}/{tmax_utc}"
    self.logger.info(self, f"Downloading data ({tmin},{tmax}) from {api_url}..")

    start = time()
    response = self.http.get(api_url, stream=True)
    outpath = f"/tmp/agile_{str(uuid.uuid4())}.tar.gz"
    with open(outpath, "wb") as f:
        # Writing chunks for large downloads
        for chunk in tqdm(response.iter_content(chunk_size=1024 * 1024 * 10)):
            f.write(chunk)
    end = time() - start

    outpath_size = os.stat(outpath).st_size
    self.logger.info(self, f"Took {end} seconds. Downloaded {outpath_size} bytes.")

    if not Path(outpath).is_file():
        raise FileNotFoundError
    if outpath_size == 0:
        self.logger.warning(self, f"The downloaded data {outpath} is empty.")

    return outpath
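# Usage sketch (assumes `agrest` is an instance of the REST client class
# these methods belong to; downloadData below accesses it as self.agrest).
# MJD 55124/55145 correspond to the 2009-10-20/2009-11-10 dates used in the
# docstring URL; the EVT content may span a larger window than requested.
#
#   tarFilePath = agrest.gridFiles(55124.0, 55145.0)
#   print(tarFilePath)  # e.g. /tmp/agile_<uuid>.tar.gz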
def test_astro_utils_time_fits_to_mjd_2(self):
    sec_tol = 0.00000001
    mjd = AstroUtils.time_fits_to_mjd("2020-01-23T10:56:53.000")
    assert abs(58871.45616898 - mjd) <= sec_tol
def test_astro_utils_time_utc_to_mjd(self):
    sec_tol = 0.00000001
    mjd = AstroUtils.time_utc_to_mjd("2020-01-23T10:56:53")
    self.assertTrue(abs(58871.45616898 - mjd) <= sec_tol)
def test_astro_utils_time_agile_seconds_to_utc(self):
    sec_tol = 1
    # This date would result in "0 days"
    fitstime = AstroUtils.time_agile_seconds_to_fits(449582332)
    dt = datetime.strptime(fitstime, '%Y-%m-%dT%H:%M:%S.%f')
    assert dt.year == 2018
    assert dt.month == 3
    assert dt.day == 31
    assert dt.hour == 11
    assert dt.minute == 58
    assert abs(52 - dt.second) <= sec_tol
def test_astro_utils_time_utc_to_tt(self):
    tol = 0.0001
    tt = AstroUtils.time_utc_to_tt("2020-01-23T10:56:53")
    self.assertTrue(abs(506861813 - tt) <= tol)
def computeEVT_SSDCslots(self, tmin, tmax):
    """
    Given tmin and tmax, it creates 15-day slots.

    Example: tmin 16/06/2018, tmax 25/09/2018 returns:
        15/06/2018 30/06/2018
        30/06/2018 15/07/2018
        15/07/2018 31/07/2018
        31/07/2018 15/08/2018
        15/08/2018 31/08/2018
        31/08/2018 15/09/2018
        15/09/2018 30/09/2018
    """
    tmin = AstroUtils.time_mjd_to_fits(tmin)
    tmax = AstroUtils.time_mjd_to_fits(tmax)
    tmin = datetime.datetime.strptime(tmin, "%Y-%m-%dT%H:%M:%S.%f")
    tmax = datetime.datetime.strptime(tmax, "%Y-%m-%dT%H:%M:%S.%f")
    dt1 = datetime.timedelta(days=1)
    dt14 = datetime.timedelta(days=14)
    dt15 = datetime.timedelta(days=15)
    slots = []
    while tmin <= tmax:
        firstDayOfMonth = tmin.replace(day=1)
        lastDay = calendar.monthrange(tmin.year, tmin.month)[-1]
        lastDayOfMonth = datetime.date(tmin.year, tmin.month, lastDay)
        lastDayOfMonth = datetime.datetime.combine(
            lastDayOfMonth, datetime.datetime.min.time())
        if firstDayOfMonth <= tmin <= firstDayOfMonth + dt14:
            # first half of the month: [last day of previous month, 15th]
            slot = [firstDayOfMonth - dt1, firstDayOfMonth + dt14]
            tmin = firstDayOfMonth + dt15
        else:
            # second half of the month: [15th, last day of the month]
            # (plain else, so a tmin with a time-of-day on the last day
            # of the month cannot leave `slot` unassigned)
            slot = [firstDayOfMonth + dt14, lastDayOfMonth]
            tmin = lastDayOfMonth + dt1
        slots.append(slot)
    return pd.DataFrame(slots, columns=["tmin", "tmax"])
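# Usage sketch: the EVT slots are the fixed SSDC 15-day windows that cover
# [tmin, tmax]. With tmin 16/06/2018 (MJD 58285) and tmax 25/09/2018
# (MJD 58386) this yields the seven rows listed in the docstring above.
#
#   slots = agdataset.computeEVT_SSDCslots(58285.0, 58386.0)
#   print(slots.iloc[0])  # tmin 2018-06-15, tmax 2018-06-30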
def test_astro_utils_time_jd_to_civil(self):
    tol = 0.044
    civ = AstroUtils.jd_to_civil(2458871.95616898)
    self.assertEqual(civ[0], 2020)
    self.assertEqual(civ[1], 1)
    self.assertTrue(abs(23 - civ[2]) <= tol)
def test_astro_utils_time_fits_to_agile_seconds(self):
    tol = 0.0001
    agileseconds = AstroUtils.time_fits_to_agile_seconds("2020-01-23T10:56:53.000")
    assert abs(506861813 - agileseconds) <= tol
def gridList(self, tmin, tmax):
    """
    Example response:

    {'Response': {'message': None, 'statusCode': 'OK'},
     'AgileFiles': [
        {'filename': 'ag-182087934_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182087934_STD0P.LOG.gz'},
        {'filename': 'ag-182174334_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182174334_STD0P.LOG.gz'},
        {'filename': 'ag-182260734_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182260734_STD0P.LOG.gz'},
        {'filename': 'ag-182347134_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182347134_STD0P.LOG.gz'},
        {'filename': 'ag-182433534_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182433534_STD0P.LOG.gz'},
        {'filename': 'ag-182519934_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182519934_STD0P.LOG.gz'},
        {'filename': 'ag-182606334_STD0P.LOG.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/STD0P_LOG/ag-182606334_STD0P.LOG.gz'},
        {'filename': 'ag0909301200_0910151200_STD0P_FM.EVT.gz',
         'absolutePath': 'std/0909301200_0910151200-86596/ag0909301200_0910151200_STD0P_FM.EVT.gz'},
        {'filename': 'ag-182692734_STD0P.LOG.gz',
         'absolutePath': 'std/0910151200_0910311200-86597/STD0P_LOG/ag-182692734_STD0P.LOG.gz'},
        {'filename': 'ag0910151200_0910311200_STD0P_FM.EVT.gz',
         'absolutePath': 'std/0910151200_0910311200-86597/ag0910151200_0910311200_STD0P_FM.EVT.gz'}
     ]}
    """
    tmin_utc = AstroUtils.time_mjd_to_fits(tmin)
    tmax_utc = AstroUtils.time_mjd_to_fits(tmax)
    api_url = f"https://tools.ssdc.asi.it/AgileData/rest/GRIDList/{tmin_utc}/{tmax_utc}"
    self.logger.info(
        self,
        f"Downloading the list of files to download ({tmin},{tmax}) ({tmin_utc}, {tmax_utc}) from {api_url}.."
    )

    start = time()
    response = self.http.get(api_url)
    json_data = json.loads(response.text)
    end = time() - start
    self.logger.info(self, f"Took {end} seconds")

    if json_data["Response"]["statusCode"] != "OK":
        raise SSDCRestErrorDownload(json_data["Response"]["message"])

    if json_data["Response"]["message"] == "No data found.":
        raise SSDCRestErrorDownload(json_data["Response"]["message"])

    return json_data["AgileFiles"]
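# Usage sketch: gridList returns the list of dicts under "AgileFiles";
# each entry carries "filename" and "absolutePath", as in the docstring
# example above (`agrest` is the REST client instance, an assumption).
#
#   for entry in agrest.gridList(55124.0, 55145.0):
#       print(entry["filename"], "->", entry["absolutePath"])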
def test_astro_utils_time_fits_to_mjd(self):
    tol = 0.00000001
    dt = "2020-01-23T10:56:53.000"
    mjd = AstroUtils.time_fits_to_mjd(dt)
    assert abs(58871.45616898 - mjd) <= tol
def test_astro_utils_to_jd_with_mjd_format(self):
    tol = 0.00000001
    dt = datetime.strptime("2020-01-23T10:56:53", '%Y-%m-%dT%H:%M:%S')
    mjd = AstroUtils.to_jd(dt, fmt="mjd")
    self.assertTrue(abs(58871.45616898 - mjd) <= tol)
def test_getInterval(self, logger, datacoveragepath):
    agdataset = AGDataset(logger, datacoveragepath)
    queryEVTPath = Path(__file__).absolute().parent.joinpath(
        "test_data", "getinterval_EVT.qfile")
    queryLOGPath = Path(__file__).absolute().parent.joinpath(
        "test_data", "getinterval_LOG.qfile")
    datesEVTDF = pd.read_csv(queryEVTPath, header=None, sep=" ",
                             names=["ssdctmin", "ssdctmax"],
                             parse_dates=["ssdctmin", "ssdctmax"])
    datesLOGDF = pd.read_csv(queryLOGPath, header=None, sep=" ",
                             names=["ssdctmin", "ssdctmax"],
                             parse_dates=["ssdctmin", "ssdctmax"])

    t = 58053  # 2017-10-27T00:00:00.000
    tfits = AstroUtils.time_mjd_to_fits(t)
    tfits = datetime.strptime(tfits, "%Y-%m-%dT%H:%M:%S.%f")
    intervalIndexEVT = agdataset.getInterval(datesEVTDF, tfits)
    intervalIndexLOG = agdataset.getInterval(datesLOGDF, tfits)
    assert intervalIndexEVT == 0
    assert intervalIndexLOG == 2

    t = 59003  # 2020-06-03T00:00:00
    tfits = AstroUtils.time_mjd_to_fits(t)
    tfits = datetime.strptime(tfits, "%Y-%m-%dT%H:%M:%S.%f")
    intervalIndexEVT = agdataset.getInterval(datesEVTDF, tfits)
    intervalIndexLOG = agdataset.getInterval(datesLOGDF, tfits)
    assert intervalIndexEVT == -1
    assert intervalIndexLOG == -1
def test_astro_utils_time_tt_to_utc(self):
    sec_tol = 1
    utc = AstroUtils.time_tt_to_utc(506861813)
    dt = datetime.strptime(utc, '%Y-%m-%dT%H:%M:%S')
    self.assertEqual(dt.year, 2020)
    self.assertEqual(dt.month, 1)
    self.assertEqual(dt.day, 23)
    self.assertEqual(dt.hour, 10)
    self.assertEqual(dt.minute, 56)
    self.assertTrue(abs(53 - dt.second) <= sec_tol)
def test_filterAP(self):
    product = AstroUtils.AP_filter(
        self.datadir + "/E1q1_604800s_emin100_emax10000_r2.ap",
        1, 174142800, 447490800, self.currentDirPath)
    with open(product, "r") as f:
        linesNumber = len(f.readlines())
    assert 4 == linesNumber
    os.remove(os.path.join(self.currentDirPath, "result.txt"))
    os.remove(os.path.join(self.currentDirPath, product))
def test_astro_utils_time_mjd_to_fits(self):
    sec_tol = 1
    fitstime = AstroUtils.time_mjd_to_fits(58871.45616898)
    dt = datetime.strptime(fitstime, '%Y-%m-%dT%H:%M:%S.%f')
    assert dt.year == 2020
    assert dt.month == 1
    assert dt.day == 23
    assert dt.hour == 10
    assert dt.minute == 56
    assert abs(53 - dt.second) <= sec_tol
def test_setOptionTimeMJD(self):
    test_out_dir = self.set_outputfolder("test_setOptionTimeMJD")
    ag = AGAnalysis(self.agilepyConf)
    tmin1 = 58030.0
    tmax1 = 58035.0
    tmintt = AstroUtils.time_mjd_to_agile_seconds(tmin1)
    tmaxtt = AstroUtils.time_mjd_to_agile_seconds(tmax1)
    ag.setOptionTimeMJD(tmin=tmin1, tmax=tmax1)
    tmin2 = ag.getOption("tmin")
    tmax2 = ag.getOption("tmax")
    self.assertEqual(tmintt, tmin2)
    self.assertEqual(tmaxtt, tmax2)
    ag.destroy()
def _validateTimeInIndex(confDict):
    errors = {}

    if confDict["input"]["userestapi"] == True:
        return errors

    (first, last) = Utils._getFirstAndLastLineInFile(confDict["input"]["evtfile"])
    idxTmin = Utils._extractTimes(first)[0]
    idxTmax = Utils._extractTimes(last)[1]
    userTmin = confDict["selection"]["tmin"]
    userTmax = confDict["selection"]["tmax"]
    timetype = confDict["selection"]["timetype"]

    if timetype == "MJD":
        userTmin = AstroUtils.time_mjd_to_agile_seconds(userTmin)
        userTmax = AstroUtils.time_mjd_to_agile_seconds(userTmax)

    if float(userTmin) < float(idxTmin):
        errors["input/tmin"] = "tmin: {} is outside the time range of {} (tmin < indexTmin). Index file time range: [{}, {}]" \
            .format(userTmin, confDict["input"]["evtfile"], idxTmin, idxTmax)

    if float(userTmin) > float(idxTmax):
        errors["input/tmin"] = "tmin: {} is outside the time range of {} (tmin > indexTmax). Index file time range: [{}, {}]" \
            .format(userTmin, confDict["input"]["evtfile"], idxTmin, idxTmax)

    if float(userTmax) > float(idxTmax):
        errors["input/tmax"] = "tmax: {} is outside the time range of {} (tmax > indexTmax). Index file time range: [{}, {}]" \
            .format(userTmax, confDict["input"]["evtfile"], idxTmin, idxTmax)

    if float(userTmax) < float(idxTmin):
        errors["input/tmax"] = "tmax: {} is outside the time range of {} (tmax < indexTmin). Index file time range: [{}, {}]" \
            .format(userTmax, confDict["input"]["evtfile"], idxTmin, idxTmax)

    return errors
def setDistanceFromMapCenter(self, mapCenterL, mapCenterB):
    if self.multiAnalysis.multiDate["value"] is not None:
        sourceL = self.multiAnalysis.multiL["value"]
        sourceB = self.multiAnalysis.multiB["value"]
        if sourceL == -1:
            sourceL = self.multiAnalysis.multiStartL["value"]
        if sourceB == -1:
            sourceB = self.multiAnalysis.multiStartB["value"]
    else:
        sourceL = self.spatialModel.pos["value"][0]
        sourceB = self.spatialModel.pos["value"][1]
    self.spatialModel.dist["value"] = AstroUtils.distance(
        sourceL, sourceB, mapCenterL, mapCenterB)
def test_extract_data_log(self, logger, datacoveragepath):
    queryLOGPath = Path(__file__).absolute().parent.joinpath(
        "test_data", "test_extract_data_LOG.qfile")
    agdataset = AGDataset(logger, datacoveragepath=datacoveragepath)

    # Test - inside of multiple lines
    # =================
    #          ^
    #                =================
    #                   ^
    tmin = "2017-10-27T00:00:00"
    tmax = "2017-10-30T00:00:00"
    assert DataStatus.OK == agdataset.dataIsMissing(
        AstroUtils.time_fits_to_mjd(tmin),
        AstroUtils.time_fits_to_mjd(tmax), queryLOGPath)

    # Test - inside range
    # =================
    #    ^        ^
    tmin = "2017-10-27T00:00:00"
    tmax = "2017-10-27T05:00:00"
    assert DataStatus.OK == agdataset.dataIsMissing(
        AstroUtils.time_fits_to_mjd(tmin),
        AstroUtils.time_fits_to_mjd(tmax), queryLOGPath)

    # Test - totally missing data
    # =================
    #                     ^     ^
    tmin = "2022-01-27T00:00:00"
    tmax = "2022-01-30T00:00:00"
    assert DataStatus.MISSING == agdataset.dataIsMissing(
        AstroUtils.time_fits_to_mjd(tmin),
        AstroUtils.time_fits_to_mjd(tmax), queryLOGPath)

    # Test - partial missing data on multiple lines
    # =================
    #           ^
    #                      =================
    #                            ^
    tmin = "2018-02-09T00:00:00"
    tmax = "2018-02-25T00:00:00"
    assert DataStatus.MISSING == agdataset.dataIsMissing(
        AstroUtils.time_fits_to_mjd(tmin),
        AstroUtils.time_fits_to_mjd(tmax), queryLOGPath)
def getSourceDistance(self, source):
    mapCenterL = float(self.config.getOptionValue("glon"))
    mapCenterB = float(self.config.getOptionValue("glat"))

    if source.multi:
        sourceL = source.multi.get("multiL")
        sourceB = source.multi.get("multiB")
        if sourceL == -1:
            sourceL = source.multi.get("multiStartL")
        if sourceB == -1:
            sourceB = source.multi.get("multiStartB")
    else:
        pos = source.spatialModel.get("pos")
        sourceL = pos[0]
        sourceB = pos[1]

    self.logger.debug(
        self,
        f"sourceL {sourceL}, sourceB {sourceB}, mapCenterL {mapCenterL}, mapCenterB {mapCenterB}")

    return AstroUtils.distance(sourceL, sourceB, mapCenterL, mapCenterB)
def test_astro_utils_time_agile_seconds_to_mjd(self):
    sec_tolerance = 0.0000001
    mjd = AstroUtils.time_agile_seconds_to_mjd(507391426.9447)
    assert abs(58877.58595999 - mjd) <= sec_tolerance
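# Worked check of the fixed offset behind these conversions. Assumption
# (inferred from the numbers in this file, not from the AstroUtils source):
# AGILE seconds count TT seconds elapsed since 2004-01-01T00:00:00, i.e.
# MJD 53005, so MJD = agile_seconds / 86400 + 53005.
def test_astro_utils_agile_seconds_epoch_sketch(self):
    AGILE_EPOCH_MJD = 53005  # hypothetical constant, see comment above
    mjd = 507391426.9447 / 86400.0 + AGILE_EPOCH_MJD
    assert abs(58877.58595999 - mjd) <= 0.0000001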
def downloadData(self, tmin, tmax, dataPath, evtIndex, logIndex):
    """
    It downloads the EVT and LOG data needed to perform a scientific
    analysis from tmin to tmax (in MJD format). If the data is already
    present on disk, the download is skipped. The actual data being
    downloaded could correspond to a bigger interval than [tmin, tmax],
    because of the SSDC REST service conventions (see gridFiles).

    @param tmin: mjd
    @param tmax: mjd
    """
    if tmax > AstroUtils.time_fits_to_mjd(self.coverage_tmax):
        raise NoCoverageDataError("tmax exceeds AGILE data coverage")

    dataPath = Path(dataPath)
    evtPath = dataPath.joinpath("EVT")
    logPath = dataPath.joinpath("LOG")
    evtQfile = dataPath.joinpath("EVT.qfile")
    logQfile = dataPath.joinpath("LOG.qfile")

    evtDataMissing = False
    logDataMissing = False

    if self.dataIsMissing(tmin, tmax, evtQfile) == DataStatus.MISSING:
        self.logger.info(self, f"EVT data in interval {tmin} {tmax} is missing!")
        evtDataMissing = True
    else:
        self.logger.info(self, "Local data for EVT already in dataset")

    if self.dataIsMissing(tmin, tmax, logQfile) == DataStatus.MISSING:
        self.logger.info(self, f"LOG data in interval {tmin} {tmax} is missing!")
        logDataMissing = True
    else:
        self.logger.info(self, "Local data for LOG already in dataset")

    if evtDataMissing or logDataMissing:
        self.logger.info(self, "Downloading data from ssdc..")
        _ = self.agrest.gridList(tmin, tmax)
        tarFilePath = self.agrest.gridFiles(tmin, tmax)
        self.logger.info(self, "Extracting data from the tarball..")

        if evtDataMissing:
            extractedFiles = self.extractData("EVT", tarFilePath, dataPath)
            self.logger.debug(self, f"Extracted files: {extractedFiles}")
            self.updateQFile(evtQfile, tmin, tmax, evtQfile)
            self.generateIndex(evtPath, "EVT", evtIndex)

        if logDataMissing:
            extractedFiles = self.extractData("LOG", tarFilePath, dataPath)
            self.logger.debug(self, f"Extracted files: {extractedFiles}")
            self.updateQFile(logQfile, tmin, tmax, logQfile)
            self.generateIndex(logPath, "LOG", logIndex)

        os.remove(tarFilePath)

    return evtDataMissing or logDataMissing
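# Usage sketch (paths are hypothetical): download, if needed, the EVT/LOG
# data covering an MJD interval; returns True when a download happened and
# False when everything was already on disk.
#
#   downloaded = agdataset.downloadData(
#       58051.0, 58055.0, "/data/agile",
#       "/data/agile/EVT.index", "/data/agile/LOG.index")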
def plot(self, ax, agile_data, fermi_data, arg_lines, plotrate):
    """It plots aperture photometry data """
    # ---- AGILE ----
    tm = (AstroUtils.time_agile_seconds_to_mjd(agile_data["tstart"]) +
          AstroUtils.time_agile_seconds_to_mjd(agile_data["tstop"])) / 2
    if plotrate:
        yerr = agile_data["rateError"] * 1e8
        tw = tm - AstroUtils.time_agile_seconds_to_mjd(agile_data["tstart"])
        ax.errorbar(tm, agile_data["rate"] * 1e8, color="b", label="AGILE",
                    fmt='.', yerr=yerr, xerr=tw, linewidth=0.8)
        self.logger.info(self, f"AGILE mean {agile_data['rate'].mean()*1e8}")
        self.logger.info(self, f"AGILE median {agile_data['rate'].median()*1e8}")
        self.logger.info(self, f"AGILE std {agile_data['rate'].std()*1e8}")
        agilemean = agile_data["rate"].median() * 1e8
        agilestd = agile_data["rate"].std() * 1e8
    else:
        yerr = agile_data["rateError"] * agile_data["exp"]
        tw = tm - AstroUtils.time_agile_seconds_to_mjd(agile_data["tstart"])
        ax.errorbar(tm, agile_data["cts"], color="b", label="AGILE",
                    fmt='.', yerr=yerr, xerr=tw, linewidth=0.8)
        self.logger.info(self, f"AGILE mean {agile_data['cts'].mean()}")
        self.logger.info(self, f"AGILE median {agile_data['cts'].median()}")
        self.logger.info(self, f"AGILE std {agile_data['cts'].std()}")
        agilemean = agile_data["cts"].median()
        agilestd = agile_data["cts"].std()

    ax.axhline(agilemean, linestyle='solid', color='b', linewidth=0.5)
    ax.axhline(agilemean + 1 * agilestd, linestyle='dotted', color='b', linewidth=0.5)
    ax.axhline(agilemean + 2 * agilestd, linestyle='dashed', color='b', linewidth=0.5)
    ax.axhline(agilemean + 3 * agilestd, linestyle='dashdot', color='b', linewidth=1)

    # ---- Fermi ----
    tmFermi = (AstroUtils.time_agile_seconds_to_mjd(fermi_data["tstart"]) +
               AstroUtils.time_agile_seconds_to_mjd(fermi_data["tstop"])) / 2
    if plotrate:
        fermi_data.loc[fermi_data['rateError'] > 1000e-08, 'rateError'] = 0
        fermi_data.loc[fermi_data['rate'] > 10000e-08, 'rate'] = 0
        yerrFermi = fermi_data["rateError"] * 1e8
        twFermi = tmFermi - AstroUtils.time_agile_seconds_to_mjd(fermi_data["tstart"])
        ax.errorbar(tmFermi, fermi_data["rate"] * 1e8, color="r", label="FERMI",
                    fmt="none", yerr=yerrFermi, xerr=twFermi, linewidth=0.8)
        self.logger.info(self, f"Fermi mean {fermi_data['rate'].mean()*1e8}")
        self.logger.info(self, f"Fermi median {fermi_data['rate'].median()*1e8}")
        self.logger.info(self, f"Fermi std {fermi_data['rate'].std()*1e8}")
        fermimean = fermi_data["rate"].median() * 1e8
        fermistd = fermi_data["rate"].std() * 1e8
    else:
        yerrFermi = fermi_data["rateError"] * fermi_data["exp"]
        twFermi = tmFermi - AstroUtils.time_agile_seconds_to_mjd(fermi_data["tstart"])
        ax.errorbar(tmFermi, fermi_data["cts"], color="r", label="FERMI",
                    fmt="none", yerr=yerrFermi, xerr=twFermi, linewidth=0.8)
        self.logger.info(self, f"Fermi mean {fermi_data['cts'].mean()}")
        self.logger.info(self, f"Fermi median {fermi_data['cts'].median()}")
        self.logger.info(self, f"Fermi std {fermi_data['cts'].std()}")
        fermimean = fermi_data["cts"].mean()
        fermistd = fermi_data["cts"].std()

    ax.axhline(fermimean, linestyle='solid', color='r', linewidth=0.5)
    ax.axhline(fermimean + 1 * fermistd, linestyle='dotted', color='r', linewidth=0.5)
    ax.axhline(fermimean + 2 * fermistd, linestyle='dashed', color='r', linewidth=0.5)
    ax.axhline(fermimean + 3 * fermistd, linestyle='dashdot', color='r', linewidth=1)

    time_diff = fermi_data["tstop"] - fermi_data["tstart"]
    self.logger.info(self, f"Total time in GTI (bottom plot) {time_diff.sum()}")

    ax.ticklabel_format(axis="x", useOffset=False)
    if plotrate:
        ax.set_ylabel('Rate')
    else:
        ax.set_ylabel('Photon counts')
    ax.set_xlabel("MJD")
    ax.legend(loc='upper right', shadow=True, fontsize='xx-small')
def test_astro_utils_time_agile_seconds_to_jd(self):
    jd = AstroUtils.time_agile_seconds_to_jd(449582332)
    assert jd == pytest.approx(2458208.99921296, 0.00001)