def combine_fits_files(flist, outname='ztemp.fits'):
    """
    combine the table data of all fits files in a list into one fits file
    input:  flist   --- a list of fits file names
            outname --- the name of the combined fits file; default: ztemp.fits
    output: outname --- the combined table data fits file
    note: every fits file named in flist is deleted after combining
    """
    total = len(flist)
    if total == 1:
#
#--- a single file: just copy it to the output name
#
        os.system('cp ' + flist[0] + ' ' + outname)
    else:
#
#--- seed the output with the first two files, then fold in the rest one at a time
#
        mfo.appendFitsTable(flist[0], flist[1], outname)
        for pos in range(2, total):
            mfo.appendFitsTable(outname, flist[pos], 'out.fits')
            os.system('mv out.fits ' + outname)
#
#--- remove the individual input fits files
#
    for fits in flist:
        os.system('rm -rf ' + fits)
def combine_fits(flist, outname):
    """
    combine fits files in the list into a single table fits file
    input:  flist   --- a list of fits file names
            outname --- an output fits file name
    output: outname --- a combined fits file (name is also returned)
    note: input fits files are consumed (moved/removed) as they are appended
    """
#
#--- start the output from the first fits file; remove any stale copy first
#
    mcf.rm_files(outname)
    cmd = 'mv ' + flist[0] + ' ' + outname
    os.system(cmd)
    for k in range(1, len(flist)):
#
#--- if the append fails, skip this file; it is intentionally left on disk.
#--- narrowed from a bare "except:" so SystemExit/KeyboardInterrupt still propagate
#
        try:
            mfits.appendFitsTable(outname, flist[k], 'temp.fits')
        except Exception:
            continue
        cmd = 'mv temp.fits ' + outname
        os.system(cmd)
        cmd = 'rm -f ' + flist[k]
        os.system(cmd)
#
#--- clean up any leftover gzipped fits files in the working directory
#
    cmd = 'rm -rf *fits.gz'
    os.system(cmd)
    return outname
def update_grad_data():
    """
    recover grad msid data from the yearly deposit fits files
    input:  none, but reads module-level: catg_list, g_msid_list, lim_dict,
            grad_dir --- TODO confirm these are defined at module level
    output: recovered fits files under ./Recover/<catg>/
    """
    for k in range(0, len(catg_list)):
        catg = catg_list[k]
        print(catg)
#
#--- msid list corresponding to this category
#
        mlist = g_msid_list[k]
        cmd = ' mkdir -p ./Out/' + catg
        os.system(cmd)
        for year in range(1999, 2020):
#
#--- one-year extraction interval converted to seconds from 1998.1.1
#
            start = str(year) + ':001:00:00:00'
            stop = str(year + 1) + ':001:00:00:00'
            tstart = Chandra.Time.DateTime(start).secs
            tstop = Chandra.Time.DateTime(stop).secs
            for msid in mlist:
                for dtype in ['long', 'short']:
#
#--- short-cadence data are only recovered for 2018 and later
#
                    if dtype == 'short' and year < 2018:
                        continue
                    print(" Year: " + str(year) + " MSID: " + str(msid) + ' Dtype: ' + dtype)
                    alimit = lim_dict[msid]
                    fits = grad_dir + catg + '/' + msid + '_full_data_' + str(year) + '.fits.gz'
                    out = extract_data_from_deposit(msid, fits, tstart, tstop, dtype, alimit)
                    if out == False:
                        print("Something went wrong for " + msid + ' in year: ' + str(year))
                    else:
                        bfits = './Recover/' + catg + '/' + out
#
#--- if the recovered fits already exists, append to it; the original is kept
#--- as <name>~ until the new table is in place
#
                        if os.path.isfile(bfits):
                            mfo.appendFitsTable(bfits, out, './temp.fits')
                            cmd = 'mv -f ' + bfits + ' ' + bfits + '~'
                            os.system(cmd)
                            cmd = 'mv ./temp.fits ' + bfits
                            os.system(cmd)
                            os.system("rm -rf " + out)
                        else:
#
#--- first data for this msid/category: just move it into place
#
                            cmd = 'mv ' + out + ' ./Recover/' + catg + '/' + out
                            os.system(cmd)
def combine_fits(flist):
    """
    combine fits files in the list into 'comb_data.fits'
    input:  flist   --- a list of fits file names
    output: outname --- the combined fits file name ('comb_data.fits'), also returned
    note: input fits files are consumed (moved/removed) as they are appended
    """
    outname = 'comb_data.fits'
#
#--- the first file becomes the initial output table
#
    cmd = 'mv -f ' + flist[0] + ' ' + outname
    os.system(cmd)
    for k in range(1, len(flist)):
#
#--- if the append fails, skip this file; it is intentionally left on disk.
#--- narrowed from a bare "except:" so SystemExit/KeyboardInterrupt still propagate
#
        try:
            mfits.appendFitsTable(outname, flist[k], 'temp.fits')
        except Exception:
            continue
        cmd = 'mv temp.fits ' + outname
        os.system(cmd)
        cmd = 'rm -f ' + flist[k]
        os.system(cmd)
#
#--- clean up any leftover gzipped fits files in the working directory
#
    cmd = 'rm -rf *fits.gz'
    os.system(cmd)
    return outname
def update_data_file(dfile, msid, dtype): """ update data file input: dfile --- fits data file name msid --- msid dtype --- data type: week, short or long output: dfile --- updated fits data file """ # #--- the name of the fits file containing the new data section # if dtype == 'week': lfile = msid + '_week_data.fits' elif dtype == 'short': lfile = msid + '_short_data.fits' # #--- for the short time data, remove data older than 1.5 years #--- before appending the new data # if os.path.isfile(dfile): today = today_date_chandra() cut = today - 86400 * 548 remove_old_data_from_fits(dfile, cut) else: lfile = msid + '_data.fits' # #--- week data is just replaced, but others are appended if the past data exists # if (dtype != 'week') and os.path.isfile(dfile): mcf.rm_files('./ztemp.fits') mfo.appendFitsTable(dfile, lfile, './ztemp.fits') cmd = 'mv -f ./ztemp.fits ' + dfile os.system(cmd) mcf.rm_files(lfile) else: cmd = 'mv ' + lfile + ' ' + dfile os.system(cmd)
def extract_data_arc5gl(detector, level, filetype, tstart, tstop, sub=''):
    """
    extract data using arc5gl
    input:  detector --- detector name
            level    --- level
            filetype --- file name
            tstart   --- starting time
            tstop    --- stopping time
            sub      --- subdetector name; default "" --- no sub detector
    output: cols     --- a list of col names
            tbdata   --- a list of arrays of data; [[], []] when no data found
    """
#
#--- build the arc5gl retrieval script
#
    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ' + detector + '\n'
    if sub != '':
        line = line + 'subdetector = ' + sub + '\n'
    line = line + 'level = ' + level + '\n'
    line = line + 'filetype = ' + filetype + '\n'
    line = line + 'tstart = ' + str(tstart) + '\n'
    line = line + 'tstop = ' + str(tstop) + '\n'
    line = line + 'go\n'
    flist = mcf.run_arc5gl_process(line)
#
#--- single no-data check; the original had a second, unreachable "flen == 0"
#--- test after this return, which is removed here
#
    flen = len(flist)
    if flen < 1:
        print("\t\tNo data")
        return [[], []]
#
#--- combine the extracted fits files into ztemp.fits
#--- (range(2, flen) is empty when flen == 2, so no extra guard is needed)
#
    if flen == 1:
        cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
        os.system(cmd)
    else:
        mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
        for k in range(2, flen):
            mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
            cmd = 'mv out.fits ztemp.fits'
            os.system(cmd)
#
#--- remove individual fits files
#
    for ent in flist:
        cmd = 'rm -rf ' + ent
        os.system(cmd)
#
#--- read the combined table, clean up the temporaries, and return
#
    [cols, tbdata] = ecf.read_fits_file('ztemp.fits')
    cmd = 'rm -f ztemp.fits out.fits'
    os.system(cmd)
    return [cols, tbdata]
def update_simdiag_data(date=''):
    """
    collect sim diag msids
    input:  date ---- the date in yyyymmdd format. if not given, yesterday's date is used
            note: the date argument is currently unused; the function loops over a
            hard-coded 1999-08 through 2020-01-17 date range --- TODO confirm intent
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    #sfile = house_keeping + 'msid_list_simdiag'
    sfile = './msid_list_simsupple'
    data = mcf.read_data_file(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- build a 'yyyy-mm-dd' date list covering 1999-08 through 2020-01
#
    day_list = []
    for year in range(1999, 2021):
        cyear = str(year)
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue
            if year == 2020:
                if mon > 1:
                    break
            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
            if mcf.is_leapyear(year):
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]
            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday
                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)
    for sday in day_list:
#
#--- hard stop at this date
#
        if sday == '2020-01-17':
            break
        print("Date: " + sday)
        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'
#
#--- retrieve one day of sim level-0 data via arc5gl
#
        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = sim\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'
        flist = mcf.run_arc5gl_process(line)
        if len(flist) < 1:
            print("\t\tNo data")
            continue
#
#--- combined them
#
        flen = len(flist)
        if flen == 0:
            continue
        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)
        else:
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove indivisual fits files
#
        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)
#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')
        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
#
#--- get time data in the list form
#
        dtime = list(tbdata.field('time'))
        for k in range(0, len(cols)):
#
#---- extract data in a list form
#
            col = cols[k]
            data = list(tbdata.field(col))
#
#--- change col name to msid
#
            msid = col.lower()
#
#--- get limit data table for the msid
#
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0
            glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
            tstart = convert_time_format(start)
            tstop = convert_time_format(stop)
            update_database(msid, g_dir[msid], dtime, data, glim, pstart=tstart, pstop=tstop)
def update_eph_data(date=''): """ collect grad and comp data for trending input: date ---- the date in yyyymmdd format. if not given, yesterday's date is used output: fits file data related to grad and comp """ # #--- read group names which need special treatment # #sfile = 'eph_list' #glist = ecf.read_file_data(sfile) # #--- create msid <---> unit dictionary # [udict, ddict] = ecf.read_unit_list() # #--- read mta database # mta_db = read_mta_database() # #--- read mta msid <---> sql msid conversion list # mta_cross = read_cross_check_table() day_list = [] for year in range(1999, 2018): #---- CHANGE CHANGE CHAGE!!!!! lyear = year for mon in range(1, 13): if year == 2016 and mon < 9: continue if year == 2017 and mon > 10: continue cmon = str(mon) if mon < 10: cmon = '0' + cmon nmon = mon + 1 if nmon > 12: nmon = 1 lyear += 1 clmon = str(nmon) if nmon < 10: clmon = '0' + clmon start = str(year) + '-' + cmon + '-01T00:00:00' stop = str(lyear) + '-' + clmon + '-01T00:00:00' print "Period: " + str(start) + "<--->" + str(stop) for group in glist: print "Group: " + group # #---CHANGE THE DETECTOR/FILETYPE BEFORE RUNNING IF IT IS DIFFERENT FROM EPHHK # line = 'operation=retrieve\n' line = line + 'dataset=flight\n' line = line + 'detector=ephin\n' line = line + 'level=0\n' line = line + 'filetype=epheio\n' line = line + 'tstart=' + start + '\n' line = line + 'tstop=' + stop + '\n' line = line + 'go\n' fo = open(zspace, 'w') fo.write(line) fo.close() try: cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) except: cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) mcf.rm_file(zspace) # #--- find the names of the fits files of the day of the group # try: flist = ecf.read_file_data('ztemp_out', remove=1) flist = flist[1:] except: print "\t\tNo data" continue if len(flist) < 1: print "\t\tNo data" continue # #--- combined them # flen = len(flist) if flen == 0: continue elif flen == 1: cmd 
= 'cp ' + flist[0] + ' ./ztemp.fits' os.system(cmd) else: mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits') if flen > 2: for k in range(2, flen): mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits') cmd = 'mv out.fits ztemp.fits' os.system(cmd) # #--- remove indivisual fits files # for ent in flist: cmd = 'rm -rf ' + ent os.system(cmd) # #--- read out the data # [cols, tbdata] = ecf.read_fits_file('ztemp.fits') cmd = 'rm -f ztemp.fits out.fits' os.system(cmd) # #--- get time data in the list form # dtime = list(tbdata.field('time')) for k in range(1, len(cols)): # #--- select col name without ST_ (which is standard dev) # col = cols[k] mc = re.search('ST_', col) if mc is not None: continue mc = re.search('quality', col, re.IGNORECASE) if mc is not None: continue mc = re.search('mjf', col, re.IGNORECASE) if mc is not None: continue mc = re.search('gap', col, re.IGNORECASE) if mc is not None: continue mc = re.search('dataqual', col, re.IGNORECASE) if mc is not None: continue mc = re.search('tlm_fmt', col, re.IGNORECASE) if mc is not None: continue # #---- extract data in a list form # data = list(tbdata.field(col)) # #--- change col name to msid # msid = col.lower() # #--- get limit data table for the msid # try: tchk = convert_unit_indicator(udict[msid]) except: tchk = 0 glim = get_limit(msid, tchk, mta_db, mta_cross) # #--- update database # update_database(msid, group, dtime, data, glim)
def update_grad_and_comp_data(date=''):
    """
    collect grad and comp data for trending (python 2 script)
    input:  date ---- the data collection end date in yyyymmdd format.
            if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    sfile = house_keeping + 'mp_process_list'
    glist = ecf.read_file_data(sfile)
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- find date to read the data
#
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = find_the_last_entry_time(yesterday)
    else:
        date_list = [date]
    for day in date_list:
#
#--- find the names of the fits files of the day of the group
#
        print "Date: " + str(day)
        for group in glist:
            print "Group: " + str(group)
            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/mta*fits* > ' + zspace
            os.system(cmd)
            flist = ecf.read_file_data(zspace, remove=1)
#
#--- combined them
#
            flen = len(flist)
            if flen == 0:
                continue
            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)
            else:
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')
            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
#
#--- get time data in the list form
#
            dtime = list(tbdata.field('time'))
            for k in range(1, len(cols)):
#
#--- select col name without ST_ (which is standard dev)
#
                col = cols[k]
                mc = re.search('ST_', col)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
#
#--- change col name to msid
#
                msid = col.lower()
#
#--- get limit data table for the msid
#
                try:
                    tchk = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk = 0
                glim = get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
                update_database(msid, group, dtime, data, glim)
def update_grad_and_comp_data(date=''):
    """
    collect grad and comp data for trending (python 2 script)
    input:  date ---- the date in yyyymmdd format. if not given, yesterday's date is used
            note: the date argument is currently unused; a hard-coded 1999-08
            through 2018-02 daily range is processed --- TODO confirm intent
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    sfile = 'grad_special_list'
    glist = ecf.read_file_data(sfile)
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = read_cross_check_table()
#
#--- build a 'yyyy-mm-dd' date list covering 1999-08 through 2018-02
#
    day_list = []
    for year in range(1999, 2019):
        cyear = str(year)
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue
            if year == 2018:
                if mon > 2:
                    break
            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
            if tcnv.isLeapYear(year) == 1:
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]
            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday
                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)
    for sday in day_list:
        print "Date: " + sday
        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'
        for group in glist:
            print "Group: " + group
#
#--- retrieve one day of grad level-0.5 data for the group via arc5gl
#
            line = 'operation=retrieve\n'
            line = line + 'dataset = mta\n'
            line = line + 'detector = grad\n'
            line = line + 'level = 0.5\n'
            line = line + 'filetype = ' + group + '\n'
            line = line + 'tstart = ' + start + '\n'
            line = line + 'tstop = ' + stop + '\n'
            line = line + 'go\n'
            fo = open(zspace, 'w')
            fo.write(line)
            fo.close()
#
#--- run arc5gl; fall back to the simul copy if the ska one fails
#
            try:
                cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
                os.system(cmd)
            except:
                cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
                os.system(cmd)
            mcf.rm_file(zspace)
#
#--- find the names of the fits files of the day of the group
#
            try:
                flist = ecf.read_file_data('ztemp_out', remove=1)
                flist = flist[1:]
            except:
                print "\t\tNo data"
                continue
            if len(flist) < 1:
                print "\t\tNo data"
                continue
#
#--- combined them
#
            flen = len(flist)
            if flen == 0:
                continue
            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)
            else:
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- remove indivisual fits files
#
            for ent in flist:
                cmd = 'rm -rf ' + ent
                os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')
            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
#
#--- get time data in the list form
#
            dtime = list(tbdata.field('time'))
            for k in range(1, len(cols)):
#
#--- select col name without ST_ (which is standard dev)
#
                col = cols[k]
                mc = re.search('ST_', col)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
#
#--- change col name to msid
#
                msid = col.lower()
#
#--- get limit data table for the msid
#
                try:
                    tchk = convert_unit_indicator(udict[msid])
                except:
                    tchk = 0
                glim = get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
                update_database(msid, group, dtime, data, glim)
def recover_hrcveto_data(): """ recover hrc veto data input: none output: fits file data related to grad and comp """ # #--- read group names which need special treatment # #sfile = 'eph_list' #glist = ecf.read_file_data(sfile) glist = ['Hrcveto'] # #--- create msid <---> unit dictionary # [udict, ddict] = ecf.read_unit_list() # #--- read mta database # mta_db = read_mta_database() # #--- read mta msid <---> sql msid conversion list # mta_cross = read_cross_check_table() day_list = [] for year in range(1999, 2018): lyear = year cyear = str(year) for mon in range(1, 13): if year == 1999: if mon < 8: continue if year == 2017: if mon > 10: break cmon = str(mon) if mon < 10: cmon = '0' + cmon nmon = mon + 1 if nmon > 12: nmon = 1 lyear += 1 cnmon = str(nmon) if nmon < 10: cnmon = '0' + cnmon start = str(year) + '-' + cmon + '-01T00:00:00' stop = str(lyear) + '-' + cnmon + '-01T00:00:00' for group in glist: print "Group: " + group + ' : ' + str(start) + '<-->' + str( stop) line = 'operation=retrieve\n' line = line + 'dataset = flight\n' line = line + 'detector = hrc\n' line = line + 'level = 0\n' line = line + 'filetype = hrcss\n' line = line + 'tstart = ' + start + '\n' line = line + 'tstop = ' + stop + '\n' line = line + 'go\n' fo = open(zspace, 'w') fo.write(line) fo.close() try: cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) except: cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) mcf.rm_file(zspace) # #--- find the names of the fits files of the day of the group # try: flist = ecf.read_file_data('ztemp_out', remove=1) flist = flist[1:] except: print "\t\tNo data" continue if len(flist) < 1: print "\t\tNo data" continue # #--- combined them # flen = len(flist) if flen == 0: continue elif flen == 1: cmd = 'cp ' + flist[0] + ' ./ztemp.fits' os.system(cmd) else: mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits') if flen > 2: for k in range(2, flen): 
mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits') cmd = 'mv out.fits ztemp.fits' os.system(cmd) # #--- remove indivisual fits files # for ent in flist: cmd = 'rm -rf ' + ent os.system(cmd) # #--- read out the data for the full day # [cols, tbdata] = ecf.read_fits_file('ztemp.fits') cols = ['TLEVART', 'VLEVART', 'SHEVART'] cmd = 'rm -f ztemp.fits out.fits' os.system(cmd) # #--- get time data in the list form # dtime = list(tbdata.field('time')) for col in cols: # #---- extract data in a list form # data = list(tbdata.field(col)) # #--- change col name to msid # msid = col.lower() # #--- get limit data table for the msid # try: tchk = convert_unit_indicator(udict[msid]) except: tchk = 0 glim = get_limit(msid, tchk, mta_db, mta_cross) # #--- update database # update_database(msid, group, dtime, data, glim)
def update_simsuppl_data(date=''):
    """
    collect sim diag msids (python 2 script)
    input:  date ---- the date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to simdiag
    """
#
#--- read group names which need special treatment
#
    sfile = house_keeping + 'msid_list_simactu_supple'
    data = ecf.read_file_data(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = read_cross_check_table()
#
#--- find date to read the data
#
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = find_the_last_entry_time(yesterday)
    else:
        date_list = [date]
    for sday in date_list:
        print "Date: " + sday
        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'
#
#--- retrieve one day of sim level-0 data via arc5gl
#
        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = sim\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'
        fo = open(zspace, 'w')
        fo.write(line)
        fo.close()
#
#--- run arc5gl; fall back to the simul copy if the ska one fails
#
        try:
            cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)
        except:
            cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)
        mcf.rm_file(zspace)
#
#--- find the names of the fits files of the day of the group
#
        try:
            flist = ecf.read_file_data('ztemp_out', remove=1)
            flist = flist[1:]
        except:
            print "\t\tNo data"
            continue
        if len(flist) < 1:
            print "\t\tNo data"
            continue
#
#--- combined them
#
        flen = len(flist)
        if flen == 0:
            continue
        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)
        else:
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove indivisual fits files
#
        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)
#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')
        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
#
#--- get time data in the list form
#
        dtime = list(tbdata.field('time'))
        for k in range(0, len(cols)):
#
#--- select col name without ST_ (which is standard dev)
#
            col = cols[k]
#
#---- extract data in a list form
#
            data = list(tbdata.field(col))
#
#--- change col name to msid
#
            msid = col.lower()
#
#--- get limit data table for the msid
#
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0
            glim = get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
            update_database(msid, g_dir[msid], dtime, data, glim)
def extract_data_arc5gl(detector, level, filetype, tstart, tstop, sub=''): """ extract data using arc5gl input: detector --- detector name level --- level filetype --- file name tstart --- starting time tstop --- stopping time sub --- subdetector name; defalut "" --- no sub detector output: cols --- a list of col name tdata --- a list of arrays of data """ # #--- extract ephin hk lev 0 fits data # line = 'operation=retrieve\n' line = line + 'dataset = flight\n' line = line + 'detector = ' + detector + '\n' if sub != '': line = line + 'subdetector = ' + sub + '\n' line = line + 'level = ' + level + '\n' line = line + 'filetype = ' + filetype + '\n' line = line + 'tstart = ' + str(tstart) + '\n' line = line + 'tstop = ' + str(tstop) + '\n' line = line + 'go\n' fo = open(zspace, 'w') fo.write(line) fo.close() try: cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) except: cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) mcf.rm_file(zspace) # #--- find the names of the fits files of the day of the group # try: flist = ecf.read_file_data('ztemp_out', remove=1) flist = flist[1:] except: print "\t\tNo data" #continue return [[], []] if len(flist) < 1: print "\t\tNo data" #continue return [[], []] # #--- combined them # flen = len(flist) if flen == 0: #continue return [[], []] elif flen == 1: cmd = 'cp ' + flist[0] + ' ./ztemp.fits' os.system(cmd) else: mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits') if flen > 2: for k in range(2, flen): mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits') cmd = 'mv out.fits ztemp.fits' os.system(cmd) # #--- remove indivisual fits files # for ent in flist: cmd = 'rm -rf ' + ent os.system(cmd) # #--- return data # [cols, tbdata] = ecf.read_fits_file('ztemp.fits') cmd = 'rm -f ztemp.fits out.fits' os.system(cmd) return [cols, tbdata]
def update_comp_data(gname, msid_list, eyear, etime):
    """
    update comp/grad related msid data
    input:  gname     --- group name
            msid_list --- a list of comp msids
            eyear     --- the last data entry year
            etime     --- today's date in seconds from 1998.1.1
    output: <data_dir>/<gname>/<msid>_<dtye>_data.fits
    """
    for msid in msid_list:
#
#--- set sub-directory depending on msids; a group name containing 'grad' but
#--- not 'comp' goes to grad_dir, everything else to comp_dir
#
        mc = re.search('grad', gname.lower())
        mc2 = re.search('comp', gname.lower())
        if mc is not None:
            if mc2 is not None:
                sub_dir = comp_dir
            else:
                sub_dir = grad_dir
        else:
            sub_dir = comp_dir
#
#--- set limit data (a list of lists of limit values)
#
        alimit = lim_dict[msid]
        for dtype in ['long', 'short', 'week']:
            if dtype == 'long':
                ofile = msid + '_data.fits'
            else:
                ofile = msid + '_' + dtype + '_data.fits'
#
#--- database file name
#
            dfile = data_dir + gname + '/' + ofile
#
#--- find the last entry time
#
            tstart = find_last_entry_time(dfile, dtype, etime)
            out = Chandra.Time.DateTime(tstart).date
            atemp = re.split(':', out)
            syear = int(float(atemp[0]))
#
#--- to cover the case that the data collection time goes over two year, go between syear and eyear
#
            for year in range(syear, eyear + 1):
                print(" Year: " + str(year) + " MSID: " + str(msid) + ' Dtype: ' + dtype)
#
#--- set the input fits file name; fall back to the gzipped copy if needed
#
                fits = sub_dir + gname + '/' + msid + '_full_data_' + str(year) + '.fits'
                if not os.path.isfile(fits):
                    fits = sub_dir + gname + '/' + msid + '_full_data_' + str(year) + '.fits.gz'
                    if not os.path.isfile(fits):
                        continue
#
#--- extract the data part needed and save in a fits file
#
                out = extract_data_from_deposit(msid, fits, tstart, etime, dtype, alimit)
                if out == False:
                    print("Something went wrong for " + msid + ' in year: ' + str(year))
                    continue
                else:
#
#--- append the new data part to the database; keep the previous database as
#--- <dfile>~ until the new one is confirmed in place
#
                    if os.path.isfile(dfile):
                        mfo.appendFitsTable(dfile, out, './temp.fits')
                        cmd = 'mv -f ' + dfile + ' ' + dfile + '~'
                        os.system(cmd)
                        try:
                            cmd = 'mv ./temp.fits ' + dfile
                            os.system(cmd)
                        except:
                            pass
#
#--- check the file is actually updated. if not put back the old one
#
                        if os.path.isfile(dfile):
                            cmd = 'rm -rf ' + dfile + '~'
                            os.system(cmd)
                        else:
                            cmd = 'mv -f ' + dfile + '~ ' + dfile
                            os.system(cmd)
                        os.system('rm -rf ' + out)
#
#--- for the short time data, remove data older than 1.5 years
#--- for the week data, remove data older than 7 days
#
                        if dtype == 'short':
                            cut = etime - 86400 * 548
                            remove_old_data_from_fits(dfile, cut)
                        elif dtype == 'week':
                            cut = etime - 86400 * 7
                            remove_old_data_from_fits(dfile, cut)
                    else:
#
#--- no existing database: the extracted section becomes the database
#
                        cmd = 'mv ' + out + ' ' + dfile
                        os.system(cmd)
def extract_hrcveto_data(): """ extract hrc veto data input: none output: fits file data related to grad and comp """ # #--- read group names which need special treatment # glist = ['Hrcveto'] # #--- create msid <---> unit dictionary # [udict, ddict] = ecf.read_unit_list() # #--- read mta database # mta_db = read_mta_database() # #--- read mta msid <---> sql msid conversion list # mta_cross = read_cross_check_table() # #--- find the date to be filled # day_list = find_the_last_entry_time() for sday in day_list: print "Date: " + sday start = sday + 'T00:00:00' stop = sday + 'T23:59:59' for group in glist: print "Group: " + group line = 'operation=retrieve\n' line = line + 'dataset = flight\n' line = line + 'detector = hrc\n' line = line + 'level = 0\n' line = line + 'filetype = hrcss\n' line = line + 'tstart = ' + start + '\n' line = line + 'tstop = ' + stop + '\n' line = line + 'go\n' fo = open(zspace, 'w') fo.write(line) fo.close() try: cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) except: cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out' os.system(cmd) mcf.rm_file(zspace) # #--- find the names of the fits files of the day of the group # try: flist = ecf.read_file_data('ztemp_out', remove=1) flist = flist[1:] except: print "\t\tNo data" continue if len(flist) < 1: print "\t\tNo data" continue # #--- combined them # flen = len(flist) if flen == 0: continue elif flen == 1: cmd = 'cp ' + flist[0] + ' ./ztemp.fits' os.system(cmd) else: mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits') if flen > 2: for k in range(2, flen): mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits') cmd = 'mv out.fits ztemp.fits' os.system(cmd) # #--- remove indivisual fits files # for ent in flist: cmd = 'rm -rf ' + ent os.system(cmd) # #--- read out the data for the full day # [cols, tbdata] = ecf.read_fits_file('ztemp.fits') cols = ['TLEVART', 'VLEVART', 'SHEVART'] cmd = 'rm -f ztemp.fits out.fits' 
os.system(cmd) # #--- get time data in the list form # dtime = list(tbdata.field('time')) for col in cols: # #---- extract data in a list form # data = list(tbdata.field(col)) # #--- change col name to msid # msid = col.lower() # #--- get limit data table for the msid # try: tchk = convert_unit_indicator(udict[msid]) except: tchk = 0 glim = get_limit(msid, tchk, mta_db, mta_cross) # #--- update database # update_database(msid, group, dtime, data, glim)
def update_simdiag_data(date=''):
    """
    collect sim diag msids (python 2 script)
    input:  date ---- the date in yyyymmdd format. if not given, yesterday's date is used
            note: the date argument is currently unused; a hard-coded 1999-08
            through 2018-01-18 daily range is processed --- TODO confirm intent
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    #sfile = house_keeping + 'msid_list_simdiag'
    sfile = './msid_list_simdiag'
    data = ecf.read_file_data(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(str(atemp[0]))
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = read_cross_check_table()
#
#--- build a 'yyyy-mm-dd' date list covering 1999-08 through 2017-12
#
    day_list = []
    for year in range(1999, 2018):
        cyear = str(year)
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue
            #if year == 2018:
            #    if mon > 1:
            #        break
            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
            if tcnv.isLeapYear(year) == 1:
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]
            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday
                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)
    for sday in day_list:
#
#--- hard stop at this date
#
        if sday == '2018-01-18':
            break
        print "Date: " + sday
        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'
#
#--- retrieve one day of sim level-0 simdiag data via arc5gl
#
        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = simdiag\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'
        fo = open(zspace, 'w')
        fo.write(line)
        fo.close()
#
#--- run arc5gl; fall back to the simul copy if the ska one fails
#
        try:
            cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)
        except:
            cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)
        mcf.rm_file(zspace)
#
#--- find the names of the fits files of the day of the group
#
        try:
            flist = ecf.read_file_data('ztemp_out', remove=1)
            flist = flist[1:]
        except:
            print "\t\tNo data"
            continue
        if len(flist) < 1:
            print "\t\tNo data"
            continue
#
#--- combined them
#
        flen = len(flist)
        if flen == 0:
            continue
        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)
        else:
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove indivisual fits files
#
        cmd = 'chmod 777 *.fits.gz'
        os.system(cmd)
        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)
#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')
        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
#
#--- get time data in the list form
#
        dtime = list(tbdata.field('time'))
#
#--- NOTE(review): range(0, 1) processes only cols[0]; the other entries read
#--- from the msid list are never updated --- confirm this is intentional
#
        for k in range(0, 1):
#
#--- select col name without ST_ (which is standard dev)
#
            col = cols[0]
#
#---- extract data in a list form
#
            data = list(tbdata.field(col))
#
#--- change col name to msid
#
            msid = col.lower()
#
#--- get limit data table for the msid
#
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0
            glim = get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
            tstart = convert_time_format(start)
            tstop = convert_time_format(stop)
            update_database(msid, g_dir[msid], dtime, data, glim, pstart=tstart, pstop=tstop)