def compute_sim_flex():
    """
    compute the difference between sim flex temp and set point
    input:  none, but read data from archive
    output: <msid>_data.fits/<msid>_short_data.fits/<msid>_week_data.fits
    """
#
#--- set a couple of values/lists
#
    group     = 'Compsimoffset'

    msid_list = ['flexadif', 'flexbdif', 'flexcdif']

    msid_sub  = [['flexadif', '3faflaat', '3sflxast', '-'],\
                 ['flexbdif', '3faflbat', '3sflxbst', '-'],\
                 ['flexcdif', '3faflcat', '3sflxcst', '-']]
#
#--- create msid <---> unit dictionary
#--- (fixed: the original never defined udict, so the unit lookup below always
#---  fell into the except branch and tchk was silently forced to 0)
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()

    for msid in msid_list:
#
#--- get limit data table for the msid
#
        try:
            tchk = ecf.convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = get_limit_for_acis_power(msid, mta_db)
#
#--- update database
#
        uds.run_update_with_ska(msid, group, msid_sub_list=msid_sub, glim=glim)
def compute_acis_power():
    """
    compute acis power from existing msid values and update database
    input:  none, but read data from archive
    output: <msid>_data.fits/<msid>_short_data.fits/<msid>_week_data.fits
    """
#
#--- group name and the computation recipes: [msid, msid_1, msid_2, operand]
#
    group    = 'Compacispwr'

    msid_sub = [['1dppwra', '1dp28avo', '1dpicacu', '*'],
                ['1dppwrb', '1dp28bvo', '1dpicbcu', '*']]
#
#--- msid <---> unit dictionary and the mta limit database
#
    [udict, ddict] = ecf.read_unit_list()
    mta_db          = ecf.read_mta_database()

    for msid in ['1dppwra', '1dppwrb']:
#
#--- find the limit table of this msid
#
        try:
            tchk = ecf.convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = get_limit_for_acis_power(msid, mta_db)
#
#--- update the database
#
        uds.update_week_database(msid, group, glim, msid_sub = msid_sub)
def run_update_with_ska(msid, group, msid_sub_list=None, glim=''):
    """
    extract data from ska database and update the data for the msids in the msid_list
    input:  msid            --- a msid
            group           --- the group of the msids
            msid_sub_list   --- a list of lists of:
                                [msid, msid_1, msid_2, operand]
                                this is used to compute the first msid from following two
                                msid values with operand (+/-/*)
            glim            --- glim usually found in this function, but you can give it; default: ''
    output: <msid>_data.fits, <msid>_short_data.fits, <msid>_week_data.fits
    """
#
#--- fixed: avoid a shared mutable default argument
#
    if msid_sub_list is None:
        msid_sub_list = []
#
#--- get basic information dict/list
#
    [udict, ddict, mta_db, mta_cross] = ecf.get_basic_info_dict()
#
#--- set starting and stopping data period
#
    test_fits = data_dir + group.capitalize() + '/' + msid + '_data.fits'
#
#--- without the existing fits file there is no way to find the restart time;
#--- the original fell through and crashed with NameError on tstart --- exit cleanly instead
#
    if not os.path.isfile(test_fits):
        exit(1)

    tstart = ecf.find_the_last_entry_time(test_fits)

    ttemp  = time.strftime("%Y:%j:00:00:00", time.gmtime())
    tstop  = Chandra.Time.DateTime(ttemp).secs - 86400.0

    if tstop < tstart:
        exit(1)

    if len(msid_sub_list) != 0:
        [dtime, tdata] = compute_sub_msid(msid, msid_sub_list, tstart, tstop)
    else:
        out   = fetch.MSID(msid, tstart, tstop)
        ok    = ~out.bads
        dtime = out.times[ok]
        tdata = out.vals[ok]
#
#--- fetch occasionally adds -999.xxx to the output data of some msids; remove them (Jun 13, 2018)
#--- (fixed: use the boolean mask directly instead of wrapping it in a one-element list)
#
    tind  = (tdata > -999) | (tdata <= -1000)
    dtime = dtime[tind]
    tdata = tdata[tind]
#
#--- get limit data table for the msid
#
    if glim == '':
        try:
            tchk = ecf.convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        if msid in sp_limt_case_c:
            tchk = 1

        glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
    update_database(msid, group, dtime, tdata, glim)
def extract_hrcveto_data():
    """
    extract hrc veto data
    input:  none
    output: fits file data related to grad and comp
    """
#
#--- basic setting for this group
#
    group = 'Hrcveto'
    cols  = ['TLEVART', 'VLEVART', 'SHEVART']

    [udict, ddict, mta_db, mta_cross] = ecf.get_basic_info_dict()
#
#--- determine the period to be filled: from the last entry to half a day ago
#
    last_t = ecf.find_the_last_entry_time(testfits)
    start  = Chandra.Time.DateTime(last_t).date

    today  = time.strftime("%Y:%j:00:00:00", time.gmtime())
    cut_t  = Chandra.Time.DateTime(today).secs - 43200.0
    stop   = Chandra.Time.DateTime(cut_t).date

    print("Group: " + group + ': ' + str(start) + '<-->' + str(stop))

    [xxx, tbdata] = uds.extract_data_arc5gl('hrc', '0', 'hrcss', start, stop)
#
#--- time column in a list form
#
    dtime = list(tbdata.field('time'))

    for col in cols:
#
#--- column data in a list form; the msid is the lower-cased column name
#
        data = list(tbdata.field(col))
        msid = col.lower()
#
#--- limit table for this msid
#
        try:
            tchk = ecf.convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
        uds.update_database(msid, group, dtime, data, glim)
def get_data(msid, start, stop):
    """
    extract data of a given msid from the ska archive
    input:  msid    --- msid
            start   --- start time
            stop    --- stop time
    output: ttime   --- a list of time data
            tdata   --- a list of data
    """
#    start = ecf.check_time_format(start)
#    stop  = ecf.check_time_format(stop)
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- get limit data table for the msid
#--- NOTE(review): glim is computed but not used by this function; kept for
#--- consistency with the sibling extraction functions --- confirm before removing
#
    try:
        uck = udict[msid]
        if uck.lower() == 'k':
            tchk = 1
        else:
#
#--- fixed: was "uchk" (undefined name), which raised NameError and silently
#--- forced tchk to 0 through the except branch
#
            tchk = ecf.convert_unit_indicator(uck)
    except:
        tchk = 0

    glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- extract data from archive; return empty lists when the extraction fails
#
    try:
        out   = fetch.MSID(msid, start, stop)
        tdata = out.vals
        ttime = out.times
    except:
        tdata = []
        ttime = []

    return [ttime, tdata]
def run_update_with_archive(msid_list, group, date_list, detector, level, filetype, tstart, tstop, sub=''):
    """
    extract data using arc5gl and update the data for the msids in the msid_list
    input:  msid_list   --- the name of the list of msids
            group       --- a group name
            date_list   --- a list of date to be processed in the form of <yyyy>:<ddd>
            detector    --- detector name
            level       --- level
            filetype    --- file name
            tstart      --- starting time
            tstop       --- stopping time
            sub         --- subdetector name; default "" --- no sub detector
    output: <msid>_data.fits, <msid>_short_data.fits, <msid>_week_data.fits
    """
#
#--- get basic information dict/list
#
    [udict, ddict, mta_db, mta_cross] = ecf.get_basic_info_dict()
#
#--- extract data using arc5gl
#--- (fixed: pass the caller's sub detector through; the original hard-coded sub='')
#
    [cols, tbdata] = extract_data_arc5gl(detector, level, filetype, tstart, tstop, sub=sub)
#
#--- get time data in the list form
#
    dtime = list(tbdata.field('time'))

    for col in msid_list:
#
#---- extract data in a list form
#
        data = list(tbdata.field(col))
#
#--- change col name to msid
#
        msid = col.lower()
#
#--- get limit data table for the msid
#
        try:
            tchk = ecf.convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
        update_database(msid, group, dtime, data, glim)
def update_simsuppl_data(date=''):
    """
    collect sim msids data
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to sim
    """
#
#--- read the msid list; each line carries: <msid> <group name>
#
    sfile = house_keeping + 'msid_list_simactu_supple'

    cols  = []
    g_dir = {}
    for ent in mcf.read_data_file(sfile):
        parts = re.split('\s+', ent)
        cols.append(parts[0])
        g_dir[parts[0]] = parts[1]
#
#--- get the basic information
#
    [udict, ddict, mta_db, mta_cross] = ecf.get_basic_info_dict()
#
#--- decide which dates to process
#
    if date == '':
        date_list = ecf.create_date_list_to_yestaday(testfits)
    else:
        date_list = [date]

    for sday in date_list:
#
#--- convert yyyymmdd into yyyy-mm-dd and set the one-day extraction window
#
        sday  = sday[:4] + '-' + sday[4:6] + '-' + sday[6:]
        print("Date: " + sday)

        start = sday + 'T00:00:00'
        stop  = sday + 'T23:59:59'

        [xxx, tbdata] = extract_data_arc5gl('sim', '0', 'sim', start, stop)
#
#--- time column in a list form
#
        dtime = list(tbdata.field('time'))

        for col in cols:
#
#--- column data in a list form; the msid is the lower-cased column name
#
            data = list(tbdata.field(col))
            msid = col.lower()
#
#--- limit table for this msid
#
            try:
                tchk = ecf.convert_unit_indicator(udict[msid])
            except:
                tchk = 0

            glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
            update_database(msid, g_dir[msid], dtime, data, glim)
def update_grad_and_comp_data(date=''):
    """
    collect grad and comp data for trending
    input:  date    ---- the data collection end date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    sfile = house_keeping + 'mp_process_list'
    glist = ecf.read_file_data(sfile)
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- find date to read the data
#
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
#
#--- NOTE(review): a list of dates is expected here, but the sibling function
#--- update_eph_data_from_comm uses create_date_list(yesterday) --- confirm that
#--- find_the_last_entry_time really returns an iterable of dates
#
        date_list = find_the_last_entry_time(yesterday)
    else:
        date_list = [date]

    for day in date_list:
#
#--- find the names of the fits files of the day of the group
#--- (fixed: python 2 print statements converted to python 3 calls)
#
        print("Date: " + str(day))

        for group in glist:
            print("Group: " + str(group))

            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/mta*fits* > ' + zspace
            os.system(cmd)

            flist = ecf.read_file_data(zspace, remove=1)
#
#--- combined them
#
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
#
#--- get time data in the list form
#
            dtime = list(tbdata.field('time'))

            for k in range(1, len(cols)):
#
#--- select col name without ST_ (which is standard dev)
#
                col = cols[k]
                mc  = re.search('ST_', col)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
#
#--- change col name to msid
#
                msid = col.lower()
#
#--- get limit data table for the msid
#
                try:
                    tchk = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk = 0
#
#--- NOTE(review): other functions in this file call ecf.get_limit; confirm that a
#--- module-local get_limit exists before changing this line
#
                glim = get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
                update_database(msid, group, dtime, data, glim)
def update_eph_data_from_comm(date = ''):
    """
    collect eph data for trending
    input:  date    ---- the data collection end date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#--- (the list file read is currently disabled; only ephhk is processed)
#
    #sfile = house_keeping + 'eph_list'
    #glist = mcf.read_data_file(sfile)
    glist = ['ephhk',]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- find date(s) to read the data; default is a list ending at yesterday
#
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = create_date_list(yesterday)
    else:
        date_list = [date]

    error_message = ''
    for day in date_list:
#
#--- find the names of the fits files of the day of the group
#
        dline = "Date: " + str(day)
        print(dline)

        for group in glist:
            print("Group: " + str(group))
#
#--- list everything in the day/group report directory, then keep only the
#--- static eio0 hk fits files
#
            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/* > ' + zspace
            os.system(cmd)

            tlist = mcf.read_data_file(zspace, remove=1)
            flist = []
            for ent in tlist:
                mc = re.search('_STephhk_static_eio0.fits', ent)
                if mc is not None:
                    flist.append(ent)
#
#--- combined them into a single temporary fits file, ztemp.fits
#
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                mcf.rm_files('ztemp.fits')
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day, then drop the temporary files
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
#
#--- get time data in the list form
#
            dtime = list(tbdata.field('time'))

            for k in range(1, len(cols)):
#
#--- select col name without ST_ (which is standard dev) and skip quality columns
#
                col = cols[k]
                mc  = re.search('ST_', col)
                if mc is not None:
                    continue
                mc  = re.search('quality', col, re.IGNORECASE)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
#
#--- change col name to msid
#
                msid = col.lower()
#
#--- get limit data table for the msid
#
                try:
                    tchk = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk = 0

                glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database; a non-empty return string signals a problem with this msid
#
                wline = uds.update_database(msid, group.capitalize(), dtime, data, glim)
                if wline != "":
                    error_message = error_message + dline + '\n' + wline
#
#--- if there are errors, sending error message
#
    if error_message != "":
        error_message = 'MTA limit trend EPH got problems: \n' + error_message

        fo = open(zspace, 'w')
        fo.write(error_message)
        fo.close()
        cmd = 'cat ' + zspace + ' | mailx -s "Subject: EPH data update problem "'
        cmd = cmd + '*****@*****.**'
        os.system(cmd)
        mcf.rm_files(zspace)
def create_interactive_page(msid, group, mtype, start, stop, step):
    """
    create an interactive html page for a given msid
    input:  msid    --- msid
            group   --- group name
            mtype   --- mid, mde, min, or max
            start   --- start time
            stop    --- stop time
            step    --- bin size in seconds
    """
    start = ecf.check_time_format(start)
    stop  = ecf.check_time_format(stop)
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- get limit data table for the msid
#--- NOTE(review): tchk/mta_db/mta_cross are not used below (glim comes from
#--- make_glim); kept for consistency with the other page builders --- confirm
#
    try:
        uck = udict[msid]
        if uck.lower() == 'k':
            tchk = 1
        else:
#
#--- fixed: was "uchk" (undefined name), which raised NameError and silently
#--- forced tchk to 0 through the except branch
#
            tchk = ecf.convert_unit_indicator(uck)
    except:
        tchk = 0

    glim = make_glim(msid)
#
#--- extract data from archive
#
    chk = 0
    try:
        [ttime, tdata] = rf.get_data(msid, start, stop)
    except:
#
#--- if no data in archive, try mta local database
#
        try:
            [ttime, tdata] = get_mta_fits_data(msid, group, start, stop)
#
#--- if it is also failed, return the empty data set
#
        except:
            chk   = 1
            ttime = []
            tdata = []
#
#--- only short_p can change step size (by setting "step")
#
    if chk == 0:
        data_p = process_day_data(msid, ttime, tdata, glim, step=step)
    else:
        data_p = 'na'
#
#--- create interactive html page
#
    create_html_page(msid, group, data_p, mtype, step)
def create_interactive_page(msid, group, start, stop, step):
    """
    create an interactive html page for a given msid
    input:  msid    --- msid
            group   --- group name
            start   --- start time
            stop    --- stop time
            step    --- bin size in seconds
    """
    start = ecf.check_time_format(start)
    stop  = ecf.check_time_format(stop)
#
#--- unit dictionary / mta database / mta <---> sql cross reference table
#
    [udict, ddict] = ecf.read_unit_list()
    mta_db    = ecf.read_mta_database()
    mta_cross = ecf.read_cross_check_table()
#
#--- limit table for this msid
#
    try:
        tchk = ecf.convert_unit_indicator(udict[msid])
    except:
        tchk = 0

    glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- try the ska archive first; if that fails, fall back to the mta local database
#
    no_data = 0
    try:
        out   = fetch.MSID(msid, start, stop)
        tdata = out.vals
        ttime = out.times
    except:
        try:
            [ttime, tdata] = uds.get_mta_fits_data(msid, start, stop)
        except:
#
#--- both sources failed: mark that there is nothing to plot
#
            no_data = 1

    if no_data == 0:
#
#--- only short_p can change step size (by setting "step")
#
        [week_p, short_p, long_p] = uds.process_day_data(msid, ttime, tdata, glim, step=step)
#
#--- build the interactive fits file; 'na' when the data is a mta special case
#
        try:
            fits_data = create_inter_fits(msid, short_p)
        except:
            fits_data = 'na'
    else:
        fits_data = 'na'
#
#--- create interactive html page
#
    create_html_page(msid, fits_data, step)
#
#--- remove the temporary fits file
#
    if fits_data != 'na':
        cmd = 'rm -rf ' + fits_data
        os.system(cmd)
def tephin_leak_data_update(year=''):
    """
    update tephin - ephin rate/leak current data
    input:  year    --- year of the data to be updated. if it is '', the current year is used
    output: <data_dir>/<msid>/<msid>_data_<year>.fits
    """
#
#--- set data extraction period; a 6 element return means the span crosses a year boundary
#
    tout = set_time_period(year)
    if len(tout) == 6:
        [ltstart, ltstop, lyear, tstart, tstop, year] = tout
        chk = 1
    else:
        [tstart, tstop, year] = tout
        chk = 0
#
#--- get the basic information
#
    [udict, ddict, mta_db, mta_cross] = ecf.get_basic_info_dict()
#
#--- extract tephin data
#
    tchk = ecf.convert_unit_indicator(udict['tephin'])
    glim = ecf.get_limit('tephin', tchk, mta_db, mta_cross)
#
#--- for the case the time span goes over the year boundary
#
    if chk == 1:
        ltephin = update_database('tephin', 'Eleak', glim, ltstart, ltstop, lyear)

    tephin = update_database('tephin', 'Eleak', glim, tstart, tstop, year)
#
#--- read msid list
#
    mfile = house_keeping + 'msid_list_eph_tephin'
    data  = mcf.read_data_file(mfile)

    for ent in data:
#
#--- skip comment lines; each data line carries: <msid> <group>
#
        mc = re.search('#', ent)
        if mc is not None:
            continue

        atemp = re.split('\s+', ent)
#
#--- fixed: the original called msid.strip()/group.strip() and discarded the
#--- results (strings are immutable); assign the stripped values
#
        msid  = atemp[0].strip()
        group = atemp[1].strip()
#
#--- get limit data table for the msid
#
        try:
            tchk = ecf.convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- probe a short known period to check the msid exists in the ska archive;
#--- if not, record it in the missing_data list and skip
#
        try:
            out = fetch.MSID(msid, '2017:001:00:00:00', '2017:002')
            print("MSID: " + msid)
        except:
            missed = house_keeping + '/missing_data'
            with open(missed, 'a') as fo:
                fo.write(msid + '\n')
            continue
#
#--- for the case, the time span goes over the year boundary
#
        if chk == 1:
            update_database(msid, group, glim, ltstart, ltstop, lyear, sdata=ltephin)
        try:
            update_database(msid, group, glim, tstart, tstop, year, sdata=tephin)
        except:
            pass