def make_glim(msid):
    """
    create limit list in glim format
    input:  msid    --- msid
    output: glim    --- a list of lists of:
                        [<start time> <stop time> <lower yellow> <upper yellow> <lower red> <upper red>]
                        time is in seconds from 1998.1.1
    """
    msid = msid.lower()
    [limit_dict, cnd_dict] = rlt.get_limit_table()

    out  = limit_dict[msid]
    glim = []
    for ent in out:
#
#--- skip the entry if it does not carry a plain ('none' condition) limit set
#
        try:
            lim_list = ent[3]['none']
        except:
            continue

        temp = [ent[0], ent[1], lim_list[0], lim_list[1], lim_list[2], lim_list[3]]
        glim.append(temp)
#
#--- if no limits were found, fall back to a single wide-open dummy limit set
#
    if len(glim) == 0:
        glim = [[0, 3218831995, -9e6, 9e6, -9e6, 9e6]]

    return glim
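
#
#--- the sketch below is illustrative only and not part of the original module:
#--- it shows how a glim entry returned by make_glim() might be consumed to
#--- classify a data value against the yellow/red limits at a given time.
#
def classify_value(glim, ctime, value):
    """
    (hypothetical helper) classify a value against a glim limit list
    input:  glim    --- a limit list created by make_glim()
            ctime   --- time in seconds from 1998.1.1
            value   --- the data value to test
    output: 'green', 'yellow', 'red', or 'none' if no limit period matches
    """
    for tstart, tstop, y_low, y_high, r_low, r_high in glim:
        if tstart <= ctime < tstop:
            if value < r_low or value > r_high:
                return 'red'
            if value < y_low or value > y_high:
                return 'yellow'
            return 'green'

    return 'none'
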
def run_data_update(mtype, catg_dict):
    """
    extract data for the specified limit category type
    input:  mtype       --- limit state type; m: multi state / n: no state
            catg_dict   --- a dictionary of msid <---> category
    output: updated data fits files
    """
    [lim_dict, cnd_dict] = rlt.get_limit_table()

#    if mtype == 'm':
#        ifile = limit_dir + 'Limit_data/multi_switch_limit'
#    else:
#        ifile = limit_dir + 'Limit_data/trend_limit'

    ifile = limit_dir + 'Limit_data/op_limits_new.db'
#
#--- first find which msids are in that category, and extract data
#
    data = mcf.read_data_file(ifile)
    for ent in data:
        if ent[0] == '#':
            continue

        atemp = re.split(r'\s+', ent)
        msid  = atemp[0]
        catg  = catg_dict[msid]
#
#--- just in case the data category directory does not exist
#
        cmd = 'mkdir -p ' + data_dir + atemp[1]
        os.system(cmd)

        print("MSID: " + catg + '/' + msid)
#
#--- three different data lengths
#
        for dtype in ['week', 'short', 'long']:
#
#--- set data period
#
            [dfile, start, stop] = find_data_collection_period(msid, catg, dtype)
#
#--- extract new data part; saved as a local fits file
#
            alimit   = lim_dict[msid]
            cnd_msid = cnd_dict[msid]
            out      = extract_data_from_ska(msid, start, stop, dtype, alimit, cnd_msid)
#
#--- update the main fits file: either move the local file or append the new part
#
            if out == True:
                update_data_file(dfile, msid, dtype)
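
#
#--- illustrative only: run_data_update() expects catg_dict to map each msid to
#--- its category directory name; the msid/category values below are made up
#--- for illustration and are not taken from the actual limit tables.
#
#   catg_dict = {'1crat': 'Acistemp', 'aacccdpt': 'Aca'}
#   run_data_update('n', catg_dict)
#
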
def set_warning_area(msid, xmin, xmax, ymin, ymax, byear):
    """
    create warning area for plotting
    input:  msid    --- msid
            xmin    --- min x
            xmax    --- max x
            ymin    --- min y
            ymax    --- max y
            byear   --- the base year
    output: t_save  --- a list of starting and stopping times in ydate
            bt_lim  --- a list of bottom values; usually 0, but can be ymin
            lr_lim  --- a list of lower red limits
            ly_lim  --- a list of lower yellow limits
            uy_lim  --- a list of upper yellow limits
            ur_lim  --- a list of upper red limits
            tp_lim  --- a list of top values; usually 9e9, but can be ymax
    """
    msid = msid.lower()
    [limit_dict, cnd_dict] = rlt.get_limit_table()

    bval = 0.
    if bval > ymin:
        bval = ymin

    tval = 9e9
    if tval < ymax:
        tval = ymax

    try:
        out      = limit_dict[msid]
        cnd_msid = cnd_dict[msid]

        t_save = []
        bt_lim = []
        lr_lim = []
        ly_lim = []
        uy_lim = []
        ur_lim = []
        tp_lim = []
        chk    = 0
        dlen   = len(out)
        for ent in out:
#
#--- use only the plain ('none' condition) limit set
#
            try:
                lim_list = ent[3]['none']
            except:
                continue

            x1 = chandratime_to_yday(ent[0], byear)
            x2 = chandratime_to_yday(ent[1] - 1.0, byear)
            if x2 < xmin:
                continue
            if x1 < xmin:
                x1 = xmin
            if x1 < xmax and x2 >= xmax:
                x2  = xmax
                chk = 1

            t_save.append(x1)
            t_save.append(x2)
#
#--- each limit value is entered twice: once for x1 and once for x2
#
            for k in range(0, 2):
                bt_lim.append(bval)
                ly_lim.append(lim_list[0])
                uy_lim.append(lim_list[1])
                lr_lim.append(lim_list[2])
                ur_lim.append(lim_list[3])
                tp_lim.append(tval)

            if chk == 1:
                break
#
#--- if anything goes wrong, return a wide-open dummy limit set
#
    except:
        t_save = [xmin,  xmax]
        bt_lim = [-9e10, -9e10]
        lr_lim = [-9e10, -9e10]
        ly_lim = [-9e10, -9e10]
        uy_lim = [9e10,  9e10]
        ur_lim = [9e10,  9e10]
        tp_lim = [9e10,  9e10]

    return [t_save, bt_lim, lr_lim, ly_lim, uy_lim, ur_lim, tp_lim]
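
#
#--- the sketch below is illustrative only and assumes matplotlib, which this
#--- module does not import: it shows how the step arrays returned by
#--- set_warning_area() could be used to shade the yellow/red warning bands.
#
#   import matplotlib.pyplot as plt
#
#   def shade_warning_area(ax, t_save, bt_lim, lr_lim, ly_lim, uy_lim, ur_lim, tp_lim):
#       ax.fill_between(t_save, bt_lim, lr_lim, color='red',    alpha=0.2)  # below lower red
#       ax.fill_between(t_save, lr_lim, ly_lim, color='yellow', alpha=0.2)  # lower yellow band
#       ax.fill_between(t_save, uy_lim, ur_lim, color='yellow', alpha=0.2)  # upper yellow band
#       ax.fill_between(t_save, ur_lim, tp_lim, color='red',    alpha=0.2)  # above upper red
#
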
def run_for_msid_list(msid_list, dtype):
    """
    extract data from ska database for a given msid list
    input:  msid_list   --- the file name of the msid list
            dtype       --- data type: week, short, or long
    output: updated data fits files
    """
    [lim_dict, cnd_dict] = rlt.get_limit_table()

    ifile = house_keeping + msid_list
    data  = mcf.read_data_file(ifile)
    for ent in data:
        if ent[0] == '#':
            continue
        elif ent.strip() == '':
            continue

        atemp = re.split(r'\s+', ent)
        msid  = atemp[0].strip()
        catg  = atemp[1].strip()
        print("MSID: " + catg + '/' + msid)
#
#--- just in case the data category directory does not exist
#
        cmd = 'mkdir -p ' + data_dir + atemp[1]
        os.system(cmd)
#
#--- set data period
#
        [dfile, start, stop] = find_data_collection_period(msid, catg, dtype)
#
#--- extract new data part; saved as a local fits file
#
        try:
            alimit   = lim_dict[msid]
            cnd_msid = cnd_dict[msid]
#
#--- if the collection period is longer than a month, extract data in 30-day chunks
#
            diff = stop - start
            if diff > a_month:
                mcnt = int(diff / a_month)
                for m in range(0, mcnt):
                    mstart = start  + a_month * m
                    mstop  = mstart + a_month
                    lstart = "%4.2f" % mcf.chandratime_to_fraq_year(mstart)
                    lstop  = "%4.2f" % mcf.chandratime_to_fraq_year(mstop)
                    print("Computing: " + str(lstart) + '<-->' + str(lstop))
#
#--- extract data and make a local fits file
#
                    out = extract_data_from_ska(msid, mstart, mstop, dtype, alimit, cnd_msid)
#
#--- update the main fits file: either move the local file or append the new part
#
                    if out == True:
                        update_data_file(dfile, msid, dtype)
#
#--- extract the leftover period after the last full chunk
#
                out = extract_data_from_ska(msid, mstop, stop, dtype, alimit, cnd_msid)
                if out == True:
                    update_data_file(dfile, msid, dtype)
#
#--- the data collection period is < 30 days
#
            else:
                out = extract_data_from_ska(msid, start, stop, dtype, alimit, cnd_msid)
                if out == True:
                    update_data_file(dfile, msid, dtype)

        except:
            #print(msid + ' is not in glimmon database')
            print(msid + ' is not in ska fetch database')
            continue
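
#
#--- a minimal, self-contained sketch (not part of the original module) of the
#--- 30-day chunking logic used in run_for_msid_list(); the chunk size assumes
#--- a_month is 30 days expressed in seconds, as in the main script.
#
def month_chunks(start, stop, chunk=30 * 86400):
    """
    (hypothetical helper) yield (cstart, cstop) pairs covering start<-->stop
    in chunks of at most 30 days; the last pair holds any leftover period
    """
    mcnt = int((stop - start) / chunk)
    for m in range(0, mcnt):
        yield (start + chunk * m, start + chunk * (m + 1))

    last = start + chunk * mcnt
    if last < stop:
        yield (last, stop)
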
        catg_dict[atemp[0]] = atemp[1]

    return catg_dict

#--------------------------------------------------------------------------------

if __name__ == "__main__":

    if len(sys.argv) > 4:
        msid  = sys.argv[1].strip()         #--- msid
        dtype = sys.argv[2].strip()         #--- week, short, long
        start = float(sys.argv[3])          #--- start time in seconds from 1998.1.1
        stop  = float(sys.argv[4])          #--- stop time in seconds from 1998.1.1

        [lim_dict, cnd_dict] = rlt.get_limit_table()
        alimit   = lim_dict[msid]
        cnd_msid = cnd_dict[msid]

        extract_data_from_ska(msid, start, stop, dtype, alimit, cnd_msid)

    elif len(sys.argv) == 3:
        msid_list = sys.argv[1].strip()     #--- name of a file containing an msid list
        dtype     = sys.argv[2].strip()     #--- week, short, long

        run_for_msid_list(msid_list, dtype)

    else:
        run_glimmon_trend_data_update()
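
#
#--- example invocations; the script name and argument values below are
#--- hypothetical and shown only to illustrate the three calling modes.
#
#   extract one msid over an explicit period:
#       python glimmon_trend_data_update.py tephin week 700000000 702592000
#
#   extract every msid listed in a file under house_keeping:
#       python glimmon_trend_data_update.py msid_list_sample short
#
#   run the full update:
#       python glimmon_trend_data_update.py
#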