def completeTask(temp_dir, outdir, lname):
    """
    mv a list to a proper location, and send out email if there is new obsid

    input:  temp_dir --- a dir where a file is created
            outdir   --- a dir where the files are kept, usually too_dir
            lname    --- a file name, e.g., too_list, ddt_list, new_obs_list, and obs_in_30days
    output: <outdir>/<lname> replaced/updated; notification email sent when new
            "unobserved" entries are found (except for obs_in_30days)
    """
#
#--- fix: always define the backup name; previously oname was bound only inside the
#--- first if-block, so a missing old file + an empty new file raised NameError below
#
    oname = lname + '~'
#
#--- change the old file name to that with "~"
#
    test = outdir + lname
    if mcf.chkFile(test) == 1:
        cmd = 'mv ' + outdir + lname + ' ' + outdir + oname
        os.system(cmd)
#
#--- move the new one to the too_dir
#
    test = temp_dir + lname
    if mcf.chkFile(test) == 1:
        cmd = 'mv ' + temp_dir + lname + ' ' + outdir + lname
        os.system(cmd)
#
#--- if the file is empty or does not exist, copy back the old one
#
    test = outdir + lname
    if mcf.isFileEmpty(test) == 0:
        cmd = 'cp ' + outdir + oname + ' ' + outdir + lname
        os.system(cmd)
#
#--- check whether there are any new observations. if so, send out notification email
#
    lname2 = outdir + lname
    oname2 = outdir + oname
    new_ent = tdfnc.comp_two_record_lists(lname2, oname2)
#
#--- only "unobserved" is truely new
#
    cleaned_list = []
    for ent in new_ent:
        atemp = re.split('\s+', ent)
        if atemp[3] == 'unobserved':
            cleaned_list.append(ent)

    if len(cleaned_list) > 0 and lname != 'obs_in_30days':
        atemp = re.split('_list', lname)
        tdfnc.send_email(atemp[0], cleaned_list)
def combine_image(fits1, fits2):
    """
    combine two fits image files.
    input :fits1 fits2. a combined fits file is moved to fits2.

    Note: runs CIAO dmimgcalc under an ascds environment via bash();
          fits1 is removed in either the success or the failure path.
    """
    chk = mtac.chkFile('./', fits2)             #--- check the second fits file exist
    if chk == 0:
#
#--- no previous accumulation: the first image simply becomes the result
#
        cmd = 'mv ' + fits1 + ' ' + fits2
        os.system(cmd)
    else:
        try:
#
#--- PERL5LIB is cleared so the ascds tool does not pick up a conflicting perl library path
#
            cmd1 = "/usr/bin/env PERL5LIB="
            cmd2 = ' dmimgcalc infile=' + fits1 + ' infile2=' + fits2 + ' outfile=mtemp.fits operation=add clobber=yes'
            cmd = cmd1 + cmd2
            bash(cmd, env=ascdsenv)

            cmd = 'rm ' + fits1
            os.system(cmd)
#
#--- rename the combined fits image to "fits2"
#
            cmd = 'mv mtemp.fits ' + fits2
            os.system(cmd)
        except:
#
#--- NOTE(review): bare except --- on any dmimgcalc failure fits1 is silently
#--- dropped and the old fits2 kept; looks intentional (best-effort accumulation)
#
            cmd = 'rm ' + fits1
            os.system(cmd)
def update_record_file(cfile, lname):
    """
    for each cron job, find the last updated time and the current file length (in line #)

    Input:  cfile --- output file name <house_keeping>/Records/<machine>_<user>
            lname --- a list of cron jobs
    Output: cfile --- an updated recorded file in <house_keeping>/Records/
            cname --- a list of the current file names
            ctime --- a list of the last updated time of each cron job
            csize --- a list of the current file length in line # of each cron record file
    """
    cname = []
    ctime = []
    csize = []

    with open(cfile, 'w') as fo:
        for job in lname:
            log_path = '/home/' + user + '/Logs/' + job
#
#--- record only log files which actually exist
#
            if mcf.chkFile(log_path) > 0:
                mtime = modification_date(log_path)
                nline = file_length(log_path)

                cname.append(job)
                ctime.append(mtime)
                csize.append(nline)

                fo.write(log_path + ' : ' + str(mtime) + ' : ' + str(nline) + '\n')

    return [cname, ctime, csize]
def update_record_file(cfile, lname):
    """
    for each cron job, find the last updated time and the current file length (in line #)

    Input:  cfile --- output file name <house_keeping>/Records/<machine>_<user>
            lname --- a list of cron jobs
    Output: cfile --- an updated recorded file in <house_keeping>/Records/
            cname --- a list of the current file names
            ctime --- a list of the last updated time of each cron job
            csize --- a list of the current file length in line # of each cron record file
    """
    cname = []
    ctime = []
    csize = []
    fo = open(cfile, 'w')

    for ent in lname:
        path = '/home/' + user + '/Logs/' + ent
#
#--- skip cron log files which do not exist
#
        if mcf.chkFile(path) <= 0:
            continue

        upd = modification_date(path)
        cnt = file_length(path)

        cname.append(ent)
        ctime.append(upd)
        csize.append(cnt)

        fo.write(path + ' : ' + str(upd) + ' : ' + str(cnt) + '\n')

    fo.close()
    return [cname, ctime, csize]
def check_and_create_dir(dir):
    """
    check whether a directory exist, if not, create one
    Input:  dir --- directory name
    Output: directory created if it was not there.
    """
#
#--- create the directory only when chkFile says it is missing
#
    if mcf.chkFile(dir) == 0:
        os.system('mkdir ' + dir)
def prep_test():
    """
    prepare the test environment: make sure every test output directory exists
    and copy the canned test_date file into the test web directory.

    Input:  none, but uses module globals test_*_dir and house_keeping
    Output: missing test directories created; test_date copied into test_web_dir
    """
#
#--- if this is a test case, check whether output file exists. If not, creaete it
#
    for ent in (test_web_dir, test_data_dir, test_plot_dir, test_html_dir, test_stat_dir, test_ephin_dir, test_goes_dir, test_note_dir, test_intro_dir):
        chk = mcf.chkFile(ent)
        if chk == 0:
            cmd = 'mkdir ' + ent
            os.system(cmd)
#
#--- prepare for test
#
    cmd = 'cp ' + house_keeping + 'Test_prep/test_date ' + test_web_dir + '.'
    os.system(cmd)
def check_cron_records(): """ driving script: reads cron job file and find newly recorded error message of each job Input: none but use cronjob listing for the <user> on <manchine> it also reads <house_keeping>/Records/<machine>_<user> for the past record the errors are read from /home/<user>/Logs/xxx.cron files Output: <house_keeping>/Records/<machine>_<user> --- updated <house_keeping>/Records/<machine>_<user>_error_list --- a list of the errors """ # #--- setup a record file name depending on the user and the machine # cfile = house_keeping + 'Records/' + machine + '_' + user chk = mcf.chkFile(cfile) # #--- if this is the first time, just create a record file # if chk == 0: # #--- crate a list of cron jobs # lname = extract_cron_file_name() # #--- find the last update time and the file size of the files in the list # [cname, ctime, csize] = update_record_file(cfile, lname) else: # #--- if there is the past record, read it # [pname, ptime, psize] = get_prev_data(cfile) # #--- move the previous record # cmd = 'mv ' + cfile + ' ' + cfile + '~' os.system(cmd) lname = extract_cron_file_name() [cname, ctime, csize] = update_record_file(cfile, lname) # #--- find error messages and create error list # compare_and_find(cname, ctime, csize, pname, ptime, psize)
def make_too_obs_list():
    """
    create a table of too obsids for corresponding proporsal numbers
    input:  none, but reads uspp_dir/cycle<NN>_toos.html pages and writes to the
            module-global path <outdir> (<too_dir> + too_prop_obsid_list)
    output: <too_dir> + too_prop_obsid_list
            prop_list --- a list of proposal numbers
    """
    return_prop_list = []
    fo = open(outdir, 'w')
#
#--- walk the cycle pages; stop at the first missing cycle file
#
    for cycle in range(13, 100):
        file = uspp_dir + "cycle" + str(cycle) + "_toos.html"
        if mcf.chkFile(file) == 0:
            break

        [prop_list, prop_dict] = find_too_observations(file)
        obsid_status = find_obsid_status()

        for prop_num in prop_list:
            obsid_list = []
            for obsid in prop_dict[prop_num]:
#
#--- an obsid missing from the status table is treated as still open ('')
#
                try:
                    status = obsid_status[obsid]
                except:
                    status = ''
#
#--- drop obsids which are already done or dropped
#
                if (status == 'archived') or (status == 'canceled'):
                    continue
                else:
                    obsid_list.append(obsid)
#
#--- write: <prop_num><>obsid1:obsid2:... one proposal per line
#
            if len(obsid_list) > 0:
                return_prop_list.append(prop_num)
                line = prop_num + '<>' + obsid_list[0]
                for k in range(1, len(obsid_list)):
                    line = line + ':' + obsid_list[k]
                line = line + '\n'
                fo.write(line)
            else:
                continue

    fo.close()
    return return_prop_list
def generate_ephin_rate_plot(directory):
    """
    create ephin rate plots
    Input:  directory --- a directory where the data is kept and the plot will be created
                          <directory>/ephin_rate --- ephin data file
    Ouput:  <directory>/ephin_rate.png
    """
    dfile = directory + '/ephin_rate'
    if mcf.chkFile(dfile) == 0:
        return ""

    with open(dfile, 'r') as f:
        data = [line.strip() for line in f.readlines()]

    dom = []
    p4 = []
    e150 = []
    e300 = []
    e1300 = []
#
#--- keep rows whose first two columns are numeric; each count column is
#--- divided by 300.0 (presumably seconds per bin --- converts to count/sec)
#
    for row in data:
        cols = re.split('\s+', row)
        if mcf.chkNumeric(cols[0]) and mcf.chkNumeric(cols[1]):
            dom.append(float(cols[0]))
            p4.append(float(cols[1]) / 300.0)
            e150.append(float(cols[2]) / 300.0)
            e300.append(float(cols[3]) / 300.0)
            e1300.append(float(cols[4]) / 300.0)

    xname = 'Time (DOM)'
    yname = 'Count/Sec'
    outname = directory + '/ephin_rate.png'

    plot_multi_panel([dom, dom, dom, dom],
                     [p4, e150, e300, e1300],
                     xname,
                     [yname, yname, yname, yname],
                     ['P4', 'E150', 'E300', 'E1300'],
                     outname)
def prep_output_dir(year, month):
    """
    THIS IS NOT USED IN THIS SCRIPT (NOV 15, 2012)
    check whether required directories exist and if not create them
    input:  year and month
    output: output directories; returns dir_name (the month data directory path)
    """
#
#--- zero-padded two digit month
#
    lmon = str(int(month))
    if int(month) < 10:
        lmon = '0' + lmon

    dir_name = data_dir + 'Data_' + str(year) + '_' + lmon
#
#--- check whether the directory is already created
#
    chk = mtac.chkFile(dir_name)
    if chk == 0:
        cmd = 'mkdir ' + dir_name
        os.system(cmd)
#
#--- one sub directory per ccd (CCD0 ... CCD9); replaces ten copy-pasted
#--- mkdir blocks --- same commands issued in the same order
#
        for ccd in range(0, 10):
            cmd = 'mkdir ' + dir_name + '/CCD' + str(ccd)
            os.system(cmd)

    return dir_name
def combine_image(fits1, fits2):
    """
    combine two fits image files.
    input :fits1 fits2. a combined fits file is moved to fits2.
    """
#
#--- if fits2 does not exist yet, the first image simply becomes the result
#
    if mtac.chkFile('./', fits2) == 0:
        os.system('mv ' + fits1 + ' ' + fits2)
        return
#
#--- otherwise add the two images, drop fits1, and rename the sum to "fits2"
#
    os.system('dmimgcalc infile=' + fits1 + ' infile2=' + fits2 + ' outfile=mtemp.fits operation=add clobber=yes')
    os.system('rm ' + fits1)
    os.system('mv mtemp.fits ' + fits2)
def compute_bias_data():
    """
    the calling function to extract bias data
    Input:  None but read from local files (see find_today_data)
    Output: bias data (see extract_bias_data)
    """
#
#--- make sure ./Working_dir exists and is empty
#
    if mcf.chkFile('./', 'Working_dir') == 0:
        os.system('mkdir ./Working_dir')
    else:
        os.system('rm -rf ./Working_dir/*')
#
#--- find which data to use
#
    today_data = cbd.find_today_data()
#
#--- extract bias reated data
#
    cbd.extract_bias_data(today_data)
def extract_hist_data(file):
    """
    extracting acis hist data from fits file
    input:  fits file name
    output: one cloumn histgram data
    """
#
#--- check whether the temp file exists. if so, remove it
#
    if mtac.chkFile(exc_dir + 'zout') > 0:
        os.system('rm ' + exc_dir + 'zout')
#
#--- extract data
#
    os.system('dmlist "' + file + '[cols counts]" outfile = ' + exc_dir + 'zout opt=data')

    with open(exc_dir + 'zout', 'r') as f:
        data = [line.strip() for line in f.readlines()]

    os.system('rm ' + exc_dir + 'zout')
#
#--- keep the counts column of rows whose first column is numeric (data rows)
#
    hist_data = []
    for row in data:
        cols = re.split('\s+|\t+', row)
        if mtac.chkNumeric(cols[0]):
            hist_data.append(float(cols[1]))

    return hist_data
def extract_head_info(file):
    """
    extreact information about the data from the fits file
    input:  fits file name
    output: head_info = [fep, ccd, node, pblock, tstart, tstop, expcount, date_obs, date_end]
    """
#
#--- check whether the temp file exists. if so, remove it
#
    if mcf.chkFile(exc_dir + 'zout') > 0:
        os.system('rm ' + exc_dir + 'zout')
#
#--- read the needed keywords from the first extension header, in a fixed order
#
    header = pyfits.getheader(file, 1)

    keys = ['FEP_ID', 'CCD_ID', 'NODE_ID', 'PBLOCK', 'TSTART', 'TSTOP',
            'EXPCOUNT', 'DATE-OBS', 'DATE-END']

    return [header[key] for key in keys]
def accumulate_data(inlist, file):
    """
    combine the data in the given period
    Input:  inlist --- a list of data directories to extract data
            file   --- a file name of the data
    Output: a list of combined data:
            [atime, assoft, asoft, amed, ahard, aharder, ahardest]
    """
    atime = []
    assoft = []
    asoft = []
    amed = []
    ahard = []
    aharder = []
    ahardest = []

    for dname in inlist:
        infile = dname + '/' + file
#
#--- fall back to the gzipped version when the plain file is missing
#
        if mcf.chkFile(infile) == 0:
            infile = infile + '.gz'
#
#--- a directory with an unreadable data file is silently skipped (original behavior)
#
        try:
            [time, ssoft, soft, med, hard, harder, hardest] = read_data_file(infile)
            atime.extend(time)
            assoft.extend(ssoft)
            asoft.extend(soft)
            amed.extend(med)
            ahard.extend(hard)
            aharder.extend(harder)
            ahardest.extend(hardest)
        except:
            pass

    return [atime, assoft, asoft, amed, ahard, aharder, ahardest]
#-------------------------------------------------------------------------------------------------------------- if __name__ == "__main__": if len(sys.argv) == 2: if sys.argv[1] == 'test': #---- this is a test case comp_test = 'test' else: comp_test = 'real' else: comp_test = 'real' # #--- if this is a test case, check whether output file exists. If not, creaete it # if comp_test == 'test': chk = mcf.chkFile(test_web_dir) if chk == 0: cmd = 'mkdir ' + test_web_dir os.system(cmd) # #--- prepare for test # chk = mcf.chkFile(test_web_dir, "NOAO_data") if chk == 0: cmd = 'mkdir ' + test_web_dir + 'NOAO_data/' os.system(cmd) cmd = 'cp ' + house_keeping + 'NOAO_data/past_goes_list ' + test_web_dir + 'NOAO_data/.' os.system(cmd) # #--- now call the main function
def print_html_page(comp_test, in_year=1, in_mon=1): """ driving function to print all html pages for ACIS Dose Plots Input: comp_test --- test indicator. if it is "test", it will run the test version in_year/in_mon --- if in_year and in_mon are given, the file is created for that year/month, otherwise, the files are created in the current year/month Output: html pages in <web_dir> and <web_dir>/<mon_dir_name> (e.g. JAN2013) """ # #--- find today's date and convert them appropriately # if comp_test == 'test': bchk = 0 tday = 13 umon = 2 uyear = 2013 cmon = tcnv.changeMonthFormat(umon) cmon = cmon.upper() ldate = str(uyear) + '-' + str(umon) + '-' + str(tday) #-- update date else: # #--- find today's date # [uyear, umon, tday, hours, min, sec, weekday, yday, dst] = tcnv.currentTime() # #--- change month in digit into letters # cmon = tcnv.changeMonthFormat(umon) cmon = cmon.upper() ldate = str(uyear) + '-' + str(umon) + '-' + str(tday) #-- update date # #--- if year and month is given, create for that month. otherwise, create for this month # bchk = 0 if mcf.chkNumeric(in_year) and mcf.chkNumeric(in_mon): if in_year > 1900 and (in_mon > 0 and in_mon < 13): bchk = 1 if bchk > 0: uyear = in_year umon = in_mon cmon = tcnv.changeMonthFormat(umon) cmon = cmon.upper() mon_dir_name = cmon + str(uyear) # #--- check whether this monnth web page already opens or not # dname = web_dir + mon_dir_name chk = mcf.chkFile(dname) if chk > 0: if bchk == 0: # #-- create only when it is working for the current month # print_main_html(ldate, uyear, umon) print_month_html(mon_dir_name, ldate, uyear, umon) print_png_html(mon_dir_name, ldate, uyear, umon) # #--- change permission level and the owner of the files # cmd = 'chgrp mtagroup ' + web_dir + '/* ' + web_dir + '/*/*' os.system(cmd) cmd = 'chmod 755 ' + web_dir + '/* ' + web_dir + '/*/*' os.system(cmd)
def acis_sci_run_get_data():
    """
    this function is a driving fuction which extracts mit data and updates acis science run data and plots
    Input:  none but read the data from mit site
    Output: updated data tables and plots in web_dir/Year<this_year>
            data_<year> --- this contains all data
            sub data sets are:
                te1_3_out te3_3_out te5_5_out cc2_3_out
                drop_<year> drop5x5_<year>
                high_error_<year> high_error5x5_<year>
                high_event_<year> high_event5x5_<year>
            there are a few other files, but they are mostly empty and ignored.
            plots: te3_3_out.png te5_5_out.png cc3_3_out.png
    """
#
#---check whether "Working_dir" exists
#
    chk = mcf.chkFile('./', 'Working_dir')
    if chk > 0:
        cmd = 'rm ./Working_dir/*'
    else:
        cmd = 'mkdir ./Working_dir'
    os.system(cmd)
#
#--- check current_dir exists; if this is the first of the year, analyze data in the last year's frame
#
    chk_new = mcf.chkFile(web_dir, current_dir)
#
#---- get data from MIT
#
    if chk_new == 0:
        last_year = int(year) - 1
        mit_data = get_mit_data(last_year)
    else:
        mit_data = get_mit_data(year)
#
#---- check whether mit_data goes over two years
#
    ychk = checkYearChange(mit_data)        #--- if ychk > 1, the mit_data contains year changes
#---------------
#---- for the case that there is year change during this data
#---------------
    if ychk > 0:
#
#---- previous year's data
#
        last_year = year - 1
        name = web_dir + 'Year' + str(last_year) + '/data' + str(last_year)
        addToPastData(mit_data, name)
        lastYear_dir = 'Year' + str(last_year) + '/'
        separate_data(name, lastYear_dir)
#
#---- print a html page
#
        asrf.acis_sci_run_print_html(web_dir, last_year, 12, 31, 'yes')
#
#---- make plots (only when the day this year's directory is created)
#
        if chk_new == 0:
            plot_events(lastYear_dir)
#
#--- check whether there are any high events
#
        chkHighEvent(last_year)
#
#---- now work on this year's data
#
        if chk_new == 0:
            cmd = 'mkdir ' + web_dir + current_dir + '/'
            os.system(cmd)

        name = web_dir + 'Year' + str(year) + '/data' + str(year)
        fout = open(name, 'w')
        for ent in mit_data:
            atemp = re.split('\t+|\s+', ent)
            btemp = re.split(':', atemp[1])
#
#--- keep only the entries of the new year (day of year < 100)
#
            val = float(btemp[0])
            if val < 100:
                fout.write(ent)
                fout.write('\n')
        fout.close()
        asrf.removeDuplicated(name)
#
#--- separate data into each category
#
        separate_data(name, current_dir)
        asrf.acis_sci_run_print_html(web_dir, year, month, day, 'no')
#
#--- plot data
#
        plot_events(current_dir)
#
#--- check whether there are any high events
#
        chkHighEvent(year)
#
#--- update long term data tables
#
        longTermTable(year)
#
#--- plot long term trends
#
        plot_events('Long_term/')
#
#--- update all_data file
#
        update_all_data(mit_data, year)
#---------
#--- no year change... business as usual
#---------
    else:
        name = web_dir + 'Year' + str(year) + '/data' + str(year)
#
#--- fix: was mcf.chkFile('name') --- the literal string 'name' never exists,
#--- so the data file was re-opened in 'w' mode and truncated on every run
#
        chk = mcf.chkFile(name)
        if chk == 0:
            fout = open(name, 'w')
        else:
            fout = open(name, 'a')

        for ent in mit_data:
            fout.write(ent)
            fout.write('\n')
        fout.close()
#
#--- remove duplicated lines
#
        asrf.removeDuplicated(name)
#
#--- separate data into each category
#
        separate_data(name, current_dir)
#
#--- update html pages
#
        asrf.acis_sci_run_print_html(web_dir, year, month, day, 'yes')
#
#--- plot trends
#
        plot_events(current_dir)
#
#--- check whether there are any high events
#
        chkHighEvent(year)
#
#--- update long term data tables
#
        longTermTable(year)
#
#--- plot long term trends
#
        plot_events('Long_term/')
#
#--- update all_data file
#
        update_all_data(mit_data, year)
def report_error(): """ read errors from <cup_usr_list>_error_list, sort it out, clean, and send out email Input: none but read from <cup_usr_list>_error_list Output: email sent out """ # #--- find the current time # [year, mon, day, hours, min, sec, weekday, yday, dst] = tcnv.currentTime("Local") # #--- create surfix for files which will be saved in Past_errors directory # smon = str(mon) if mon < 10: smon = '0' + smon sday = str(day) if day < 10: sday = '0' + sday tail = str(year) + smon + sday for tag in cpu_usr_list: efile = house_keeping + 'Records/' + tag + '_error_list' pfile = house_keeping + 'Records/Past_errors/' + tag + '_error_list_' + tail prev_line = '' chk = mcf.chkFile(efile) if chk > 0: # #--- read error messages from the file # f = open(efile, 'r') data = [line.strip() for line in f.readlines()] f.close() # #--- sort the data so that we can correct messages to each cron job together # data.sort() task_list = [] time_list = [] mssg_list = [] for ent in data: atemp = re.split(' : ', ent) task_list.append(atemp[0]) stime = int(atemp[1]) dtime = tcnv.axTimeMTA(stime) time_list.append(dtime) mssg_list.append(atemp[2]) # #--- write out cron job name # fo = open(zspace, 'w') cname = task_list[0] line = '\n\n' + cname + '\n____________________\n\n' fo.write(line) for i in range(0, len(mssg_list)): if task_list[i] != cname: cname = task_list[i] line = '\n\n' + cname + '\n____________________\n\n' fo.write(line) # #--- create each line. if it is exactly same as one line before, skip it # line = time_list[i] + ' : ' + mssg_list[i] + '\n' if line != prev_line: fo.write(line) prev_line = line fo.close() # #--- send email out # # cmd = 'cp ' + zspace + ' ' + '/data/mta/Script/Cron_check/Scripts/' + tag # os.system(cmd) send_mail(tag, email_list) # #--- move the error list to Past_errors directory # cmd = 'mv ' + efile + ' ' + pfile os.system(cmd)
def print_html_page(comp_test, in_year=1, in_mon=1): """ driving function to print all html pages for ACIS Dose Plots Input: comp_test --- test indicator. if it is "test", it will run the test version in_year/in_mon --- if in_year and in_mon are given, the file is created for that year/month, otherwise, the files are created in the current year/month Output: html pages in <web_dir> and <web_dir>/<mon_dir_name> (e.g. JAN2013) """ # #--- find today's date and convert them appropriately # if comp_test == 'test': bchk = 0 tday = 13; umon = 2; uyear = 2013; cmon = tcnv.changeMonthFormat(umon) cmon = cmon.upper() ldate = str(uyear) + '-' + str(umon) + '-' + str(tday) #-- update date else: # #--- find today's date # [uyear, umon, tday, hours, min, sec, weekday, yday, dst] = tcnv.currentTime() # #--- change month in digit into letters # cmon = tcnv.changeMonthFormat(umon) cmon = cmon.upper() ldate = str(uyear) + '-' + str(umon) + '-' + str(tday) #-- update date # #--- if year and month is given, create for that month. otherwise, create for this month # bchk = 0 if mcf.chkNumeric(in_year) and mcf.chkNumeric(in_mon): if in_year > 1900 and (in_mon >0 and in_mon < 13): bchk = 1 if bchk > 0: uyear = in_year umon = in_mon cmon = tcnv.changeMonthFormat(umon) cmon = cmon.upper() mon_dir_name = cmon + str(uyear); # #--- check whether this monnth web page already opens or not # dname = web_dir + mon_dir_name chk = mcf.chkFile(dname) if chk > 0: if bchk == 0: # #-- create only when it is working for the current month # print_main_html(ldate, uyear, umon); print_month_html(mon_dir_name, ldate, uyear, umon); print_png_html(mon_dir_name, ldate, uyear, umon); # #--- change permission level and the owner of the files # cmd = 'chgrp mtagroup ' + web_dir + '/* ' + web_dir + '/*/*' os.system(cmd) cmd = 'chmod 755 '+ web_dir + '/* ' + web_dir + '/*/*' os.system(cmd)
def get_mit_data(tyear): """ this function extracts data from the MIT web site and select out the newest part by comparing the data to the current data saved locally. Input: tyear --- year of the last save data, data will read the data from MIT web site and a locat data (data_<tyear>) Output: mit_data """ # #--- first find out the latest version of phase by reading main html page #--- here is the lnyx script to obtain web page data # if comp_test == 'test': last_phase = 71 first_phase = 71 elif comp_test == 'test2': last_phase = 70 first_phase = 70 else: phase_list = createPhaseList() plen = len(phase_list) if plen > 3: last_phase = phase_list[plen -1] first_phase = last_phase - 3 else: exit(1) # #--- extract data needed # new_data = getNewData(first_phase, last_phase) # #--- if there is no new data, stop the entire operation # if len(new_data) == 0: exit(1) # #--- read column names --- this is the name of columns we need to save # f = open(col_names, 'r') col_list = [line.strip() for line in f.readlines()] f.close() # #---extract specified column data # new_data_save = extractElements(new_data, col_list) # #---- read the past data # pname = web_dir + 'Year' + str(tyear) + '/data' + str(tyear) chk = mcf.chkFile(pname) # #--- if there is no data_<tyear> existed, create an empty file for convenience. 
# if chk == 0: fo = open(pname, 'w') fo.close() old_data = [] else: f = open(pname, 'r') old_data = [line.strip() for line in f.readlines()] f.close() # #--- adjust the last few entries of the old_data as they might be modified while new data come in # adjstPastData(old_data, new_data_save) # #--- clean up old and new data files just created (removing duplicate and sorting) # cleanup('./Working_dir/old_data', 1) cleanup('./Working_dir/zdata_out', 1) name2 = pname + '~' cmd = 'mv ./Working_dir/old_data ' + name2 os.system(cmd) # #--- read cleaned current mit data # f = open('./Working_dir/zdata_out', 'r') mit_data = [line.strip() for line in f.readlines()] f.close() return mit_data
def generate_count_rate_plot(directory):
    """
    create count rate plots
    Input:  directory --- the directory where data is located and the plot will be created
                          <directory>/ccd<ccd> --- count rate data file
    Output: <directory>/acis_dose_ccd<ccd>.png
            <directory>/acis_dose_ccd_5_7.png
    """
    xname = 'Time (DOM)'
    yname = 'Count/Sec'

    data1_x = []
    data1_y = []
    data2_x = []
    data2_y = []
    data3_x = []
    data3_y = []
#
#--- plot count rates for each ccd
#
    for ccd in range(0, 10):
        file = directory + '/ccd' + str(ccd)
        chk = mcf.chkFile(file)
        if chk == 0:
            continue

        f = open(file, 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()

        xdata = []
        ydata = []
        for ent in data:
            atemp = re.split('\s+', ent)
#
#--- keep only the rows with numeric time/count columns
#
            if mcf.chkNumeric(atemp[0]) and mcf.chkNumeric(atemp[1]):
                xdata.append(float(atemp[0]))
#
#--- normalized to cnts/sec
#
                ydata.append(float(atemp[1]) / 300.0)

        title = 'ACIS Count Rate: CCD' + str(ccd)
        outname = directory + '/acis_dose_ccd' + str(ccd) + '.png'
        plot_panel(xdata, ydata, xname, yname, title, outname)
#
#--- save data for three panel plot
#
        if ccd == 5:
            data1_x = xdata
            data1_y = ydata
        elif ccd == 6:
            data2_x = xdata
            data2_y = ydata
        elif ccd == 7:
            data3_x = xdata
            data3_y = ydata
#
#--- create three panel plot for ccd5, ccd6, and ccd7
#
    title1 = 'ACIS Count Rate: CCD5'
    title2 = 'ACIS Count Rate: CCD6'
    title3 = 'ACIS Count Rate: CCD7'
    outname = directory + '/acis_dose_ccd_5_7.png'

    x_set_list = [data1_x, data2_x, data3_x]
    y_set_list = [data1_y, data2_y, data3_y]
    yname_list = [yname, yname, yname]
    title_list = [title1, title2, title3]

    plot_multi_panel(x_set_list, y_set_list, xname, yname_list, title_list, outname)
def hrc_stowed_background(syear, smonth, eyear, emonth):
    """
    controlling script to set up directory and run all script
    input:  syear  --- starting year
            smonth --- starting month
            eyear  --- ending year
            emonth --- ending month
    output: evt0, evt1, ss0, and hk00 fits files corresponding to next_in_line condition
    """
#
#--- update hrc gain list
#
    rhp.update_gain_selection_file()
#
#--- run over the given time period
#
    for year in range(syear, eyear+1):
        for month in range(1, 13):
            if year == syear and month < smonth:
                continue
            elif year == eyear and month > emonth:
                break
#
#--- start and stop time in mm/dd/yy,hh:mm:ss format
#
            begin = hcf.conv_time_format(year, month)
            end = hcf.conv_time_format(year, month, next=1)
#
#--- start and stop time in seconds from 1998.1.1
#
            start = hcf.convertto1998sec(begin)
            stop = hcf.convertto1998sec(end)
#
#--- make saving directory
#
            lmon = hcf.find_month(month)
            outdir = data_dir + str(year) + lmon + '/'
            if mcf.chkFile(outdir) == 0:
                cmd = 'mkdir ' + outdir
                os.system(cmd)

            for ent in comb_dirs:
                udir = outdir + ent
                if mcf.chkFile(udir) == 0:
                    cmd = 'mkdir ' + udir
                    os.system(cmd)
#
#--- now run the main script
#--- (the try/except recovery path below was disabled; kept for reference)
#
            #try:
            rnl.extract_next_in_line_data(begin, end, start, stop, outdir)
            #except:
            #    print 'Year: ' + str(year) + ' Month: ' + str(month) + '--- The process failed. '
            #    cmd = 'mv tscpos_positive *period *fits ' + outdir + ' 2> /dev/null'
            #    os.system(cmd)
            #    cmd = 'gzip ' + outdir + '*fits ' + outdir + '*/*fits 2>/dev/null'
            #    os.system(cmd)
            #    continue
#
#--- move other files to appropriated directories
#
            cmd = 'mv tscpos_positive ' + outdir
            os.system(cmd)

            #cmd = 'ls *period > ' + zspace
            cmd = 'ls * > ' + zspace
            os.system(cmd)

            data = hcf.read_file_data(zspace, remove=1)
#
#--- each "*period*" file is routed into hrc_<a>_<b>[_hi]/ under outdir,
#--- based on the tokens following "hrc_" in its name
#
            for ent in data:
                mc = re.search('period', ent)
                if mc is None:
                    continue

                atemp = re.split('hrc_', ent)
                btemp = re.split('_', atemp[1])
                dname = 'hrc_' + btemp[0] + '_' + btemp[1]

                mc = re.search('_hi', ent)
                if mc is not None:
                    dname = dname + '_hi'

                cmd = 'mv ' + ent + ' ' + outdir + dname + '/'
                os.system(cmd)

            cmd = 'rm -f *fits ./Temp_dir/* 2> /dev/null'
            os.system(cmd)

            cmd = 'gzip -f ' + outdir + '*/*.fits 2> /dev/null'
            os.system(cmd)
#
#---- update stat tables
#
            hnt.hrc_nil_table(year, month)
            hnt.status_bit_report(year, month)
#
#---- clean up stat files
#---- NOTE(review): indentation reconstructed as per-month; confirm it was not
#---- intended to run once after the loops
#
            clean_up_stat_lists()
def comp_stat(file, year, month, out):
    """
    compute statistics for the hrc image and print out the result

    input:  file  --- hrc image file
            year  --- year of the data
            month --- month of the data
            out   --- output file name (appended to if it already exists)
    output: one tab-separated stat line written to <out>; all 'NA' when the
            image is missing or dmstat reports no mean
    """
    chk = mcf.chkFile(file)             #--- checking whether the file exists
    if chk > 0:
#
#--- to avoid getting min value from the outside of the frame edge of a CCD, set threshold
#
        cmd = '/bin/nice -n15 dmimgthresh infile=' + file + ' outfile=zcut.fits cut="0:1.e10" value=0 clobber=yes'
        os.system(cmd)

        cmd = 'dmstat infile=zcut.fits centroid=no >' + zspace
        os.system(cmd)
        mcf.rm_file('./zcut.fits')

        f = open(zspace, 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
#
#--- make sure dmstat actually reported a mean value
#
        val = 'NA'
        for ent in data:
#
#--- fix: the result of lstrip() was previously discarded (a no-op statement)
#
            ent = ent.lstrip()
            m = re.search('mean', ent)
            if m is not None:
                atemp = re.split('\s+|\t', ent)
                val = atemp[1]
                break

        if val != 'NA':
#
#--- vmin/vmax renamed from min/max so the builtins are no longer shadowed
#
            (mean, dev, vmin, vmax, min_pos_x, min_pos_y, max_pos_x, max_pos_y) = readStat(zspace)
            mcf.rm_file(zspace)
            (sig1, sig2, sig3) = find_two_sigma_value(file)
        else:
            (mean, dev, vmin, vmax, min_pos_x, min_pos_y, max_pos_x, max_pos_y) = ('NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA')
            (sig1, sig2, sig3) = ('NA', 'NA', 'NA')
    else:
        (mean, dev, vmin, vmax, min_pos_x, min_pos_y, max_pos_x, max_pos_y) = ('NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA')
        (sig1, sig2, sig3) = ('NA', 'NA', 'NA')
#
#--- print out the results
#
    chk = mcf.chkFile(out)              #--- checking whether the file exists
    if chk > 0:
        f = open(out, 'a')
    else:
        f = open(out, 'w')

    if mean == 'NA':
        line = '%d\t%d\t' % (year, month)
        f.write(line)
        f.write('NA\tNA\tNA\tNA\tNA\tNA\tNA\tNA\tNA\n')
    else:
        line = '%d\t%d\t' % (year, month)
        line = line + '%5.6f\t%5.6f\t%5.1f\t(%d,%d)\t' % (float(mean), float(dev), float(vmin), float(min_pos_x), float(min_pos_y))
        line = line + '%5.1f\t(%d,%d)\t%5.1f\t%5.1f\t%5.1f\n' % (float(vmax), float(max_pos_x), float(max_pos_y), float(sig1), float(sig2), float(sig3))
        f.write(line)

    f.close()
def acis_sci_run_get_data():
    """
    this function is a driving fuction which extracts mit data and updates acis science run data and plots
    Input:  none but read the data from mit site
    Output: updated data tables and plots in web_dir/Year<this_year>
            data_<year> --- this contains all data
            sub data sets are:
                te1_3_out te3_3_out te5_5_out cc2_3_out
                drop_<year> drop5x5_<year>
                high_error_<year> high_error5x5_<year>
                high_event_<year> high_event5x5_<year>
            there are a few other files, but they are mostly empty and ignored.
            plots: te3_3_out.png te5_5_out.png cc3_3_out.png
    """
#
#---check whether "Working_dir" exists
#
    chk = mcf.chkFile('./', 'Working_dir')
    if chk > 0:
        cmd = 'rm ./Working_dir/*'
    else:
        cmd = 'mkdir ./Working_dir'
    os.system(cmd)
#
#--- check current_dir exists; if this is the first of the year, analyze data in the last year's frame
#
    chk_new = mcf.chkFile(web_dir, current_dir)
#
#---- get data from MIT
#
    if chk_new == 0:
        last_year = int(year) - 1
        mit_data = get_mit_data(last_year)
    else:
        mit_data = get_mit_data(year)
#
#---- check whether mit_data goes over two years
#
    ychk = checkYearChange(mit_data)        #--- if ychk > 1, the mit_data contains year changes
#---------------
#---- for the case that there is year change during this data
#---------------
    if ychk > 0:
#
#---- previous year's data
#
        last_year = year - 1
        name = web_dir + 'Year' + str(last_year) + '/data' + str(last_year)
        addToPastData(mit_data, name)
        lastYear_dir = 'Year' + str(last_year) + '/'
        separate_data(name, lastYear_dir)
#
#---- print a html page
#
        asrf.acis_sci_run_print_html(web_dir, last_year, 12, 31, 'yes')
#
#---- make plots (only when the day this year's directory is created)
#
        if chk_new == 0:
            plot_events(lastYear_dir)
#
#--- check whether there are any high events
#
        chkHighEvent(last_year)
#
#---- now work on this year's data
#
        if chk_new == 0:
            cmd = 'mkdir ' + web_dir + current_dir + '/'
            os.system(cmd)

        name = web_dir + 'Year' + str(year) + '/data' + str(year)
        fout = open(name, 'w')
        for ent in mit_data:
            atemp = re.split('\t+|\s+', ent)
            btemp = re.split(':', atemp[1])
#
#--- keep only the entries of the new year (day of year < 100)
#
            val = float(btemp[0])
            if val < 100:
                fout.write(ent)
                fout.write('\n')
        fout.close()
        asrf.removeDuplicated(name)
#
#--- separate data into each category
#
        separate_data(name, current_dir)
        asrf.acis_sci_run_print_html(web_dir, year, month, day, 'no')
#
#--- plot data
#
        plot_events(current_dir)
#
#--- check whether there are any high events
#
        chkHighEvent(year)
#
#--- update long term data tables
#
        longTermTable(year)
#
#--- plot long term trends
#
        plot_events('Long_term/')
#
#--- update all_data file
#
        update_all_data(mit_data, year)
#---------
#--- no year change... business as usual
#---------
    else:
        name = web_dir + 'Year' + str(year) + '/data' + str(year)
#
#--- fix: was mcf.chkFile('name') --- the literal string 'name' never exists,
#--- so the data file was re-opened in 'w' mode and truncated on every run
#
        chk = mcf.chkFile(name)
        if chk == 0:
            fout = open(name, 'w')
        else:
            fout = open(name, 'a')

        for ent in mit_data:
            fout.write(ent)
            fout.write('\n')
        fout.close()
#
#--- remove duplicated lines
#
        asrf.removeDuplicated(name)
#
#--- separate data into each category
#
        separate_data(name, current_dir)
#
#--- update html pages
#
        asrf.acis_sci_run_print_html(web_dir, year, month, day, 'yes')
#
#--- plot trends
#
        plot_events(current_dir)
#
#--- check whether there are any high events
#
        chkHighEvent(year)
#
#--- update long term data tables
#
        longTermTable(year)
#
#--- plot long term trends
#
        plot_events('Long_term/')
#
#--- update all_data file
#
        update_all_data(mit_data, year)
def check_date(comp_test=''):
    """
    check wether there is an output directory and if it is not, create one

    Input:  comp_test   --- if it is "test", a fixed test date (2013-02-13) is used
    Output: uyear       --- the current year
            umon        --- the current month
            mon_name    --- the current output direcotry (if it is not there, created)

    The function also builds parallel start/end year-month-day lists covering the
    last 10 days; when the 10-day window crosses a month (or year) boundary,
    two periods are recorded (tot_ent = 2) and a directory is made for each month.
    """
#
#--- period boundary accumulators (parallel lists, one entry per month touched)
#
    start_year = []
    start_month = []
    start_date = []
    end_year = []
    end_month = []
    end_date = []
    tot_ent = 1

    if comp_test == 'test':
#
#--- test case, date is fixed
#
        tyear = 2013
        tmon = 2
        tday = 13
        uyear = tyear
        umon = tmon
    else:
#
#--- find today's date (note: `min` shadows the builtin, kept as-is)
#
        [uyear, umon, uday, hours, min, sec, weekday, yday, dst] = tcnv.currentTime()
        tyear = uyear
        tmon = umon
        tday = uday

    end_year.append(tyear)
    end_month.append(tmon)
    end_date.append(tday)
#
#--- check 10 days ago
#
    lday = tday - 10
    lmon = tmon
    lyear = tyear

    if lday < 1:
#
#--- if 10 days ago is in the last month, set the starting time in the last month;
#--- the current month's period then starts on day 1
#
        tot_ent = 2
        start_year.append(tyear)
        start_month.append(tmon)
        start_date.append(1)

        if tmon == 5 or tmon == 7 or tmon == 10 or tmon == 12:
#
#--- previous month has 30 days (Apr, Jun, Sep, Nov)
#
            lday += 30
            lmon = tmon - 1
            end_year.append(tyear)
            end_month.append(lmon)
            end_date.append(30)
            start_year.append(tyear)
            start_month.append(lmon)
            start_date.append(lday)

        elif tmon == 2 or tmon == 4 or tmon == 6 or tmon == 8 or tmon == 9 or tmon == 11:
#
#--- previous month has 31 days
#
            lday += 31
            lmon = tmon - 1
            end_year.append(tyear)
            end_month.append(lmon)
            end_date.append(31)
            start_year.append(tyear)
            start_month.append(lmon)
            start_date.append(lday)

        elif tmon == 3:
#
#--- last month is in Feb; adjust for leap years
#
            fday = 28
            if tcnv.isLeapYear(tyear) > 0:
                fday = 29
            lday += fday
            lmon = tmon -1
            end_year.append(tyear)
            end_month.append(lmon)
            end_date.append(fday)
            start_year.append(tyear)
            start_month.append(lmon)
            start_date.append(lday)

        elif tmon == 1:
#
#--- last month is the year before
#--- NOTE(review): tyear (not lyear) is appended to both year lists here,
#--- so the December period is tagged with the new year --- looks like lyear
#--- was intended; confirm before changing
#
            lday += 31
            lmon = 12
            lyear = tyear -1
            end_year.append(tyear)
            end_month.append(lmon)
            end_date.append(31)
            start_year.append(tyear)
            start_month.append(lmon)
            start_date.append(lday)
    else:
#
#--- 10 days ago is in the same month
#
        start_year.append(lyear)
        start_month.append(lmon)
        start_date.append(lday)
#
#--- reverse the lists so the oldest period comes first
#
    start_year.reverse()
    start_month.reverse()
    start_date.reverse()
    end_year.reverse()
    end_month.reverse()
    end_date.reverse()
#
#--- start checking whether directory exists. if not create it
#
    no = 0
    for dmon in(start_month):
        cmonth = tcnv.changeMonthFormat(dmon)       #--- convert digit to letter month
        ucmon = cmonth.upper()
        mon_name = web_dir + '/' + ucmon + str(start_year[no])
        no += 1
        chk = mcf.chkFile(mon_name)
        if chk == 0:
            cmd = 'mkdir ' + mon_name
            os.system(cmd)
#
#--- mon_name is the last (most recent) month's directory
#
    return (uyear, umon, mon_name)
def createGroupHtmlPage():
    """
    create html pages to display trend plots under each group

    input:  none, but it will create plot lists from plot_dir
    output: html_dir/limit_trend.html and plot_dir/<gourp name>.html

    FIX: two generated lines were assigned to `line` and then overwritten
    before ever being written out (the "Most recent 6 month values" <li>
    item and the "Data Table" <h3> header); the missing write calls are
    restored below.
    """
#
#--- read group names
#
    cmd = 'ls -d ' + plot_dir + '* >' + ztemp
    os.system(cmd)
    f = open(ztemp, 'r')
    dlist = [line.strip() for line in f.readlines()]
    f.close()
    cmd = 'rm ' + ztemp
    os.system(cmd)
#
#--- create/update the top html page
#
    out_name1 = html_dir + 'limit_trend.html'
    fo = open(out_name1, 'w')

    line = '<!DOCTYPE html>\n<html>\n'
    fo.write(line)
    line = '<head>\n<title>MTA Trending Page</title>\n'
    fo.write(line)
    line = "<meta http-equiv='Content-Type' content='text/html; charset=utf-8' />\n"
    fo.write(line)
    line = '<link rel="stylesheet" type="text/css" href="/mta/REPORTS/Template/mta_monthly.css" />\n\n'
#    line = '<link rel="stylesheet" type="text/css" href="/data/mta4/www/REPORTS/Template/mta_monthly.css" />\n\n'
    fo.write(line)
    line = '<style type="text/css">\n'
    fo.write(line)
    line = 'table{text-align:center;margin-left:auto;margin-right:auto;border-style:solid;border-spacing:8px;border-width:2px;border-collapse:separate}\n'
    fo.write(line)
    line = 'td{text-align:center;padding:8px}\n'
    fo.write(line)
    line = '</style>\n'
    fo.write(line)
    line = '</head>\n<body>\n\n'
    fo.write(line)
    line = '<h2 style="padding-bottom:20px">MTA Trending</h2>\n\n'
    fo.write(line)
    line = '<p style="padding-bottom:15px">The following pages show three trending plots of MSID values for each subsystem '
    fo.write(line)
    line = 'as they have evolved over the course of the mission.'
    fo.write(line)
    line = 'The left plot is MTA Trends/Derivatives Plot. For more details, please go to <a href="http://cxc.cfa.harvard.edu/mta/DAILY/mta_deriv/">MTA Trends/Derivatives</a> page. '
    fo.write(line)
    line = 'The center plot is MTA Envelope Trending. For more detials, please go to <a href="http://asc.harvard.edu/mta_days/mta_envelope_trend/">MTA Trending: Envelope Trending</a> page. '
    fo.write(line)
    line = 'The right plot is history of upper and lower limits of each msid for monitoring and trending purposes. These limits are, however, not used prior to XXX of 2012. \n'
    fo.write(line)
    line = 'The limits of each MSID are created as following:</p>\n'
    fo.write(line)
    line = '<ul>\n'
    fo.write(line)
    line = '<li>The average and standard deviation of each MSID are computed for 6 month periods for the entire period.</li>\n'
    fo.write(line)
    line = '<li>The standard deviations (but not average) are further smoothed by taking past 2 year moving averages. \n'
    fo.write(line)
    line = '(For example, the value given for January 2003 is the average of the 6 month averages from January 2001 to January 2003.)</li>\n'
    fo.write(line)
    line = '<li><em style="color:yellow">Yellow lines</em> are set at the center value (the average) plus or minus 4 standard deviation aways.</li>\n'
    fo.write(line)
    line = '<li><em style="color:red">Red lines</em> are set at the center value (the average) plus or minus 5 standard deviation aways.</li>\n'
    fo.write(line)
    line = '<li>Most recent 6 month values of each MSID are taken as MTA Limits.</li>\n'
    fo.write(line)          #--- FIX: this write was missing; the <li> above was silently discarded
    line = '</ul>'
    fo.write(line)
    line = '<p style="padding-top:10px;padding-bottom:25px">You can find the most recent MTA limit table at <a href="./Data/os_limit_table" target="blank">MTA Limit Table</a></p>.\n'
    fo.write(line)
#
#--- check each group
#
    line = '<h2 style="padding-bottom:20px">MTA Trending Plots</h2>\n\n'
    fo.write(line)
    line = '<p>The following table lists three trend plots of each msid in the named groups. To see the plots, '
    fo.write(line)
    line = 'please click the group name. It will open the trend plot page of the group.</p> '
    fo.write(line)
    line = '<p>The top panel of the trending plot shows thedata and its trend and the bottom panel shows the deviation. '
    fo.write(line)
    line = 'If you click the plot, you can enlarge the plot.</p> '
    fo.write(line)
    line = '<p>A green line of the envelope plot is a moving average of the data and blue lines are estimated outer limits of the data range. '
    fo.write(line)
    line = 'If the data points are colored in magenda, the data points are in yellow limits, and if they are in red, they are in red limits.</p>'
    fo.write(line)
    line = '<p style="padding-bottom:40px">In each limit plot, the blue line indicates the (moving) average of the value of the msid, the yellow lines indicate lower and '
    fo.write(line)
    line = 'upper yellow limits, and red lines indicate lower and upper red limits. '
    fo.write(line)
    line = 'Note that if the plotting range of the limit plot is smaller than 1, it plots with fractinal value and shows the base '
    fo.write(line)
    line = 'value to add to convert back the original range.\n\n'
    fo.write(line)
    line = '<div><table border=1>\n'
    fo.write(line)

    ecnt = 0
    for group in dlist:
        m1 = re.search('.html', group)              #---- ignore the name ends with "html"
        if m1 is None:
            temp = re.split(plot_dir, group)
            gname = temp[1]
#
#--- create indivisual html pages
#
#            out_name1 = group + '.html'
            out_name1 = './Plots/' + gname + '.html'        #---- THIS IS THE LIVE ONE !!!!!!
###            out_name1 = './Plots_test/' + gname + '.html'
            if ecnt == 0:
                fo.write('<tr>\n')

            line = '<td><a href="' + out_name1 + '">' + gname + '</a></td>\n'   #--- add line to the top html page
            fo.write(line)
#
#--- 4 entries per raw
#
            if ecnt > 2:
                ecnt = 0
                line = '</tr>\n'
                fo.write(line)
            else:
                ecnt += 1
#
#--- creating a html page for each group
#
            out_name2 = html_dir + out_name1
            fo2 = open(out_name2, 'w')

            line = '<!DOCTYPE html>\n<html>\n<head>\n<title>' + gname + '</title>\n'
            line = line + "<meta http-equiv='Content-Type' content='text/html; charset=utf-8' />\n"
            line = line + '<link rel="stylesheet" type="text/css" href="http://asc.harvard.edu/mta/REPORTS/Template/mta.css" />\n'
            line = line + '<script>\n'
            line = line + 'function WindowOpener(imgname) {\n'
            line = line + ' msgWindow = open("","displayname","toolbar=no,directories=no,menubar=no,location=no,scrollbars=no,status=no,width=720,height=550,resize=no");\n'
            line = line + ' msgWindow.document.clear();\n'
            line = line + ' msgWindow.document.write("<html><title>Trend plot: "+imgname+"</title>");\n'
            line = line + ' msgWindow.document.write("<body bgcolor=\'black\'>");\n'
            line = line + ' msgWindow.document.write("<img src=\'http://cxc.cfa.harvard.edu/mta/DAILY/mta_deriv/"+imgname+"\' border=0 width=720 height=550><P></body></html>")\n'
            line = line + ' msgWindow.document.close();\n'
            line = line + ' msgWindow.focus();\n'
            line = line + '}\n'
            line = line + 'function WindowOpener2(imgname) {\n'
            line = line + ' msgWindow = open("","displayname","toolbar=no,directories=no,menubar=no,location=no,scrollbars=no,status=no,width=720,height=570,resize=no");\n'
            line = line + ' msgWindow.document.clear();\n'
            line = line + ' msgWindow.document.write("<html><title>Envelope plot: "+imgname+"</title>");\n'
            line = line + ' msgWindow.document.write("<body bgcolor=\'black\'>");\n'
            line = line + ' msgWindow.document.write("<img src=\'http://cxc.cfa.harvard.edu/mta_days/mta_envelope_trend/Full_range/"+imgname+"\' border=0 width=720 height=550><P></body></html>")\n'
            line = line + ' msgWindow.document.close();\n'
            line = line + ' msgWindow.focus();\n'
            line = line + '}\n'
            line = line + '</script>\n'
            line = line + '</head>\n<body>\n\n'
            fo2.write(line)

            line = '<h2> Group: ' + gname + '</h2>\n\n'
            fo2.write(line)
            line = '<a href="http://cxc.cfa.harvard.edu/mta_days/mta_limit_table/"><strong>Back To The Main Page</strong></a><br /><br />'
            fo2.write(line)
            line = '<h3 style="padding-top:15px;padding-bottom:15px">Data Table: <a href="' + data_dir + gname + '">' + gname + '</a></h3>\n\n'
            fo2.write(line)     #--- FIX: this write was missing; the header above was silently discarded
#
#--- find out plot names
#
            cmd = 'ls ' + group + '/* >' + ztemp
            os.system(cmd)
            f = open(ztemp, 'r')
            plist = [line.strip() for line in f.readlines()]
            f.close()
            cmd = 'rm ' + ztemp
            os.system(cmd)
#
#--- create a table with plots: three column format
#
##            line = '<table style="padding-top:30px;border-width:0px;border-spacing:10px">\n'
            line = '<table style="border-width:0px">\n'
            fo2.write(line)

            for ent in plist:
                m2 = re.search('png', ent)
                if m2 is not None:
                    temp = re.split(group, ent)
                    pname = temp[1]
#
#--- trending plot
#
                    oname = pname.replace('/', '')
                    oname = oname.replace('.png', '_avgA.gif')
#
#--- deriv gif names of msids starting with a digit carry a leading underscore
#
                    try:
                        n = int(oname[0])
                        oname = '_' + oname
                    except:
                        pass
                    ptitle = oname.replace('_avgA.gif', '')
                    ptitle = ptitle.upper()
                    line = '<tr><th style="font-size:140%;text-align:left" colspan=3>' + ptitle + '</th></tr>\n'
                    fo2.write(line)
                    line = '<tr><th>Trending Plot</th><th>Envelope Plot</th><th>Limit Plot</tr>\n'
                    fo2.write(line)
#
#--- envelope plot
#
                    ename = pname.replace('/', '')
                    ename = ename.replace('.png', '_plot.gif')
                    if ename[0] == '_':
                        ename = ename[1:]

                    pdir = gname.upper()
#
#--- envelope dir names drop a trailing digit; 2A/2B groups share SPCELECA
#
                    try:
                        n = int(pdir[len(pdir) - 1])
                        pdir = pdir[:-1]
                    except:
                        pass
                    m1 = re.search('2A', pdir)
                    m2 = re.search('2B', pdir)
                    if m1 is not None or m2 is not None:
                        pdir = 'SPCELECA'

                    jline = '<a href="javascript:WindowOpener(\'' + oname + '\')">'
                    line = '<tr><td style="text-align:center">'

                    ftest = mcf.chkFile('/data/mta4/www/DAILY/mta_deriv/', oname)
                    if ftest > 0:
                        line = line + jline + '<img src="http://cxc.cfa.harvard.edu/mta/DAILY/mta_deriv/' + oname + '" alt="' + oname + '" style="width:450px" ></a><br />\n'
                        line = line + jline + '<strong style="padding-right:10px">Enlarge Trend Plot</strong></a>\n</td>\n'
                    else:
                        line = line + '<td style="background-color:black"><img src="http://cxc.cfa.harvard.edu/mta_days/mta_limit_table/no_data.png" alt="no data" style="width:500px"></td>\n'

                    ftest = mcf.chkFile('/data/mta/www/mta_envelope_trend/Full_range/', pdir)
                    if ftest > 0:
                        line = line + '<td><img src="http://cxc.cfa.harvard.edu/mta_days/mta_envelope_trend/Full_range/' + pdir + '/Plots/' + ename + '" alt="' + ename + '" style="width:500px"></td>\n'
                    else:
                        line = line + '<td style="background-color:black"><img src="http://cxc.cfa.harvard.edu/mta_days/mta_limit_table/no_data.png" alt="no data" style="width:500px"></td>\n'

                    line = line + '<td><img src="./' + gname + '/' + pname + '" alt ="' + pname + '" style="width:500px"></td></tr>\n'
                    fo2.write(line)

            line = '</table>\n'
            fo2.write(line)
            line = '<a href="http://cxc.cfa.harvard.edu/mta_days/mta_limit_table/" style="padding-top:20px"><strong>Back To The Main Page</strong></a><br /><br />'
            fo2.write(line)
            line = '</body>\n</html>\n'
            fo2.write(line)
            fo2.close()
#
#--- close the group table on the top page, padding the last row if needed
#
    if ecnt == 0:
        line = '</table></div>\n</div>\n<div style="padding-top:20px;padding-bottom:10px">\n<hr />\n</div>\n'
        fo.write(line)
    else:
        for k in range(ecnt, 4):
            line = '<td> </td>'
            fo.write(line)
        line = '</tr>\n</table></div>\n<div style="padding-top:20px;padding-bottom:10px">\n<hr />\n</div>\n'
        fo.write(line)
#
#--- Today's date
#
    dtime = tcnv.currentTime('Display')
    line = 'Last Update: ' + dtime
    fo.write(line)
    line = '<p style="padding-top:10px">If you have any questions about this page, please contact <a href="mailto:[email protected]">[email protected]</a>.</p>'
    fo.write(line)
    line = '</body>\n</html>\n'
    fo.write(line)
    fo.close()
def createCumulative(year, month, detector, type, arch_dir, i=0): 'create cumulative hrc data for a given year and month' # #--- find the previous period # pyear = year pmonth = month -1 if pmonth < 1: pmonth = 12 pyear -= 1 syear = str(year) smonth = str(month) if month < 10: smonth = '0' + smonth spyear = str(pyear) spmonth = str(pmonth) if pmonth < 10: spmonth = '0' + spmonth if detector == 'HRC-I': inst = 'HRCI' else: inst = 'HRCS' # #--- set file names # if type == 'center': hrc = inst + '_' + smonth + '_' + syear + '.fits.gz' chrc = inst + '_08_1999_' + spmonth + '_' + spyear + '.fits.gz' chrc2 = inst + '_08_1999_' + smonth + '_' + syear + '.fits' else: hrc = inst + '_' + smonth + '_' + syear + '_' + str(i) + '.fits.gz' chrc = inst + '_09_1999_' + spmonth + '_' + spyear + '_' + str(i) + '.fits.gz' chrc2 = inst + '_09_1999_' + smonth + '_' + syear + '_' + str(i) + '.fits' # #---- if the monthly file exists, reduce the size of the file before combine it into a cumulative data # cdir = arch_dir + '/Month_hrc/' chk = mtac.chkFile(cdir, hrc) #---- checking hrc exisits or not if chk > 0: line = arch_dir + '/Month_hrc/' + hrc + '[opt type=i2,null=-99]' cmd1 = "/usr/bin/env PERL5LIB=" cmd2 = ' dmcopy infile="' + line + '" outfile="./ztemp.fits" clobber="yes"' cmd = cmd1 + cmd2 bash(cmd, env=ascdsenv) cmd1 = "/usr/bin/env PERL5LIB=" cmd2 = ' dmimgcalc infile=' + arch_dir + 'Cumulative_hrc/' + chrc + ' infile2=ztemp.fits outfile =' + chrc2 + ' operation=add clobber=yes' cmd = cmd1 + cmd2 bash(cmd, env=ascdsenv) os.system('rm ./ztemp.fits') cmd = 'gzip ' + chrc2 os.system(cmd) cmd = 'mv ' + chrc2 + '.gz ' + arch_dir + 'Cumulative_hrc/.' os.system(cmd) # #--- if the monthly fie does not exist, just copy the last month's cumulative data # else: try: cmd = 'cp ' + arch_dir + 'Cumulative_hrc/' + chrc + ' ' + arch_dir + 'Cumulative_hrc/' + chrc2 + '.gz' os.system(cmd) except: pass
#-------------------------------------------------------------------- if __name__ == '__main__': # #--- check whether this is a test case # if len(sys.argv) == 2: if sys.argv[1] == 'test': #---- this is a test case comp_test = 'test' else: comp_test = 'real' else: comp_test = 'real' # #--- if this is a test case, check whether output file exists. If not, creaete it # if comp_test == 'test': chk = mcf.chkFile(test_web_dir) if chk == 0: cmd = 'mkdir ' + test_web_dir os.system(cmd) # #--- now call the main function # sci_run_get_radiation_data()
def get_mit_data(tyear): """ this function extracts data from the MIT web site and select out the newest part by comparing the data to the current data saved locally. Input: tyear --- year of the last save data, data will read the data from MIT web site and a locat data (data_<tyear>) Output: mit_data """ # #--- first find out the latest version of phase by reading main html page #--- here is the lnyx script to obtain web page data # if comp_test == 'test': last_phase = 71 first_phase = 71 elif comp_test == 'test2': last_phase = 70 first_phase = 70 else: phase_list = createPhaseList() plen = len(phase_list) if plen > 3: last_phase = phase_list[plen - 1] first_phase = last_phase - 3 else: exit(1) # #--- extract data needed # new_data = getNewData(first_phase, last_phase) # #--- if there is no new data, stop the entire operation # if len(new_data) == 0: exit(1) # #--- read column names --- this is the name of columns we need to save # f = open(col_names, 'r') col_list = [line.strip() for line in f.readlines()] f.close() # #---extract specified column data # new_data_save = extractElements(new_data, col_list) # #---- read the past data # pname = web_dir + 'Year' + str(tyear) + '/data' + str(tyear) chk = mcf.chkFile(pname) # #--- if there is no data_<tyear> existed, create an empty file for convenience. 
# if chk == 0: fo = open(pname, 'w') fo.close() old_data = [] else: f = open(pname, 'r') old_data = [line.strip() for line in f.readlines()] f.close() # #--- adjust the last few entries of the old_data as they might be modified while new data come in # adjstPastData(old_data, new_data_save) # #--- clean up old and new data files just created (removing duplicate and sorting) # cleanup('./Working_dir/old_data', 1) cleanup('./Working_dir/zdata_out', 1) name2 = pname + '~' cmd = 'mv ./Working_dir/old_data ' + name2 os.system(cmd) # #--- read cleaned current mit data # f = open('./Working_dir/zdata_out', 'r') mit_data = [line.strip() for line in f.readlines()] f.close() return mit_data
def report_error(): """ read errors from <cup_usr_list>_error_list, sort it out, clean, and send out email Input: none but read from <cup_usr_list>_error_list Output: email sent out """ # #--- find the current time # [year, mon, day, hours, min, sec, weekday, yday, dst] = tcnv.currentTime("Local") # #--- create surfix for files which will be saved in Past_errors directory # smon = str(mon) if mon < 10: smon = '0' + smon sday = str(day) if day < 10: sday = '0' + sday tail = str(year) + smon + sday for tag in cpu_usr_list: efile = house_keeping + 'Records/' + tag + '_error_list' pfile = house_keeping + 'Records/Past_errors/' + tag + '_error_list_' + tail prev_line = '' chk = mcf.chkFile(efile) if chk > 0: # #--- read error messages from the file # f = open(efile, 'r') data = [line.strip() for line in f.readlines()] f.close() # #--- sort the data so that we can correct messages to each cron job together # data.sort() task_list = [] time_list = [] mssg_list = [] for ent in data: atemp = re.split(' : ' , ent) task_list.append(atemp[0]) stime = int(atemp[1]) dtime = tcnv.axTimeMTA(stime) time_list.append(dtime) mssg_list.append(atemp[2]) # #--- write out cron job name # fo = open(zspace, 'w') cname = task_list[0] line = '\n\n' + cname + '\n____________________\n\n' fo.write(line) for i in range(0, len(mssg_list)): if task_list[i] != cname: cname = task_list[i] line = '\n\n' + cname + '\n____________________\n\n' fo.write(line) # #--- create each line. if it is exactly same as one line before, skip it # line = time_list[i] + ' : ' + mssg_list[i] + '\n' if line != prev_line: fo.write(line) prev_line = line fo.close() # #--- send email out # # cmd = 'cp ' + zspace + ' ' + '/data/mta/Script/Cron_check/Scripts/' + tag # os.system(cmd) send_mail(tag, email_list) # #--- move the error list to Past_errors directory # cmd = 'mv ' + efile + ' ' + pfile os.system(cmd)
def plotPanel(xmin, xmax, yMinSets, yMaxSets, xSets, ySets, xname, yname, entLabels, mksize=1.0, lwidth=1.5):
    """
    This function plots multiple data in separate panels

    Input:  xmin, xmax, ymin, ymax: plotting area
            xSets:    a list of lists containing x-axis data
            ySets:    a list of lists containing y-axis data
            yMinSets: a list of ymin
            yMaxSets: a list of ymax
            entLabels:a list of the names of each data
            mksize:   a size of maker
            lwidth:   a line width
    Output: a png plot: out.png
            return 1 if the plot is crated, if not 0

    NOTE: Python 2 only --- panels are created via `exec` statements that build
    variables ax0, ax1, ... dynamically.
    """
#
#--- set line color list (panel i uses colorList[i], so at most 10 panels)
#
    colorList = ('blue', 'green', 'red', 'aqua', 'lime', 'fuchsia', 'maroon', 'black', 'yellow', 'olive')
#
#--- close all opened plot
#
    plt.close('all')
#
#---- set a few parameters
#
    mpl.rcParams['font.size'] = 9
    props = font_manager.FontProperties(size=9)
    plt.subplots_adjust(hspace=0.08)

    tot = len(entLabels)
#
#--- start plotting each data
#
    for i in range(0, len(entLabels)):
        axNam = 'ax' + str(i)
#
#--- setting the panel position (subplot spec "<tot>1<j>": tot rows, 1 column)
#--- NOTE(review): the ', sharex=ax0' variant is assigned and then immediately
#--- overwritten by the plain spec, so sharing the x axis never takes effect ---
#--- confirm whether that was intentional before changing
#
        j = i + 1
        if i == 0:
            line = str(tot) + '1' + str(j)
        else:
            line = str(tot) + '1' + str(j) + ', sharex=ax0'
            line = str(tot) + '1' + str(j)

        exec "%s = plt.subplot(%s)" % (axNam, line)
        exec "%s.set_autoscale_on(False)" % ( axNam)      #---- these three may not be needed for the new pylab, but
        exec "%s.set_xbound(xmin,xmax)" % ( axNam)        #---- they are necessary for the older version to set
        exec "%s.set_xlim(xmin=xmin, xmax=xmax, auto=False)" % (axNam)
        exec "%s.set_ylim(ymin=yMinSets[i], ymax=yMaxSets[i], auto=False)" % ( axNam)

        xdata = xSets[i]
        ydata = ySets[i]
#
#---- actual data plotting
#
        p, = plt.plot(xdata, ydata, color=colorList[i], marker='.', markersize=mksize, lw=lwidth)
#
#--- add legend
#
        leg = legend([p], [entLabels[i]], prop=props, loc=2)
        leg.get_frame().set_alpha(0.5)

        exec "%s.set_ylabel(yname, size=8)" % (axNam)
#
#--- add x ticks label only on the last panel
#
    for i in range(0, tot):
        ax = 'ax' + str(i)
        if i != tot - 1:
            exec "line = %s.get_xticklabels()" % (ax)
            for label in line:
                label.set_visible(False)
        else:
            pass

    xlabel(xname)
#
#--- set the size of the plotting area in inch (width: 10.0in, height 2.08in x number of panels)
#
    fig = matplotlib.pyplot.gcf()
    height = (2.00 + 0.08) * tot
    fig.set_size_inches(10.0, height)
#
#--- save the plot in png format
#
    plt.savefig('out.png', format='png', dpi=100)

    return mcf.chkFile('./out.png')
def createGroupHtmlPage():
    """
    create html pages to display trend plots under each group

    input:  none, but it will create plot lists from plot_dir
    output: html_dir/limit_trend.html and plot_dir/<gourp name>.html

    FIXES: (1) two generated lines were assigned to `line` and then
    overwritten before being written out (the "Most recent 6 month values"
    <li> item and the "Data Table" <h3> header) --- the missing write calls
    are restored; (2) the emitted CSS contained the invalid property name
    "text-aligne", corrected to "text-align".
    """
#
#--- read group names
#
    cmd = 'ls -d ' + plot_dir + '* >' + ztemp
    os.system(cmd)
    f = open(ztemp, 'r')
    dlist = [line.strip() for line in f.readlines()]
    f.close()
    cmd = 'rm ' + ztemp
    os.system(cmd)
#
#--- create/update the top html page
#
    out_name1 = html_dir + 'limit_trend.html'
    fo = open(out_name1, 'w')

    line = '<!DOCTYPE html>\n<html>\n'
    fo.write(line)
    line = '<head>\n<title>MTA Trending Page</title>\n'
    fo.write(line)
    line = '<link rel="stylesheet" type="text/css" href="/mta/REPORTS/Template/mta_monthly.css" />\n\n'
#    line = '<link rel="stylesheet" type="text/css" href="/data/mta4/www/REPORTS/Template/mta_monthly.css" />\n\n'
    fo.write(line)
    line = '<style type="text/css">\n'
    fo.write(line)
    line = 'table{text-align:center;margin-left:auto;margin-right:auto;border-style:solid;border-spacing:8px;border-width:2px;border-collapse:separate}\n'
    fo.write(line)
    line = 'td{text-align:center;padding:8px}\n'       #--- FIX: was "text-aligne" (invalid CSS property)
    fo.write(line)
    line = '</style>\n'
    fo.write(line)
    line = '</head>\n<body>\n\n'
    fo.write(line)
    line = '<h2 style="padding-bottom:20px">MTA Trending</h2>\n\n'
    fo.write(line)
    line = '<p style="padding-bottom:15px">The following pages show three trending plots of MSID values for each subsystem '
    fo.write(line)
    line = 'as they have evolved over the course of the mission.'
    fo.write(line)
    line = 'The left plot is MTA Trends/Derivatives Plot. For more details, please go to <a href="http://cxc.cfa.harvard.edu/mta/DAILY/mta_deriv/">MTA Trends/Derivatives</a> page. '
    fo.write(line)
    line = 'The center plot is MTA Envelope Trending. For more detials, please go to <a href="http://asc.harvard.edu/mta_days/mta_envelope_trend/">MTA Trending: Envelope Trending</a> page. '
    fo.write(line)
    line = 'The right plot is history of upper and lower limits of each msid for monitoring and trending purposes. These limits are, however, not used prior to XXX of 2012. \n'
    fo.write(line)
    line = 'The limits of each MSID are created as following:</p>\n'
    fo.write(line)
    line = '<ul>\n'
    fo.write(line)
    line = '<li>The average and standard deviation of each MSID are computed for 6 month periods for the entire period.</li>\n'
    fo.write(line)
    line = '<li>The standard deviations (but not average) are further smoothed by taking past 2 year moving averages. \n'
    fo.write(line)
    line = '(For example, the value given for January 2003 is the average of the 6 month averages from January 2001 to January 2003.)</li>\n'
    fo.write(line)
    line = '<li><em style="color:yellow">Yellow lines</em> are set at the center value (the average) plus or minus 4 standard deviation aways.</li>\n'
    fo.write(line)
    line = '<li><em style="color:red">Red lines</em> are set at the center value (the average) plus or minus 5 standard deviation aways.</li>\n'
    fo.write(line)
    line = '<li>Most recent 6 month values of each MSID are taken as MTA Limits.</li>\n'
    fo.write(line)          #--- FIX: this write was missing; the <li> above was silently discarded
    line = '</ul><br /><br />'
    fo.write(line)
    line = '<p style="padding-bottom:25px">You can find the most recent MTA limit table at <a href="./Data/os_limit_table" target="blank">MTA Limit Table</a></p>.\n'
    fo.write(line)
#
#--- check each group
#
    line = '<h2 style="padding-bottom:20px">MTA Trending Plots</h2>\n\n'
    fo.write(line)
    line = '<p>The following table lists three trend plots of each msid in the named groups. To see the plots, '
    fo.write(line)
    line = 'please click the group name. It will open the trend plot page of the group.</p> '
    fo.write(line)
    line = '<p>The top panel of the trending plot shows thedata and its trend and the bottom panel shows the deviation. '
    fo.write(line)
    line = 'If you click the plot, you can enlarge the plot.</p> '
    fo.write(line)
    line = '<p>A green line of the envelope plot is a moving average of the data and blue lines are estimated outer limits of the data range. '
    fo.write(line)
    line = 'If the data points are colored in magenda, the data points are in yellow limits, and if they are in red, they are in red limits.</p>'
    fo.write(line)
    line = '<p style="padding-bottom:40px">In each limit plot, the blue line indicates the (moving) average of the value of the msid, the yellow lines indicate lower and '
    fo.write(line)
    line = 'upper yellow limits, and red lines indicate lower and upper red limits. '
    fo.write(line)
    line = 'Note that if the plotting range of the limit plot is smaller than 1, it plots with fractinal value and shows the base '
    fo.write(line)
    line = 'value to add to convert back the original range.\n\n'
    fo.write(line)
    line = '<div><table border=1>\n'
    fo.write(line)

    ecnt = 0
    for group in dlist:
        m1 = re.search('.html', group)              #---- ignore the name ends with "html"
        if m1 is None:
            temp = re.split(plot_dir, group)
            gname = temp[1]
#
#--- create indivisual html pages
#
#            out_name1 = group + '.html'
            out_name1 = './Plots/' + gname + '.html'        #---- THIS IS THE LIVE ONE !!!!!!
###            out_name1 = './Plots_test/' + gname + '.html'
            if ecnt == 0:
                fo.write('<tr>\n')

            line = '<td><a href="' + out_name1 + '">' + gname + '</a></td>\n'   #--- add line to the top html page
            fo.write(line)
#
#--- 4 entries per raw
#
            if ecnt > 2:
                ecnt = 0
                line = '</tr>\n'
                fo.write(line)
            else:
                ecnt += 1
#
#--- creating a html page for each group
#
            out_name2 = html_dir + out_name1
            fo2 = open(out_name2, 'w')

            line = '<!DOCTYPE html>\n<html>\n<head>\n<title>' + gname + '</title>\n'
            line = line + '<script type="text/javascript">\n'
            line = line + 'function WindowOpener(imgname) {\n'
            line = line + ' msgWindow = open("","displayname","toolbar=no,directories=no,menubar=no,location=no,scrollbars=no,status=no,width=720,height=550,resize=no");\n'
            line = line + ' msgWindow.document.clear();\n'
            line = line + ' msgWindow.document.write("<html><title>Trend plot: "+imgname+"</title>");\n'
            line = line + ' msgWindow.document.write("<body bgcolor=\'black\'>");\n'
            line = line + ' msgWindow.document.write("<img src=\'http://cxc.cfa.harvard.edu/mta/DAILY/mta_deriv/"+imgname+"\' border=0 width=720 height=550><P></body></html>")\n'
            line = line + ' msgWindow.document.close();\n'
            line = line + ' msgWindow.focus();\n'
            line = line + '}\n'
            line = line + 'function WindowOpener2(imgname) {\n'
            line = line + ' msgWindow = open("","displayname","toolbar=no,directories=no,menubar=no,location=no,scrollbars=no,status=no,width=720,height=570,resize=no");\n'
            line = line + ' msgWindow.document.clear();\n'
            line = line + ' msgWindow.document.write("<html><title>Envelope plot: "+imgname+"</title>");\n'
            line = line + ' msgWindow.document.write("<body bgcolor=\'black\'>");\n'
            line = line + ' msgWindow.document.write("<img src=\'http://cxc.cfa.harvard.edu/mta_days/mta_envelope_trend/Full_range/"+imgname+"\' border=0 width=720 height=550><P></body></html>")\n'
            line = line + ' msgWindow.document.close();\n'
            line = line + ' msgWindow.focus();\n'
            line = line + '}\n'
            line = line + '</script>\n'
            line = line + '</head>\n<body>\n\n'
            fo2.write(line)

            line = '<h2> Group: ' + gname + '</h2>\n\n'
            fo2.write(line)
            line = '<h3 style="padding-top:15px;padding-bottom:15px">Data Table: <a href="' + data_dir + gname + '">' + gname + '</a></h3>\n\n'
            fo2.write(line)     #--- FIX: this write was missing; the header above was silently discarded
#
#--- find out plot names
#
            cmd = 'ls ' + group + '/* >' + ztemp
            os.system(cmd)
            f = open(ztemp, 'r')
            plist = [line.strip() for line in f.readlines()]
            f.close()
            cmd = 'rm ' + ztemp
            os.system(cmd)
#
#--- create a table with plots: three column format
#
##            line = '<table style="padding-top:30px;border-width:0px;border-spacing:10px">\n'
            line = '<table>\n'
            fo2.write(line)

            for ent in plist:
                m2 = re.search('png', ent)
                if m2 is not None:
                    temp = re.split(group, ent)
                    pname = temp[1]
#
#--- trending plot
#
                    oname = pname.replace('/', '')
                    oname = oname.replace('.png', '_avgA.gif')
#
#--- deriv gif names of msids starting with a digit carry a leading underscore
#
                    try:
                        n = int(oname[0])
                        oname = '_' + oname
                    except:
                        pass
                    ptitle = oname.replace('_avgA.gif', '')
                    ptitle = ptitle.upper()
                    line = '<tr><th style="font-size:140%;text-align:left" colspan=3>' + ptitle + '</th></tr>\n'
                    fo2.write(line)
                    line = '<tr><th>Trending Plot</th><th>Envelope Plot</th><th>Limit Plot</tr>\n'
                    fo2.write(line)
#
#--- envelope plot
#
                    ename = pname.replace('/', '')
                    ename = ename.replace('.png', '_plot.gif')
                    if ename[0] == '_':
                        ename = ename[1:]

                    pdir = gname.upper()
#
#--- envelope dir names drop a trailing digit; 2A/2B groups share SPCELECA
#
                    try:
                        n = int(pdir[len(pdir)-1])
                        pdir = pdir[:-1]
                    except:
                        pass
                    m1 = re.search('2A', pdir)
                    m2 = re.search('2B', pdir)
                    if m1 is not None or m2 is not None:
                        pdir = 'SPCELECA'

                    jline = '<a href="javascript:WindowOpener(\'' + oname + '\')">'
                    line = '<tr><td style="text-align:center">'

                    ftest = mcf.chkFile('/data/mta4/www/DAILY/mta_deriv/',oname)
                    if ftest > 0:
                        line = line + jline + '<img src="http://cxc.cfa.harvard.edu/mta/DAILY/mta_deriv/' + oname + '" style="width:450px" ></a><br />\n'
                        line = line + jline + '<strong style="padding-right:10px">Enlarge Trend Plot</strong></a>\n</td>\n'
                    else:
                        line = line + '<td style="background-color:black"><img src="http://cxc.cfa.harvard.edu/mta_days/mta_limit_table/no_data.png" style="width:500px"></td>\n'

                    ftest = mcf.chkFile('/data/mta/www/mta_envelope_trend/Full_range/', pdir)
                    if ftest > 0:
                        line = line + '<td><img src="http://cxc.cfa.harvard.edu/mta_days/mta_envelope_trend/Full_range/' + pdir + '/Plots/' + ename + '" style="width:500px"></td>\n'
                    else:
                        line = line + '<td style="background-color:black"><img src="http://cxc.cfa.harvard.edu/mta_days/mta_limit_table/no_data.png" style="width:500px"></td>\n'

                    line = line + '<td><img src="./' + gname + '/' + pname + '" style="width:500px"></td></tr>\n'
                    fo2.write(line)

            line = '</table>\n'
            fo2.write(line)
            line = '</body>\n</html>\n'
            fo2.write(line)
            fo2.close()
#
#--- close the group table on the top page, padding the last row if needed
#
    if ecnt == 0:
        line = '</table></div>\n<br /><br />\n<hr />\n'
        fo.write(line)
    else:
        for k in range(ecnt, 4):
            line = '<td> </td>'
            fo.write(line)
        line = '</tr>\n</table></div>\n<br /><br />\n<hr />\n'
        fo.write(line)
#
#--- Today's date
#
    dtime = tcnv.currentTime('Display')
    line = 'Last Update: ' + dtime
    fo.write(line)
    line = '<br /><br />If you have any questions about this page, please contact <a href="mailto:[email protected]">[email protected]</a>.'
    fo.write(line)
    line = '</body>\n</html>\n'
    fo.write(line)
    fo.close()
def plotPanel(xmin, xmax, yMinSets, yMaxSets, xSets, ySets, xname, yname, entLabels, mksize=1.0, lwidth=1.5):
    """
    This function plots multiple data sets in vertically stacked panels.

    Input:  xmin, xmax --- x-axis plotting range (shared by all panels)
            yMinSets   --- a list of y-axis minima, one per panel
            yMaxSets   --- a list of y-axis maxima, one per panel
            xSets      --- a list of lists containing x-axis data
            ySets      --- a list of lists containing y-axis data
            xname      --- x-axis label (shown only under the bottom panel)
            yname      --- y-axis label (shown on every panel)
            entLabels  --- a list of the names of each data set
            mksize     --- a size of marker
            lwidth     --- a line width
    Output: a png plot: out.png
            return 1 if the plot is created, 0 otherwise
    """
#
#--- set line color list
#
    colorList = ('blue', 'green', 'red', 'aqua', 'lime', 'fuchsia', 'maroon', 'black', 'yellow', 'olive')
#
#--- close all opened plot
#
    plt.close('all')
#
#---- set a few parameters
#
    mpl.rcParams['font.size'] = 9
    props = font_manager.FontProperties(size=9)
    plt.subplots_adjust(hspace=0.08)

    tot = len(entLabels)
#
#--- start plotting each data set; keep the Axes objects in a list instead of
#--- building variable names with exec.  The old exec-based code also
#--- reassigned the subplot spec after the if/else, so "sharex=ax0" was never
#--- actually applied; creating the axes directly restores the intended
#--- shared x-axis.
#
    axes = []
    for i in range(0, tot):
        if i == 0:
            ax = plt.subplot(tot, 1, i + 1)
        else:
            ax = plt.subplot(tot, 1, i + 1, sharex=axes[0])
        axes.append(ax)

        ax.set_autoscale_on(False)
        ax.set_xbound(xmin, xmax)
        ax.set_xlim(xmin, xmax, auto=False)
        ax.set_ylim(yMinSets[i], yMaxSets[i], auto=False)
#
#---- actual data plotting
#
        p, = ax.plot(xSets[i], ySets[i], color=colorList[i], marker='.', markersize=mksize, lw=lwidth)
#
#--- add legend
#
        leg = ax.legend([p], [entLabels[i]], prop=props, loc=2)
        leg.get_frame().set_alpha(0.5)

        ax.set_ylabel(yname, size=8)
#
#--- add x tick labels only on the last panel
#
    for i in range(0, tot):
        if i != tot - 1:
            for label in axes[i].get_xticklabels():
                label.set_visible(False)

    plt.xlabel(xname)
#
#--- set the size of the plotting area in inch (width: 10.0in, height 2.08in x number of panels)
#
    fig = plt.gcf()
    height = (2.00 + 0.08) * tot
    fig.set_size_inches(10.0, height)
#
#--- save the plot in png format
#
    plt.savefig('out.png', format='png', dpi=100)

    return mcf.chkFile('./out.png')
def extract_head_info(file): ''' extreact information about the data from the fits file input: fits file name output: head_info =[fep, ccd, node, pblock, tstart, tstop, expcount, date_obs, date_end] ''' # #--- check whether the temp file exists. if so, remove it # cfile = exc_dir + 'zout' chk = mtac.chkFile(cfile) if chk > 0: cmd = 'rm ' + exc_dir + 'zout' os.system(cmd) # #--- extract head information using dmlist # cmd = 'dmlist infile=' + file + ' outfile = ' + exc_dir + 'zout opt=head' os.system(cmd) file = exc_dir + 'zout' f = open(file, 'r') data = [line.strip() for line in f.readlines()] f.close() cmd = 'rm ' + exc_dir + 'zout' os.system(cmd) # #--- find needed information # for ent in data: line = re.split('\s+|\t+', ent) m1 = re.search("FEP_ID", ent) m2 = re.search("CCD_ID", ent) m3 = re.search("NODE_ID", ent) m4 = re.search("PBLOCK", ent) m5 = re.search("TSTART", ent) m6 = re.search("TSTOP", ent) m7 = re.search("BEP", ent) m8 = re.search("EXPCOUNT", ent) m9 = re.search("DATE-OBS", ent) m10 = re.search("DATE-END", ent) if m1 is not None: fep = line[2] elif m2 is not None: ccd = line[2] elif m3 is not None: node = line[2] elif m4 is not None: pblock = int(line[2]) elif m5 is not None and m7 is None: tstart = float(line[2]) elif m6 is not None and m7 is None: tstop = float(line[2]) elif m8 is not None: expcount = line[2] elif m9 is not None: date_obs = line[2] elif m10 is not None: date_end = line[2] # #--- return the info # head_info = [ fep, ccd, node, pblock, tstart, tstop, expcount, date_obs, date_end ] return head_info
def comp_stat(file, year, month, out): """ compute statistics for the hrc image and print out the result: input: hrc image file, year, month, output file name. """ chk = mcf.chkFile(file) #--- checking whether the file exists if chk > 0: # #--- to avoid getting min value from the outside of the frame edge of a CCD, set threshold # try: cmd1 = "/usr/bin/env PERL5LIB=" cmd2 = ' /bin/nice -n15 dmimgthresh infile=' + file + ' outfile=zcut.fits cut="0:1.e10" value=0 clobber=yes' cmd = cmd1 + cmd2 bash(cmd, env=ascdsenv) cmd1 = "/usr/bin/env PERL5LIB=" cmd2 = ' dmstat infile=zcut.fits centroid=no >' + zspace cmd = cmd1 + cmd2 bash(cmd, env=ascdsenv) mcf.rm_file('./zcut.fits') f = open(zspace, 'r') data = [line.strip() for line in f.readlines()] f.close() except: data = [] val = 'NA' for ent in data: ent.lstrip() m = re.search('mean', ent) if m is not None: atemp = re.split('\s+|\t', ent) val = atemp[1] break if val != 'NA': (mean, dev, min, max , min_pos_x, min_pos_y, max_pos_x, max_pos_y) = readStat(zspace) mcf.rm_file(zspace) (sig1, sig2, sig3) = find_two_sigma_value(file) else: (mean, dev, min, max , min_pos_x, min_pos_y, max_pos_x, max_pos_y) = ('NA','NA','NA','NA','NA','NA','NA','NA') (sig1, sig2, sig3) = ('NA', 'NA', 'NA') else: (mean, dev, min, max , min_pos_x, min_pos_y, max_pos_x, max_pos_y) = ('NA','NA','NA','NA','NA','NA','NA','NA') (sig1, sig2, sig3) = ('NA', 'NA', 'NA') # #--- print out the results # chk = mcf.chkFile(out) #--- checking whether the file exists if chk > 0: f = open(out, 'a') else: f = open(out, 'w') if mean == 'NA': line = '%d\t%d\t' % (year, month) f.write(line) f.write('NA\tNA\tNA\tNA\tNA\tNA\tNA\tNA\tNA\n') else: line = '%d\t%d\t' % (year, month) line = line + '%5.6f\t%5.6f\t%5.1f\t(%d,%d)\t' % (float(mean), float(dev), float(min), float(min_pos_x), float(min_pos_y)) line = line + '%5.1f\t(%d,%d)\t%5.1f\t%5.1f\t%5.1f\n' % (float(max), float(max_pos_x), float(max_pos_y), float(sig1), float(sig2), float(sig3)) f.write(line) f.close()
def extract_bias_data(today_data, comp_test=''):
    """
    extract bias data using a given data list

    Input:  today_data --- a list of data fits files
            comp_test  --- if 'test', the bias info of the first usable file
                           is returned without writing any output
            also needs: <house_keeping>/Defect/bad_col_list --- a list of known
                        bad columns, one "ccd:column" entry per line
    Output: <data_dir>/Bias_save/CCD<ccd>/quad<quad>  see more in write_bias_data()
            <data_dir>/Info_dir/CCD<ccd>/quad<quad>   see more in printBiasInfo()
    """
#
#--- read the known bad column list once, outside the file loop
#--- (it was previously re-read from disk for every single fits file)
#
    bfile = house_keeping + 'Defect/bad_col_list'
    bdata = mcf.readFile(bfile)

    stime_list = []
    for dfile in today_data:
#
#--- check whether file exists
#
        chk = mcf.chkFile(dfile)
        if chk == 0:
            continue
#
#--- extract time stamp; skip the file if it cannot be determined
#
        stime = bcf.extractTimePart(dfile)
        if stime < 0:
            continue
#
#--- extract CCD information; only TIMED readmode data are processed
#
        [ccd_id, readmode, date_obs, overclock_a, overclock_b, overclock_c,\
         overclock_d] = bcf.extractCCDInfo(dfile)
        if readmode != 'TIMED':
            continue
#
#--- sort this CCD's bad columns into the four quadrants
#--- (full-chip column numbers; 256 columns per quadrant)
#
        bad_col0 = []
        bad_col1 = []
        bad_col2 = []
        bad_col3 = []
        for ent in bdata:
#
#--- skip none data (comment) lines
#
            m = re.search('#', ent)
            if m is not None:
                continue

            atemp = re.split(':', ent)
            dccd  = int(atemp[0])
            if dccd == ccd_id:
                val = int(atemp[1])
                if val <= 256:
                    bad_col0.append(val)
                elif val <= 512:
                    bad_col1.append(val - 256)
                elif val <= 768:
                    bad_col2.append(val - 512)
                elif val <= 1024:
                    bad_col3.append(val - 768)
#
#--- trim the data at the threshold = 4000 (negative values are zeroed, too)
#
        f = pyfits.open(dfile)
        sdata = f[0].data
        sdata[sdata < 0] = 0
        sdata[sdata > 4000] = 0
        f.close()
#
#--- compute and write out bias data
#
        result_list = bcf.extractBiasInfo(dfile)

        if comp_test == 'test':
            return result_list
        else:
#
#--- NOTE: the overclock values below overwrite the ones unpacked from
#--- extractCCDInfo() above (unchanged original behavior)
#
            [fep, dmode, srow, rowcnt, orcmode, dgain, biasalg, barg0, barg1, barg2, barg3, \
             overclock_a, overclock_b, overclock_c, overclock_d] = result_list

            write_bias_data(sdata, ccd_id, 0, overclock_a, stime, bad_col0)
            write_bias_data(sdata, ccd_id, 1, overclock_b, stime, bad_col1)
            write_bias_data(sdata, ccd_id, 2, overclock_c, stime, bad_col2)
            write_bias_data(sdata, ccd_id, 3, overclock_d, stime, bad_col3)
#
#---- more bias info
#
            printBiasInfo(ccd_id, 0, stime, fep, dmode, srow, rowcnt, orcmode, dgain, biasalg, barg0, barg1, barg2, barg3, overclock_a)
            printBiasInfo(ccd_id, 1, stime, fep, dmode, srow, rowcnt, orcmode, dgain, biasalg, barg0, barg1, barg2, barg3, overclock_b)
            printBiasInfo(ccd_id, 2, stime, fep, dmode, srow, rowcnt, orcmode, dgain, biasalg, barg0, barg1, barg2, barg3, overclock_c)
            printBiasInfo(ccd_id, 3, stime, fep, dmode, srow, rowcnt, orcmode, dgain, biasalg, barg0, barg1, barg2, barg3, overclock_d)

            stime_list.append(stime)
#
#--- now count how many CCDs are used for a particular observations and write out to list_of_ccd_no
#
    countObservation(stime_list)
def createCumulative(year, month, detector, type, arch_dir, i=0): 'create cumulative hrc data for a given year and month' # #--- find the previous period # pyear = year pmonth = month - 1 if pmonth < 1: pmonth = 12 pyear -= 1 syear = str(year) smonth = str(month) if month < 10: smonth = '0' + smonth spyear = str(pyear) spmonth = str(pmonth) if pmonth < 10: spmonth = '0' + spmonth if detector == 'HRC-I': inst = 'HRCI' else: inst = 'HRCS' # #--- set file names # if type == 'center': hrc = inst + '_' + smonth + '_' + syear + '.fits.gz' chrc = inst + '_08_1999_' + spmonth + '_' + spyear + '.fits.gz' chrc2 = inst + '_08_1999_' + smonth + '_' + syear + '.fits' else: hrc = inst + '_' + smonth + '_' + syear + '_' + str(i) + '.fits.gz' chrc = inst + '_09_1999_' + spmonth + '_' + spyear + '_' + str( i) + '.fits.gz' chrc2 = inst + '_09_1999_' + smonth + '_' + syear + '_' + str( i) + '.fits' # #---- if the monthly file exists, reduce the size of the file before combine it into a cumulative data # cdir = arch_dir + '/Month_hrc/' chk = mtac.chkFile(cdir, hrc) #---- checking hrc exisits or not if chk > 0: line = arch_dir + '/Month_hrc/' + hrc + '[opt type=i2,null=-99]' cmd = 'dmcopy infile="' + line + '" outfile="./ztemp.fits" clobber="yes"' os.system(cmd) cmd = 'dmimgcalc infile=' + arch_dir + 'Cumulative_hrc/' + chrc + ' infile2=ztemp.fits outfile =' + chrc2 + ' operation=add clobber=yes' os.system(cmd) os.system('rm ./ztemp.fits') cmd = 'gzip ' + chrc2 os.system(cmd) cmd = 'mv ' + chrc2 + '.gz ' + arch_dir + 'Cumulative_hrc/.' os.system(cmd) # #--- if the monthly fie does not exist, just copy the last month's cumulative data # else: try: cmd = 'cp ' + arch_dir + 'Cumulative_hrc/' + chrc + ' ' + arch_dir + 'Cumulative_hrc/' + chrc2 + '.gz' os.system(cmd) except: pass
def run_test():
    """
    check the latest L1 count rate and xmm orbital altitude, and send out a
    warning if the mta_XMM_alert file does not exist (or is older than 18 hrs).

    input:  none
    output: /pool1/mta_XMM_alert --- created/renewed if the alert condition holds
            warning email
            ./l1_alt_records     --- one record line appended per run
    """
#
#--- find the latest 30 mins of l1 average and their time span as there are
#--- often slight delay in time in the data acquisition
#
    [l1, start, stop] = l1_median()
#
#--- read xmm altitude data
#
    [atime, alt] = read_xmm_orbit()
#
#--- if the altitude of the satellite is lower than "alt_limit" during the time
#--- period, the condition is not met; stop the program
#
    height = 0
    for i in range(0, len(atime)):
        if atime[i] < start:
            continue
        elif atime[i] > stop:
            break
        else:
            if alt[i] > height:
                height = alt[i]
                stime  = atime[i]    # NOTE(review): this value is never used; it is
                                     # overwritten below before any read
#
#--- keep the record
#
    r_time = 0.5 * (start + stop)            # mid-point of the data time span
    stime  = tcnv.convertCtimeToYdate(r_time)

    line = str(stime) + ' : ' + str(r_time) + '\t\t' + str(round(l1,1)) + '\t\t' + str(round(height,1)) + '\n'
    fo = open('./l1_alt_records', 'a')
    fo.write(line)
    fo.close()

    # stop quietly when either threshold is not crossed
    # NOTE(review): relies on the builtin "exit"; sys.exit is the usual form
    if l1 < l1_limit:
        exit(1)
    if height < alt_limit:
        exit(1)
#
#--- both conditions are met; check alert file already exists
#
#
#--- keep the record of alert time
#
    keep_record(stime, height, l1)

    go = 0
#
#--- file does not exist
#
    if mcf.chkFile(alert_file) == 0:
        go = 1
#
#--- file was created more than 18 hrs (64800 sec) ago.
#
    else:
        if check_time_span(alert_file, 64800):
            go = 2
#
#--- if the file does not exist or more than 18 hrs past after creating the file,
#--- create/recreate the file and also send out a warning email.
#
    if go > 0:
#
#--- read the last 30 mins of data (the last 6 record lines)
#
        f = open('./l1_alt_records', 'r')
        adata = [line.strip() for line in f.readlines()]
        f.close()

        dline = ''
        dlen  = len(adata)
        for i in range(dlen-6, dlen):
            dline = dline + adata[i] + '\n'
#
#--- alt in kkm
#
        chigh = round((height/1000.0), 3)
#
#--- create email content
#
        line = 'Test threshold crossed, Altitude = ' + str(chigh) + ' kkm with '
        #line = line + 'L1 30 min average counts @ ' + str(round(l1,2)) + '.'
        line = line + 'L1 30 min median counts @ ' + str(round(l1,2)) + '.'
        line = line + '\n\n\n'
        line = line + 'Time \t (sec) \t\t L1 cnt Alt\n'
        line = line + '------------------------------------------------------\n'
        line = line + dline
        line = line + '\n\n\n'
        line = line + 'see:\n\n '
        line = line + '\t\thttps://cxc.cfa.harvard.edu/mta/RADIATION/XMM/ '
        line = line + '\n\nfor the current condition.\n'

        fo = open(zspace, 'w')
        fo.write(line)
        fo.close()

        # send the alert to both recipients, then remove the temp file
        cmd = 'cat ' + zspace + '|mailx -s\"Subject: mta_XMM_alert \n\" [email protected]'
        os.system(cmd)
        cmd = 'cat ' + zspace + '|mailx -s\"Subject: mta_XMM_alert \n\" [email protected]'
        os.system(cmd)
        cmd = 'rm ' + zspace
        os.system(cmd)
#
#--- create/renew alert_file (an empty time-stamp file)
#
        mcf.rm_file(alert_file)
        file = alert_file
        fo = open(file, 'w')
        fo.close()
def check_date(comp_test=''):
    """
    check whether there is an output directory for the current period(s) and,
    if not, create it

    Input:  comp_test --- if it is "test", a fixed test date (2013-02-13) is used
    Output: uyear     --- the current year
            umon      --- the current month
            mon_name  --- the current output directory (created if missing)
    """
    start_year  = []
    start_month = []
    start_date  = []
    end_year    = []
    end_month   = []
    end_date    = []

    if comp_test == 'test':
#
#--- test case, date is fixed
#
        tyear = 2013
        tmon  = 2
        tday  = 13
        uyear = tyear
        umon  = tmon
    else:
#
#--- find today's date
#
        [uyear, umon, uday, hours, mins, secs, weekday, yday, dst] = tcnv.currentTime()
        tyear = uyear
        tmon  = umon
        tday  = uday

    end_year.append(tyear)
    end_month.append(tmon)
    end_date.append(tday)
#
#--- check 10 days ago
#
    lday  = tday - 10
    lmon  = tmon
    lyear = tyear

    if lday < 1:
#
#--- 10 days ago falls in the last month: split the period into
#--- (1st of this month .. today) and (10 days ago .. end of last month)
#
        start_year.append(tyear)
        start_month.append(tmon)
        start_date.append(1)
#
#--- find the previous month and its length (Feb depends on leap year)
#
        if tmon == 1:
            lmon  = 12
            lyear = tyear - 1          # previous month is in the year before
            fday  = 31
        elif tmon == 3:
            lmon = 2
            fday = 29 if tcnv.isLeapYear(tyear) > 0 else 28
        elif tmon in (5, 7, 10, 12):   # previous month has 30 days
            lmon = tmon - 1
            fday = 30
        else:                          # tmon in (2, 4, 6, 8, 9, 11): 31 days
            lmon = tmon - 1
            fday = 31

        lday += fday
#
#--- bug fix: use lyear (not tyear) so that a January run points the second
#--- period at December of the PREVIOUS year (e.g. DEC2013, not DEC2014)
#
        end_year.append(lyear)
        end_month.append(lmon)
        end_date.append(fday)
        start_year.append(lyear)
        start_month.append(lmon)
        start_date.append(lday)
    else:
#
#--- 10 days ago is in the same month
#
        start_year.append(lyear)
        start_month.append(lmon)
        start_date.append(lday)
#
#--- reverse the lists so that the earlier period comes first
#--- (end_* lists are kept for parity with start_*; they are not used below)
#
    start_year.reverse()
    start_month.reverse()
    start_date.reverse()
    end_year.reverse()
    end_month.reverse()
    end_date.reverse()
#
#--- start checking whether directory exists. if not create it
#
    for no in range(0, len(start_month)):
        cmonth   = tcnv.changeMonthFormat(start_month[no])   #--- convert digit to letter month
        ucmon    = cmonth.upper()
        mon_name = web_dir + '/' + ucmon + str(start_year[no])

        if mcf.chkFile(mon_name) == 0:
            cmd = 'mkdir ' + mon_name
            os.system(cmd)

    return (uyear, umon, mon_name)