def convert_acistemp_into_c():
    """
    convert all acistemp fits files in K into that of C
    input:  none, but read from <data_dir>/Acistemp/*fits
    output: converted fits files in Compaciscent/*fits
    """
    outdir = data_dir + '/Compaciscent/'
    cmd    = 'ls ' + data_dir + '/Acistemp/*fits* > ' + zspace
    os.system(cmd)
    fits_list = mcf.read_data_file(zspace, remove=1)

    for fits in fits_list:
        atemp = re.split('\/', fits)
        fname = atemp[-1]
        btemp = re.split('_', fname)
        msid  = btemp[0]
#--- the msid column plus the common statistics columns
        cols  = [msid] + bcols

        flist = pyfits.open(fits)
        fdata = flist[1].data
        for col in cols:
            odata = fdata[col] - 273.15         #--- this is a numpy object
            fdata[col] = odata

        flist[1].data = fdata
        outfile = outdir + fname
        mcf.rm_files(outfile)
        flist.writeto(outfile)
#--- close the hdu list; the original leaked one file handle per fits file
        flist.close()
def update_eph_l1():
    """
    update eph L1 related data
    input:  none
    output: <out_dir>/<msid>_full_data_<year>.fits
    """
    t_file  = 'sce1300_full_data_*.fits*'
    out_dir = deposit_dir + 'Comp_save/Compephkey/'
    ifile   = house_keeping + 'msid_list_ephkey'

#--- the msid is the first column of each line of the list file
    msid_list = []
    for ent in mcf.read_data_file(ifile):
        msid_list.append(re.split('\s+', ent)[0])

    [tstart, tstop, year] = ecf.find_data_collecting_period(out_dir, t_file)
#--- update the data
    get_data(tstart, tstop, year, msid_list, out_dir)
#--- zip the fits file from the last year at the beginning of the year
    ecf.check_zip_possible(out_dir)
def remove_old_reg_file(lev):
    """
    remove old reg files
    input:  lev --- level 1 or 2
    output: none
    """
#--- set the cut time to 90 days ago
    current = time.strftime('%Y:%j:%H:%M:%S', time.gmtime())
    cut     = Chandra.Time.DateTime(current).secs - 86400 * 90.0

    cmd = 'ls ' + cor_dir + 'Lev' + str(lev) + '/Reg_files/* > ' + zspace
    os.system(cmd)

    for ifile in mcf.read_data_file(zspace, remove=1):
#--- compare the file modification time against the cut time
        mtime = time.strftime('%Y:%j:%H:%M:%S', time.gmtime(os.path.getmtime(ifile)))
        if Chandra.Time.DateTime(mtime).secs < cut:
            os.system('rm -rf ' + ifile)
def binning_data(fname):
    """
    binning the data into 256 bins
    input:  fname   --- data file name
    output: abin    --- a list of bin # 0 - 255
            out     --- a list of the counts of each bin
            avg/std --- mean and standard deviation of the data
            returns False if the mean is nan (e.g. empty data file)
    """
    data  = mcf.read_data_file(fname)
    fdata = [int(float(ent)) for ent in data]

    avg = numpy.mean(fdata)
    std = numpy.std(fdata)

    out = [0] * 256
    for val in fdata:
#--- ignore out-of-range values; a negative value previously wrapped
#--- around (out[-n]) and was silently counted in the wrong bin
        if val < 0 or val >= 256:
            continue
        out[val] += 1

    abin = list(range(0, 256))

#--- numpy.mean of an empty list is nan; signal failure to the caller
    if numpy.isnan(avg):
        return False
    else:
        return [abin, out, avg, std]
def find_dy_range():
    """
    find a plotting y range factor for each sun-angle msid
    input:  none, but read from <house_keeping>/msid_list_sun_angle
            and <data_dir>/<Group>/<msid>_data.fits
    output: ./msid_list_sun_angle --- msid list updated with the range factor
    """
    msid_list = house_keeping + 'msid_list_sun_angle'
    data = mcf.read_data_file(msid_list)

    line = ''
    for ent in data:
        atemp = re.split('\s+', ent)
        msid  = atemp[0]
        group = atemp[1]

        fits  = data_dir + group.capitalize() + '/' + msid + '_data.fits'
        fout  = pyfits.open(fits)
        fdata = fout[1].data
        dout  = fdata[msid]
#--- use the 2--98 percentile spread to avoid outliers
        bot   = numpy.percentile(dout, 2)
        top   = numpy.percentile(dout, 98)
#--- close the fits file; the original leaked one handle per msid
        fout.close()

        diff  = top - bot
        ratio = diff / 120.0
        if ratio < 1:
            ratio = round(ratio, 2)
            ratio *= 2
        else:
            ratio = round(ratio, 0)
            ratio *= 3
        if ratio < 0.2:
            ratio = 0.2

        btemp = re.split('0.011', ent)
        line  = line + btemp[0] + '\t0.011\t' + str(ratio) + '\n'

#--- a context manager guarantees the output file is closed/flushed
#--- (the original never closed it)
    with open('msid_list_sun_angle', 'w') as fo:
        fo.write(line)
def find_data_collecting_period(testdir, testf):
    """
    find data collection time period from the last entry
    input:  testdir --- the directory path to the data
            testf   --- test fits file name
    output: tstart  --- the data collecting starting time in seconds from 1998.1.1
            tstop   --- the data colleciton stopping time in seconds from 1998.1.1
            year    --- the year of the file updated
    """
#--- find the last entry
    cmd = 'ls ' + testdir + '/' + testf + ' > ' + zspace
    os.system(cmd)
    flist = mcf.read_data_file(zspace, remove=1)
    test  = flist[-1]

    if os.path.isfile(test):
        hdus  = pyfits.open(test)
        fdata = hdus[1].data
        hdus.close()
        tstart = numpy.max(fdata['time'])
    else:
#--- no previous data: start from the beginning of the mission time
        tstart = 0.0
#--- find yesterday's date
    year  = time.strftime("%Y", time.gmtime())
    tstop = time.strftime("%Y:%j:00:00:00", time.gmtime())
    tstop = Chandra.Time.DateTime(tstop).secs - 86400.0

    return [tstart, tstop, year]
def read_ace_data(ifile):
    """
    reading an ACE data file located in a Interrupt Data_dir
    input:  ifile       --- data file name
    output: dofy        --- a list of date in ydate
            elec38      --- a list of elec 38 data
            elec175     --- a list of elec 175 data
            proton47    --- a list of proton 47 data
            proton112   --- a list of proton 112 data
            proton310   --- a list of porton 310 data
            proton761   --- a list of proton 761 data
            proton1060  --- a list of proton 1060 data
            aniso       --- a list of anisotropy data
    note:   the seven flux columns are stored as log10 values; non-positive
            fluxes are replaced with 1e-5 before taking the log
    """
    ifile = wdata_dir + ifile + '_dat.txt'
    data  = mcf.read_data_file(ifile)

    dofy       = []
    elec38     = []
    elec175    = []
    proton47   = []
    proton112  = []
    proton310  = []
    proton761  = []
    proton1060 = []
    aniso      = []
#--- flux lists in the same order as data columns 1..7
    flux_lists = [elec38, elec175, proton47, proton112,
                  proton310, proton761, proton1060]

    for ent in data:
        if not ent:
            continue
        atemp = re.split('\s+|\t+', str(ent))
        btemp = re.split('\.', str(atemp[0]))
#--- skip header/comment lines: the first column must start with digits
        if not str.isdigit(str(btemp[0])):
            continue
#--- all 8 data columns must be non-empty strings
        if not all(atemp[m] for m in range(1, 9)):
            continue

        vals = [float(atemp[m]) for m in range(1, 9)]
#--- clip non-positive fluxes (columns 1..7 only) so log10 is defined
        for m in range(0, 7):
            if vals[m] <= 0:
                vals[m] = 1e-5

        dofy.append(float(atemp[0]))
        for m in range(0, 7):
            flux_lists[m].append(math.log10(vals[m]))
#--- anisotropy is kept in linear scale
        aniso.append(vals[7])

    return [dofy, elec38, elec175, proton47, proton112, proton310,\
            proton761, proton1060, aniso]
def remove_old_file(fname, categ):
    """
    remove old backup files
    input:  fname   --- a file name
            categ   --- a category of the file. 0: monthly backup / 1: daily backup
    output: none
    """
#--- set cutting date: daily backups are kept a month, others a year
    cut = now - month_ago if categ == 1 else now - year_ago
#--- find all backed up file names
    cmd = 'ls /home/' + user + '/Logs/Past_logs/' + fname + '* > ' + zspace
    os.system(cmd)

    for ent in mcf.read_data_file(zspace, remove=1):
#--- remove the file if it was created before the cutting date
        if check_creation_time(ent) < cut:
            os.system('rm -rf ' + ent)
def substitue_idx_page(page, dfile, col_no, head, cstart=0):
    """
    substitute slope and std values to index.html page template
    input:  page    --- tempalte string
            dfile   --- data file name (without the path)
            col_no  --- numbers of column in the data
            head    --- header of the substituion string
            cstart  --- from where to start the data reading. defalut: 0
    output: page    --- updated template string
    """
#--- data has only one line data with col_no entries
#--- first half lists slopes and the last half lists std
    ifile = data_dir + dfile
    data  = mcf.read_data_file(ifile)
    atemp = re.split('\s+', data[0])

    for k in range(0, col_no):
        m = k + cstart
#--- only a short data line is an expected failure; the original bare
#--- excepts would also have hidden genuine coding errors
        try:
            slope = atemp[m]
        except IndexError:
            slope = 'nan'
        try:
            std = atemp[m + col_no]
        except IndexError:
            std = 'nan'

        slp_name = '#' + head.upper() + '_SLP' + str(k) + '#'
        std_name = '#' + head.upper() + '_STD' + str(k) + '#'
        page = page.replace(slp_name, slope)
        page = page.replace(std_name, std)

    return page
def plot_rej_evt_data():
    """
    create rejected event data plots
    input:  none, but read from <data_dir>/CCD<ccd#>_rej.dat
    output: <web_dir>/Plots/ccd<ccd#>_<part>.png
    """
    for ccd in range(0, 10):
        ifile = data_dir + 'CCD' + str(ccd) + '_rej.dat'
        data  = mcf.read_data_file(ifile)

#--- split the rows into a cti set and a science set on column [-2]
        cti_set = [[] for _ in range(0, 5)]
        sci_set = [[] for _ in range(0, 5)]
        for dline in data[1:]:
            ent   = re.split('\s+', dline)
            ytime = mcf.chandratime_to_fraq_year(float(ent[0]))
            sel   = cti_set if float(ent[-2]) > 50000 else sci_set
            sel[0].append(ytime)
#--- columns 1, 3, 7, 9 hold the plotted quantities
            for pos, col in zip([1, 2, 3, 4], [1, 3, 7, 9]):
                sel[pos].append(float(ent[col]))

        plot_data(cti_set, ccd, 'cti')
        plot_data(sci_set, ccd, 'sci')
def correct_naming(obsid, inst):
    """
    check secondary and analysis directories and correct wrongly named fits and par file
    input:  obsid   --- obsid
            inst    --- instrument. either "i" or "s"
    """
    cobsid = str(int(float(obsid)))
#--- a five digit obsid needs no leading zeros; nothing to fix
    if len(cobsid) == 5:
        return

    lobsid = mcf.add_leading_zero(obsid, 5)

    for sdir in ['secondary', 'analysis']:
        cmd = 'ls /data/hrc/' + inst + '/' + lobsid + '/' + sdir + '/hrcf* >' + zspace
        os.system(cmd)

        for ent in mcf.read_data_file(zspace, remove=1):
            fname = re.split('\/', ent)[-1]
#--- files which already carry the zero-padded obsid are fine
            if re.search(lobsid, fname) is not None:
                continue
#--- extract the short obsid embedded in the file name and replace it
            sobs = re.split('_', re.split('hrcf', fname)[1])[0]
            new  = fname.replace(sobs, lobsid)
            full = '/data/hrc/' + inst + '/' + lobsid + '/' + sdir + '/' + new
            os.system('mv ' + ent + ' ' + full)
def read_focal_temp_data(fptemp, outdir): """ read output of find_focal_temp_peaks.py and get focal temp information input: fptemp --- plot output name outdir --- output directory read '/data/mta/Script/Weekly/Data/Focal/focal_temp_list' output: fcnt --- number of peaks observed fdata --- table input """ # #--- read the html table entries # ifile = wdir + '/Data/Focal/focal_temp_list' data = mcf.read_data_file(ifile) fcnt = len(data) fdata = '' for ent in data: fdata = fdata + ent + '\n' # #--- move the plot to an appropriate place # cmd = 'cp ' + wdir + 'Data/Focal/acis_focal_temp.png ' + outdir + fptemp os.system(cmd) return [fcnt, fdata]
def delate_old_file():
    """
    remove html files older than one day old from Interactive directory
    input:  none, but read from the directory
    oupupt: none
    """
#--- find html files in Interactive directory
    cmd = 'ls ' + web_dir + 'Interactive/* > ' + zspace
    os.system(cmd)
    dlist = mcf.read_data_file(zspace, remove=1)
#--- set one day ago
    cdate = time.time() - 60.0 * 60.0 * 24.0
#--- nothing to check: stop here
    if len(dlist) == 0:
        exit(1)
#--- remove any files created older than one day ago
    for cfile in dlist:
        if re.search('html', cfile) is None:
            continue
        if os.path.getmtime(cfile) < cdate:
            os.system('rm -rf ' + cfile)
def read_condition(cfile):
    """
    read data extraction condition file and also creates output fits file name
    input:  cfile       --- condition file name
    output: condition   --- a list of lists containing column name and the value range
            fits        --- output fits file name
    """
#--- read a condition file
    ifile = house_keeping + 'Selection_coditions/' + cfile
    data  = mcf.read_data_file(ifile)

    condition = []
    for ent in data:
#--- skip blank lines (an empty string previously crashed ent[0]) and comments
        if not ent:
            continue
        if ent[0] == '#':
            continue
        atemp = re.split('=', ent)
        condition.append(atemp)
#--- create output fits file name
    test  = str(cfile)
    test2 = test[-2:]               #--- checking the last two character
    if test2 in ['_1', '_2', '_3']:
        test = test[:-2]
    fits = test + '.fits'

    return [condition, fits]
def find_disk_size(dName):
    """
    this function finds a usage of the given disk
    Input:  dName    --- the name of the disk
    Output: percent  --- the percentage of the disk usage
    """
    cmd = 'df -k ' + dName + ' > ' + zspace
    os.system(cmd)
#--- remove=1 deletes the temporary file after reading
#--- (the original passed remove=-1, almost certainly a typo)
    data = mcf.read_data_file(zspace, remove=1)

    percent = 0
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
#--- a data line (unlike the df header) has a numeric second column
        try:
            float(atemp[1])
        except (ValueError, IndexError):
            continue
        for test in atemp:
            if re.search('%', test) is not None:
                btemp   = re.split('\%', test)
                percent = btemp[0]          #---- disk capacity in percent (%)

    return int(round(float(percent)))
def substitue_year_page(page, dfile, year, col_no, head, cstart=1):
    """
    substitue slope and std on one year page
    input:  page    --- template
            dfile   --- data file
            year    --- the year of the page created
            col_no  --- numuber of data column
            head    --- header of substition string
            cstart  --- starting column
    output: page    --- updated tempalte
    """
    data = mcf.read_data_file(data_dir + dfile)

    for ent in data:
        atemp = re.split('\s+', ent)
#--- only the row of the requested year is substituted
        if float(atemp[0]) != year:
            continue
        for k in range(0, col_no):
            m     = k + cstart
            slope = atemp[m]
            std   = atemp[m + col_no]
            page  = page.replace('#' + head.upper() + '_SLP' + str(k) + '#', slope)
            page  = page.replace('#' + head.upper() + '_STD' + str(k) + '#', std)
        break

    return page
def create_table_section(ctype, btype, title):
    """
    create bad pixel table entry
    input:  ctype   --- ccd, hccd, or col
            btye    --- new, warm of flick
            title   --- the title of the section
    output: line    --- the content of the section
    """
    line = '<tr style="text-align:center"><td>' + title + '</td>\n'

    for ccd in range(0, 10):
        ifile = "/data/mta/Script/ACIS/Bad_pixels/Data/" + ctype + str(ccd) + "_information"
        data  = mcf.read_data_file(ifile)
#--- collect entries of the requested type; fields containing ':'
#--- are time stamps and are dropped
        save = []
        for out in data:
            if re.search(btype, out) is None:
                continue
            for ent in re.split('\s+', out)[1:]:
                if re.search(':', ent) is None:
                    save.append(ent)

        line = line + '<!-- ccd' + str(ccd) + ' -->\n'
        line = line + '<td>\n'
        if save:
            for ent in save:
                line = line + ent + '\n'
        else:
            line = line + ' \n'
        line = line + '</td>\n'

    line = line + '</tr>\n\n'
    return line
def substitue_slot_stat(template, ifile, dyear, dmon, sname, col_start=2):
    """
    substitute slope and std of magnitue tables of monthly page
    input:  template    --- template
            ifile       --- input file name
            dyear       --- year of the page
            dmon        --- month of the page
            sname       --- header of the substition string
            col_starts  --- starting column postion
    output: template    --- updated template
    """
    for ent in mcf.read_data_file(ifile):
        atemp = re.split('\s+', ent)
        btemp = re.split(':', atemp[1])
#--- locate the row for the requested year/month
        if int(float(btemp[0])) != dyear or int(float(btemp[1])) != dmon:
            continue
        for k in range(0, 8):
#--- -999.0 is the missing-value marker; display it as 'nan'
            slope = atemp[k + col_start]
            if slope == '-999.0':
                slope = 'nan'
            std = atemp[k + col_start + 8]
            if std == '-999.0':
                std = 'nan'
            template = template.replace('#' + sname + '_SLP' + str(k) + '#', slope)
            template = template.replace('#' + sname + '_STD' + str(k) + '#', std)
        break

    return template
def plot_solar_panel_data():
    """
    run plotting routine for all angle intervals
    input:  none but read data from <data_dir>
    output: <web_dir>/Plots/<Msid>/msid_angle<angle>.png
    """
#--- plot each angle interval data
    for angle in angle_list:
        print("Processing ANGLE:" + str(angle))
#--- read data
        dfile = data_dir + 'solar_panel_angle_' + str(angle)
        out   = mcf.read_data_file(dfile)
#--- BUG FIX: the original assigned the file content to an unused variable
#--- ("our") and iterated the still-empty "data" list, so the data were
#--- never read and the function always exited below
        data = []
        for ent in out:
            if ent[0] == '#':
                continue
            else:
                data.append(ent)

        if len(data) < 2:
            exit(1)
#--- plot time trend of each msid
        plot_each_msid(data, angle=angle)
#--- plot ysada temp vs elbi
        plot_sada_elbi(data, angle)
def substitue_mag_stat(template, ifile, dyear, dmon):
    """
    substitute slope and std of slot tables of monthly page
    input:  template    --- template
            ifile       --- input file name
            dyear       --- year of the page
            dmon        --- month of the page
    output: template    --- updated template
    """
    for ent in mcf.read_data_file(ifile):
        atemp = re.split('\s+', ent)
        btemp = re.split(':', atemp[0])
#--- find the row matching the requested year/month
        if int(float(btemp[0])) != dyear or int(float(btemp[1])) != dmon:
            continue
        for k in range(1, 15):
#--- -999.0 is the missing-value marker; display it as 'nan'
            slope = atemp[k]
            if slope == '-999.0':
                slope = 'nan'
            std = atemp[k + 14]
            if std == '-999.0':
                std = 'nan'
            template = template.replace('#MAG_SLP' + str(k) + '#', slope)
            template = template.replace('#MAG_STD' + str(k) + '#', std)
        break

    return template
def removeDuplicated(ifile):
    """
    remove duplicated rows from the file
    Input:  ifile   --- a file name of the data
    Output: ifile   --- cleaned up data (the file is overwritten in place)
    """
    data = mcf.read_data_file(ifile)

    if len(data) > 0:
#--- dict.fromkeys keeps the first occurrence of each row in order;
#--- this replaces the original O(n^2) nested list scan
        new = list(dict.fromkeys(data))
#--- now print out the cleaned up data
        with open(ifile, 'w') as f:
            for ent in new:
                f.write(ent + '\n')
def convert_to_coordinates(ra, dec, evt, ctype):
    """
    convert ra dec to sky, det, or chip coordinates
    input:  ra      --- ra in degree
            dec     --- dec in degree
            evt     --- evt1 file
            ctype   --- sky/det/chip
    output: [newx, newy]
    """
    cmd = 'dmcoords ' + evt + ' opt=cel ra=' + str(ra) + ' dec='
    cmd = cmd + str(dec) + ' verbose=1 > ' + zspace
    os.system(cmd)

    newx = ''
    newy = ''
#--- extract sky coordindats
    info = mcf.read_data_file(zspace, remove=1)
    info.reverse()
    for ent in info:
#--- reset the match each iteration; an unrecognized ctype previously
#--- left "mc" undefined and raised NameError
        mc = None
        if ctype == 'sky':
            mc = re.search('SKY\(X,Y\)', ent)
        elif ctype == 'det':
            mc = re.search('DETX,DETY', ent)
        elif ctype == 'chip':
            mc = re.search('CHIP', ent)

        if mc is not None:
            atemp = re.split('\s+', ent)
            newx  = float(atemp[1])
            newy  = float(atemp[2])

    return [newx, newy]
def hrc_i_param(start):
    """
    set parameters for hrc i case
    input:  start   --- start time in second from 1998.1.1
    output: [ampsatfile, ampsfcorfile, badpixfile, degapfile,
             evtflatfile, hypfile, obsfile, tapfile, gainfile]
    """
    ampsatfile   = calib_dir + 'sattest/hrciD1999-07-22sattestN0002.fits'
    ampsfcorfile = calib_dir + 'amp_sf_cor/hrciD1999-07-22amp_sf_corN0001.fits'
    badpixfile   = house_keeping + 'hrcf10702_000N001_bpix1.fits'
    degapfile    = calib_dir + 'gaplookup/hrciD1999-07-22gaplookupN0004.fits'
    evtflatfile  = calib_dir + 'eftest/hrciD1999-07-22eftestN0001.fits'
    hypfile      = calib_dir + 'fptest/hrciD1999-07-22fptestN0003.fits'
    tapfile      = calib_dir + 'tapringtest/hrciD1999-07-22tapringN0002.fits'
    obsfile      = house_keeping + 'obs.par'
#--- read gain selection list
    infile = house_keeping + 'Gain_files/gain_selection'
    data   = mcf.read_data_file(infile)
#--- select gain file name for the period containing "start"
#--- NOTE(review): if start falls outside every listed period, gainfile is
#--- never assigned and the return raises NameError --- confirm the selection
#--- list always covers the requested time
    for ent in data:
        atemp = re.split('\s+', ent)
        begin = int(float(atemp[0]))
        end   = int(float(atemp[1]))
        if begin <= start < end:
            gainfile = calib_dir + 'gmap/' + atemp[2]
            break

    return [ampsatfile, ampsfcorfile, badpixfile, degapfile, evtflatfile, hypfile,\
            obsfile, tapfile, gainfile]
def find_processed_data(inst):
    """
    find the hrc obsids which are already re-processed
    input:  inst    --- instrument designation: "i" or "s"
    output: out     --- a list of obsids
    """
    data_dir = '/data/hrc/i/' if inst == 'i' else '/data/hrc/s/'

    cmd = 'ls -d ' + data_dir + '/* > ' + zspace
    os.system(cmd)
    data = mcf.read_data_file(zspace, remove=1)

    out = []
    for ent in data:
#--- keep only directories whose last path element is a numeric obsid
        atemp = re.split('\/', ent)
        try:
            val = int(float(atemp[-1]))
        except:
            continue
        if mcf.is_neumeric(val):
            out.append(val)
#--- remove duplicate
    return list(set(out))
def extract_sim_position(year, period_start, period_end):
    """
    extract sim position information from comprehensive_data_summary data file
    input:  year            --- year (in form of 2012)
            period_start    --- start time in seconds from 1.1.1998
            period_end      --- stop time in seconds from 1.1.1998
    output: time    --- (seconds from 1.1.1998)
            sim_position
    """
    ifile = mj_dir + '/comprehensive_data_summary' + str(year)
    data  = mcf.read_data_file(ifile)

    sim_time = []
    sim_pos  = []
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
#--- skip rows whose time stamp cannot be converted
        try:
            tinsec = mcf.convert_date_format(atemp[0], ofmt='chandra')
        except:
            continue
#--- keep only entries inside the requested period
        if period_start <= tinsec < period_end:
            sim_time.append(float(tinsec))
            sim_pos.append(float(atemp[1]))

    return [sim_time, sim_pos]
def diskCapacity(diskName):
    """
    find a disk capacity
    input:  diskName        --- name of the disk
    output: disk_capacity   --- disk capacity ('na' if not found)
    """
    cmd = 'df -k ' + diskName + '> ' + zspace
    os.system(cmd)
    data = mcf.read_data_file(zspace, remove=1)

    disk_capacity = 'na'
    for ent in data:
#--- the df header row fails the float conversion and is skipped;
#--- a data row is recognized by a field containing '%'
        try:
            atemp = re.split('\s+', ent)
            val   = float(atemp[1])
            for test in atemp:
                if re.search('%', test) is not None:
                    disk_capacity = val
                    break
        except:
            pass

    return disk_capacity
def find_recently_created_file(path, fname, days, etime):
    """
    find recently created files
    input:  path    --- path to the file with fname
            fname   --- a file name
            days    --- a time limit in days
            etime   --- end of the time period in seconds from 1998.1.1
    output: save    --- a list of files with full paths, sorted by creation time
    """
    cut = etime - days * 86400

    cmd = 'ls ' + path + '/' + fname + '> ' + zspace
    os.system(cmd)
    data = mcf.read_data_file(zspace, remove=1)

    t_save = []
    d_dict = {}
    for ent in data:
        ltime = time.strftime("%Y:%j:%H:%M:%S", time.gmtime(os.path.getctime(ent)))
        stime = int(Chandra.Time.DateTime(ltime).secs)
        if stime < cut:
            continue
#--- "ls" output is sorted by name, not by creation time, so a too-new
#--- file must not stop the scan (the original "break" could silently
#--- drop valid later entries)
        elif stime > etime:
            continue
        else:
            t_save.append(stime)
            d_dict[stime] = ent
#--- return the surviving file names in creation-time order
    save = []
    for stime in sorted(t_save):
        save.append(d_dict[stime])

    return save
def read_fitting_results(infile):
    """
    read the file given and make a list of slope with the error
    input:  infile  --- the data file name
    output: s_list1 --- a list of <slope>+/-<error>
            s_list2 --- a list of the second <slope>+/-<error>
    """
#--- a missing/unreadable file is treated as empty; the bare except is
#--- narrowed so that interpreter-level errors still surface
    try:
        data = mcf.read_data_file(infile)
    except Exception:
        data = []

    s_list1 = []
    s_list2 = []
    for ent in data:
        atemp = re.split('\s+', ent)
#--- two line formats: 6 columns, or longer with an extra leading field
        if len(atemp) == 6:
            slope  = atemp[1] + '+/-' + atemp[2]
            slope2 = atemp[4] + '+/-' + atemp[5]
        else:
            slope  = atemp[2] + '+/-' + atemp[3]
            slope2 = atemp[5] + '+/-' + atemp[6]
        s_list1.append(slope)
        s_list2.append(slope2)

    return [s_list1, s_list2]
def find_the_last_data_created_date():
    """
    find the data of the last created fits data file.
    input:  none
    output: year    --- year
            mon     --- momth
            day     --- day of the month
    """
    cmd = 'ls /data/aschrc1/GENHRC/RAW/HRC_HK0/hrc_hk0*fits* '
    cmd = cmd + '/data/aschrc1/GENHRC/RAW/HRC_HK0/*/hrc_hk0*fits* >' + zspace
    os.system(cmd)
    data = mcf.read_data_file(zspace, remove=1)

#--- the date stamp is the part between "hrc_hk0_" and ".fits"
    date = []
    for ent in data:
        atemp = re.split('hrc_hk0_', ent)
        btemp = re.split('\.fits', atemp[1])
        date.append(btemp[0])

#--- the last entry of the sorted list is the latest yyyymmdd stamp
    date.sort()
    lent = str(date[-1])
#--- slicing instead of the original character-by-character concatenation
    year = int(float(lent[:4]))
    mon  = int(float(lent[4:6]))
    day  = int(float(lent[6:8]))

    return [year, mon, day]
def update_bias_html():
    """
    pdate bias_home.html page
    input:  None but read from: <house_keeping>/bias_home.html
    output: <web_dir>/bias_home.html
    """
#--- line to replace; the label is spelled "Last Update" (the original wrote
#--- the misspelled "Last Upate" onto the page even though the search below
#--- looks for the correct spelling)
    newdate = "Last Update: " + time.strftime("%m/%d/%Y", time.gmtime())
#--- read the template
    ifile = house_keeping + 'bias_home.html'
    data  = mcf.read_data_file(ifile)
#--- print out
    outfile = web_dir + 'bias_home.html'
    with open(outfile, 'w') as fo:
        for ent in data:
            if re.search('Last Update', ent) is not None:
                fo.write(newdate + '\n')
            else:
                fo.write(ent + '\n')