def update_html(update):
    """
    check whether the update is needed and, if so, run the update
    input:  update --- if it is 1, run the update without checking whether the file exists
    output: none, but updated html pages (in <web_dir>)
    """
    year = time.localtime().tm_year
#
#--- forced update: run it without any check
#
    if update > 0:
        run_update(year)
        return
#
#--- otherwise check whether a html page for this year already exists
#
    cmd = 'ls ' + web_dir + '*.html > ' + zspace
    os.system(cmd)
    with open(zspace, 'r') as f:
        out = f.read()
    mcf.rm_file(zspace)

    if re.search(str(year), out) is None:
        run_update(year)
def extract_data_and_combine(time_list, detector, level, filetype, name, fcol_line=''):
    """
    extract fits files in the time span(s) and combine all of them into one fits file
    input:  time_list --- a list of lists [[<start_time>],[<stop_time>]]
            detector  --- detector name (e.g. hrc)
            level     --- level (e.g. 0, 1)
            filetype  --- file type (e.g. hrcss, hrchk)
            name      --- name of the resulting combined fits file
    output: name      --- resulting fits file
    """
#
#--- extract the individual fits files for the period(s)
#
    flist = extract_fits_files(time_list, detector, level, filetype)
#
#--- merge them into a single file, then drop the pieces
#
    hcf.combine_fits_files(flist, name, azip=0, fcol_line=fcol_line)

    for fits in flist:
        mcf.rm_file(fits)
def extract_cron_file_name():
    """
    extract cron error message file names for the current user/machine
    output: cron_file_name --- a list of cron file names (file names only, no directory path)
    """
    try:
        os.system('crontab -l >' + zspace)
        with open(zspace, 'r') as f:
            data = [line.strip() for line in f.readlines()]
        mcf.rm_file(zspace)
    except:
        exit(1)
#
#--- keep only non-commented entries which redirect into a Logs/ file
#
    collected = []
    for line in data:
        if re.search('Logs', line) is not None and line[0] != '#':
            part = re.split('Logs/', line)[1]
            name = re.split('2>&1', part)[0].strip()
            collected.append(name)
#
#--- drop duplicated names
#
    return list(set(collected))
def combine_fits(flist, outname):
    """
    combine fits files in the list
    input:  flist   --- a list of fits file names
            outname --- an output fits file name
    output: outname --- a combined fits file
    """
    mcf.rm_file(outname)
    os.system('mv ' + flist[0] + ' ' + outname)
#
#--- append each remaining file to the running result; a failed append is skipped
#
    for fname in flist[1:]:
        try:
            mfits.appendFitsTable(outname, fname, 'temp.fits')
        except:
            continue
        os.system('mv temp.fits ' + outname)
        os.system('rm -f ' + fname)

    os.system('rm -rf *fits.gz')

    return outname
def extract_data(start, stop):
    """
    extract data to compute HRMA focus plots
    input:  start ---- start time in the format of mm/dd/yy (e.g. 05/01/15)
            stop  ---- stop time in the format of mm/dd/yy
    output: acis*evt2.fits.gz, hrc*evt2.fits.gz
    """
#
#--- drop any fits files left over from a previous run
#
    os.system('ls * > ' + zspace)
    with open(zspace, 'r') as f:
        chk = f.read(10000000)
    mcf.rm_file(zspace)

    if re.search('fits', chk) is not None:
        os.system('rm *fits*')
#
#--- if the time interval is not given, set a one month interval
#
    if start == '':
        [start, stop] = set_interval()
#
#--- extract acis and hrc evt2 files
#
    for inst in ('acis', 'hrc'):
        run_arc(inst, start, stop)
def test_combine_evt1_files(self):
    """
    combine all hrc_i_115 evt1 files of year 2000 and check the time coverage
    of the resulting combined fits file
    """
#
#--- make a list of all evt1 files of the year
#
    year   = 2000
    hdir   = 'hrc_i_115'
    e_list = get_evt1_list(year, hdir)
    if len(e_list) == 0:
#
#--- print() form works under both python 2 and 3 (the bare statement does not)
#
        print("Something wrong in getting files")
#
#--- combine all evt1 files of the year
#
    oname = 'test_combined_evt1.fits'
    hcf.combine_fits_files(e_list, oname)

    tstart = int(float(hcf.read_header_value(oname, 'tstart')))
    tstop  = int(float(hcf.read_header_value(oname, 'tstop')))
#
#--- assertEqual: assertEquals is a deprecated alias
#
    self.assertEqual(tstart, 65961070)
    self.assertEqual(tstop,  93190294)

    mcf.rm_file(oname)
def mv_old_file(dom):
    """
    move Defect/CCD files older than 30 days (in DOM) into Defect/Save
    input:  dom --- day of mission
    output: old files moved under <house_keeping>/Defect/Save
    """
    dom -= 30
    if dom <= 0:
        return
#
#--- convert the cut-off dom into time in seconds from 1998.1.1
#
    [year, ydate] = tcnv.DOMtoYdate(dom)
    sydate = str(ydate).zfill(3)
    atime  = str(year) + ':' + sydate + ':00:00:00'
    stime  = tcnv.axTimeMTA(atime)

    cmd = 'ls ' + house_keeping + '/Defect/CCD*/* > ' + zspace
    os.system(cmd)
    with open(zspace, 'r') as fs:
        ldata = [line.strip() for line in fs.readlines()]
    mcf.rm_file(zspace)
#
#--- move every file whose time stamp is older than the cut-off
#
    for ent in ldata:
        head = re.split('\/acis', ent)[1]
        fid  = re.split('_', head)[0]
        if int(fid) < stime:
            out = ent.replace('Defect', 'Defect/Save')
            os.system('mv ' + ent + ' ' + out)
def run_arc(inst, start, stop):
    """
    run arc4gl and extract evt2 data for "inst"
    input:  inst  --- instrument, acis or hrc
            start --- interval start time in format of mm/dd/yy (e.g. 05/01/15)
            stop  --- interval stop time in format of mm/dd/yy
    """
#
#--- build the arc4gl command file
#
    alist = ['operation=retrieve',
             'dataset=flight',
             'detector=' + inst,
             'level=2',
             'filetype=evt2',
             'tstart=' + start,
             'tstop=' + stop,
             'go']
    with open(zspace, 'w') as f:
        f.write('\n'.join(alist) + '\n')
#
#--- run arc4gl under the ascds environment
#
    cmd = "/usr/bin/env PERL5LIB=" + ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
def selectTableData(file, colname, condition, outname, extension=1, clobber='no'):
    """
    select data for a given colname and the condition and create a new table fits file
    Input:  file     --- input table fits file
            colname  --- column name
            condition--- condition of the data selection
                         if the selection is an interval, the format is <start>:<stop>
                         if it is equal use:  ==<value>
                         if it is not equal:  !=<value>
            outname  --- output file name
            extension--- table extension number. default: 1
            clobber  --- overwrite the file if exists. if not given, 'no'
    """
#
#--- remove the old output file if clobbering is requested
#
    if (re.search('y', clobber) is not None) or (re.search('Y', clobber) is not None):
        mcf.rm_file(outname)

    t     = fits.open(file)
    tdata = t[extension].data

    mc  = re.search(':',  condition)
    mc1 = re.search('\!', condition)
    chk = 0
#
#--- interval selection: <start>:<stop>
#
    if mc is not None:
        atemp = re.split(':', condition)
        start = float(atemp[0])
        stop  = float(atemp[1])

        mask  = tdata.field(colname) >= start
        modt  = tdata[mask]
        mask  = modt.field(colname) <= stop
        modt2 = modt[mask]
#
#--- "not equal" selection
#
    elif mc1 is not None:
        condition = condition.replace('!=', "")
        mask      = tdata.field(colname) != condition
        modt2     = tdata[mask]
#
#--- "equal" selection
#
    else:
        condition = condition.replace('==', "")
#
#--- NOTE(review): condition is a string here, so the isinstance test below is
#--- normally False and the logical-mask branch is taken --- confirm intent
#
        if (isinstance(condition, float)) or (isinstance(condition, int)):
            mask  = tdata.field(colname) == condition
            modt2 = tdata[mask]
        else:
#
#--- bug fix: this previously referenced an undefined name "tbdata"
#
            data = select_data_with_logical_mask(tdata, colname, condition)
            chk  = 1

    header = fits.getheader(file)

    if chk == 0:
        data = fits.BinTableHDU(modt2, header)

    data.writeto(outname)
def fitsImgSection(file, x1, x2, y1, y2, outname, extension=0, clobber='no'):
    """
    extract a x by y section of fits image file
    Input:  file      --- input fits image file name
            x1, x2    --- x range
            y1, y2    --- y range
            outname   --- output fits image name
            extension --- extension #. default = 0
            clobber   --- clobber or not. default = no
    Output: outname   --- fits image file of size x by y
    """
    m1 = re.search('y', clobber)
    m2 = re.search('Y', clobber)
    if (m1 is not None) or (m2 is not None):
        mcf.rm_file(outname)

    t    = fits.open(file)
    data = t[extension].data

    xsize = abs(x2 - x1)
    ysize = abs(y2 - y1)
#
#--- copy the section with a single numpy slice assignment instead of a
#--- pixel-by-pixel double loop (assumes x2 >= x1 and y2 >= y1, as before);
#--- the float64 zero matrix is kept so the output dtype is unchanged
#
    output = numpy.matrix(numpy.zeros([ysize, xsize]))
    output[:, :] = data[y1:y2, x1:x2]

    header = fits.getheader(file)
    fits.writeto(outname, output, header)

    t.close()
def run_arc4gl(start, stop, operation='retrieve', dataset='flight', detector='telem', level='raw'):
    """
    extract data from archive using arc4gl
    input:  start     --- starting time in the format of mm/dd/yy,hh/mm/ss. hh/mm/ss is optional
            stop      --- stopping time
            operation --- operation command. default = retrieve
            dataset   --- dataset name.      default = flight
            detector  --- detector name.     default = telem
            level     --- level.             default = raw
    output: extracted data set
    """
#
#--- write the arc4gl command file
#
    alist = ['operation = ' + operation,
             'dataset = ' + dataset,
             'detector = ' + detector,
             'level = ' + level,
             'tstart=' + str(start),
             'tstop=' + str(stop),
             'go']
    with open(zspace, 'w') as fo:
        fo.write('\n'.join(alist) + '\n')
#
#--- run arc4gl
#
    cmd = '/usr/bin/env PERL5LIB=""' + ' echo ' + hakama + '|arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
def get_dump_em_files(start, stop):
    """
    extract Dump_EM files from archive
    input:  start --- start time in format of mm/dd/yy
            stop  --- stop  time in format of mm/dd/yy
    output: *Dump_EM* data in ./EM_data directory
            data  --- a list of the extracted data files
    """
#
#--- get data from archive
#
    run_arc4gl(start, stop)
#
#--- move the data to EM_Data directory and return the list of the data extracted
#
    try:
        os.system('mv *Dump_EM* ' + exc_dir + 'EM_Data/.')
        os.system('gzip -d ' + exc_dir + 'EM_Data/*gz')
        os.system('ls ' + exc_dir + 'EM_Data/*Dump_EM*sto > ' + zspace)

        with open(zspace, 'r') as f:
            data = [line.strip() for line in f.readlines()]
        mcf.rm_file(zspace)
    except:
        data = []

    return data
def get_data(msid, start, stop):
    """
    extract data for the given data period
    input:  msid  --- msid
            start --- starting time in seconds from 1998.1.1
            stop  --- stopping time in seconds from 1998.1.1
    output: data  --- a list of msid values ([] when no usable data)
    """
#
#--- extract data with dataseeker
#
    try:
        ecf.data_seeker(start, stop, msid)
        [col, tbdata] = ecf.read_fits_file('temp_out.fits')
        mcf.rm_file('temp_out.fits')
    except:
        return []
#
#--- "ttime" instead of "time": avoid shadowing the time module
#
    ttime = tbdata.field('time')
#
#--- guard against an empty time column (previously this raised IndexError)
#
    if len(ttime) == 0:
        return []
#
#--- if the dataseeker's data is not filled for the given data period,
#--- stop any further data processing
#
    if ttime[-1] < 0.95 * stop:
        data = []
    else:
        try:
            data = tbdata.field(msid + '_avg')
        except:
            data = []

    return data
def find_tscpos_positive_period(start, stop):
    """
    find time periods when tscpos is located in a positive position
    input:  start --- starting time in mm/dd/yy,hh:mm:ss
            stop  --- stopping time in mm/dd/yy,hh:mm:ss
    output: positive_periods --- a list of lists: a list of period start times and
            a list of period stop times, both in seconds from 1998.1.1
    """
#
#--- extract sim data
#
    dlist = hcf.run_arc5gl('retrieve', start, stop, detector='sim', level='0', filetype='sim')
#
#--- find the periods when tscpos is on the positive side
#
    positive_periods = create_data_period(start, stop, dlist, colname='TSCPOS', lgc='>=', val=0)
#
#--- clean up the extracted files
#
    for fits in dlist:
        mcf.rm_file(fits)

    return positive_periods
def run_lephem(fname, time):
    """
    run idl script lephem.pro which converts the data into ascii data
    input:  fname --- the name of the input file: DE<time>.EPH
            time  --- time stamp of the file copied
    output: ascii data file --- DE<time>.EPH.dat0
            return value    --- 1 on success, 0 on failure
    """
    os.system('cp ' + fname + ' ' + eph_data + '/.')

    cname = 'DE' + str(time) + '.EPH'
    try:
#
#--- write a small idl driver and run it
#
        with open('./eph_run.pro', 'w') as fo:
            fo.write("lephem,'" + str(cname) + "'\n" + "exit\n")

        os.environ['IDL_PATH'] = idl_path
        subprocess.call("idl eph_run.pro", shell=True)

        mcf.rm_file('./eph_run.pro')
        return 1
    except:
        return 0
def extract_acis_evt1(start, stop):
    """
    extract acis evt1 files
    input:  start --- start time in the format of mm/dd/yy (e.g. 05/01/15)
            stop  --- stop  time in the format of mm/dd/yy
    output: acisf*evt1.fits.gz
    """
#
#--- write the required arc4gl command file
#
    alist = ['operation=retrieve',
             'dataset=flight',
             'detector=acis',
             'level=1',
             'filetype=evt1',
             'tstart=' + start,
             'tstop=' + stop,
             'go']
    with open(zspace, 'w') as f:
        f.write('\n'.join(alist) + '\n')
#
#--- run arc4gl under the ascds environment
#
    cmd = "/usr/bin/env PERL5LIB=" + ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
def run_focal_temp_data(start, stop):
    """
    run the focal temp scripts to extract data and to create a plot
    input:  start --- start time in seconds from 1998.1.1
            stop  --- stop  time in seconds from 1998.1.1
    output: data/plot files created by the external perl and idl scripts
            NOTE(review): the original docstring claimed fcnt/fdata are returned,
            but this function returns nothing --- confirm with the callers
    """
#
#--- run the focal temp script to extract data
#
    cmd = "/usr/bin/env PERL5LIB=" \
        + ' /usr/local/bin/perl ' + wdir + 'get_ftemp_data.perl ' + str(start) + ' ' + str(stop)
    bash(cmd, env=ascdsenv)

    mcf.rm_file('./test')
#
#--- run the idl script to create a plot
#
    cmd = "/usr/bin/env PERL5LIB=" + ' idl ./run_temp > out'
    bash(cmd, env=ascdsenv2)

    os.system('rm -rf ./*fits ')
def find_entries(dir):
    """
    create a list of files/directories under the "dir"
    input:  dir  --- directory name under /data/mta/www/mta_script_list/
    output: data --- a list of files and directories (backup entries removed)
    """
    try:
        cmd = 'ls -d /data/mta/www/mta_script_list/' + dir + '/* >' + zspace
        os.system(cmd)
        with open(zspace, 'r') as f:
            data = [line.strip() for line in f.readlines()]
        mcf.rm_file(zspace)
    except:
        data = []
#
#--- drop backup entries (names containing '~') with a single comprehension
#--- instead of the previous manual filter loop
#
    if len(data) > 0:
        data = [ent for ent in data if '~' not in ent]

    return data
def check_new_data(new_data, new_dict):
    """
    check whether new observations are added to the list and send email to admin
    input:  new_data --- a list of updated new_obs_list
            new_dict --- a dictionary of the previous new_obs_list info
    output: email sent to admin
    """
#
#--- collect entries whose obsid is not in the previous list
#
    save = []
    for ent in new_data:
        obsid = int(float(re.split('\s+', ent)[2]))
        try:
            new_dict[obsid]
        except:
            save.append(ent)

    if len(save) == 0:
        return
#
#--- write the new entries out and mail them to admin
#
    with open(zspace, 'w') as fo:
        fo.write('\n'.join(save) + '\n')

    cmd = 'cat ' + zspace + '| mailx -s "Subject:TEST!! TEST !! New Observation(s)" [email protected]'
    os.system(cmd)
    mcf.rm_file(zspace)
def find_two_sigma_value(fits):
    """
    find 68%, 95%, and 99.7% accumulation levels of the pixel value histogram
    of the given image fits file
    input:  fits --- image fits file name
    output: (sigma1, sigma2, sigma3) --- histogram bin values at the three levels
    """
#
#--- make a histogram with dmimghist, then dump it with dmlist
#
    cmd = "/usr/bin/env PERL5LIB=" \
        + ' dmimghist infile=' + fits + ' outfile=outfile.fits hist=1::1 strict=yes clobber=yes'
    bash(cmd, env=ascdsenv)

    cmd = "/usr/bin/env PERL5LIB=" \
        + ' dmlist infile=outfile.fits outfile=' + zspace + ' opt=data'
    bash(cmd, env=ascdsenv)

    with open(zspace, 'r') as f:
        data = [line.strip() for line in f.readlines()]
    mcf.rm_file(zspace)
#
#--- read bin # and its count rate
#
    hbin = []
    hcnt = []
    vsum = 0
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        if mcf.chkNumeric(atemp[0]):
            hbin.append(float(atemp[1]))
            val = int(atemp[4])
            hcnt.append(val)
            vsum += val

    if len(hbin) == 0:
        return (0, 0, 0)
#
#--- find where the accumulated counts pass the one/two/three sigma levels
#
    v68 = int(0.68 * vsum)
    v95 = int(0.95 * vsum)
    v99 = int(0.997 * vsum)

    sigma1 = sigma2 = sigma3 = -999
    acc = 0
    for pos in range(0, len(hbin)):
        acc += hcnt[pos]
        if acc > v68 and sigma1 < 0:
            sigma1 = hbin[pos]
        elif acc > v95 and sigma2 < 0:
            sigma2 = hbin[pos]
        elif acc > v99 and sigma3 < 0:
            sigma3 = hbin[pos]
            break

    return (sigma1, sigma2, sigma3)
def acis_dose_test_run():
    """
    test ska shell access by browsing acis evt1 entries with arc4gl
    input:  none
    output: the fits file names found are printed to stdout
    """
    start = '05/07/15,00:00:00'
    stop  = '05/15/15,00:00:00'
#
#--- write the arc4gl command file
#
    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    with open('./zspace', 'w') as f:
        f.write(line)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i./zspace > ./zout'
    cmd  = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    mtac.rm_file('./zspace')

    with open('./zout', 'r') as f:
        fitsList = [line.strip() for line in f.readlines()]
    mtac.rm_file('./zout')

    for ent in fitsList:
#
#--- print() form works under both python 2 and 3 (the bare statement does not)
#
        print(ent)
def print_cti_results(out_type, elm, ccd, content):
    """
    print out selected/corrected cti data to an appropriate file
    Input:  out_type --- directory name under <data_dir>
            elm      --- the name of element (al, mn, ti)
            ccd      --- ccd #
            content  --- a table list. each line is already terminated by a newline
    Output: <data_dir>/<out_type>/<elm>_ccd<ccd#>
    """
#
#--- "ofile" instead of "file": avoid shadowing the python builtin
#
    ofile = data_dir + '/' + out_type + '/' + elm + '_ccd' + str(ccd)
    mcf.rm_file(ofile)
#
#--- the with statement guarantees that the file handle is closed
#
    with open(ofile, 'w') as f:
        for ent in content:
            f.write(ent)
#
#--- just in case the line is not terminated by a newline, add it
#
            if re.search('\n', ent) is None:
                f.write('\n')
def notify_new_gratings_obs():
    """
    send email notification to admin when new gratings observations are found
    input:  none
    output: email to admin
    """
    odata = mcf.read_data_file(data_dir + 'obslist~')
    ndata = mcf.read_data_file(data_dir + 'obslist')
#
#--- entries in the new list which are not in the old one
#
    diff = set(ndata) - set(odata)
    if len(diff) == 0:
        return

    line = 'New Gratings Observations\n\n'
    for ent in diff:
        line = line + ent + '\n'

    with open(zspace, 'w') as fo:
        fo.write(line)

    cmd = 'cat ' + zspace + '|mailx -s \"Subject: New Gratings Observations\n\" ' + admin
    os.system(cmd)
    mcf.rm_file(zspace)
def remove_old_data(fits, cols, cut):
    """
    remove the data older than the cut time
    input:  fits --- fits file name
            cols --- a list of column names
            cut  --- cut time in seconds from 1998.1.1
    output: updated fits file
    """
    f    = pyfits.open(fits)
    data = f[1].data
    f.close()
#
#--- locate the first entry at or after the cut time
#--- NOTE(review): if every entry is older than the cut, pos stays 0 and
#--- nothing is removed --- confirm that this is the intended behavior
#
    pos   = 0
    dtime = list(data['time'])
    for k in range(0, len(dtime)):
        if dtime[k] >= cut:
            pos = k
            break
#
#--- keep only the data from the cut time on
#
    udata = []
    for col in cols:
        udata.append(list(data[col][pos:]))

    mcf.rm_file(fits)
    create_fits_file(fits, cols, udata)
def update_fits_file(fits, cols, cdata):
    """
    update fits file
    input:  fits  --- fits file name
            cols  --- a list of column names
            cdata --- a list of lists of data values
    output: updated fits file
    """
    if os.path.isfile(fits):
#
#--- the fits file exists: append the new data to the current columns
#
        f    = pyfits.open(fits)
        data = f[1].data
        f.close()

        udata = []
        for pos in range(0, len(cols)):
            udata.append(list(data[cols[pos]]) + list(cdata[pos]))

        mcf.rm_file(fits)
    else:
#
#--- the fits file does not exist: create a new one from the given data
#
        udata = cdata

    create_fits_file(fits, cols, udata)
def send_email(address, subject, content, cc='', submitter=''):
    """
    sending out email
    input:  address   --- email address. it can be multiple addresses, delimited by ','.
            subject   --- head line of the email
            content   --- email content
            cc        --- cc email address; default: <blank> (no cc address)
            submitter --- submitter name used to look up the test address; default: <blank>
    output: email sent out
    """
#
#--- if this is a test, all email is sent to the submitter (or admin)
#
    if test == 'yes':
        if submitter == '':
            address = admin
        else:
            address = oda.get_email_address(submitter)

    with open(zspace, 'w') as fo:
        fo.write(content)

    if cc == '':
        cmd = 'cat ' + zspace + ' |mailx -s "Subject (TEST!!): ' + subject + '" ' + address
    else:
        cmd = 'cat ' + zspace + ' |mailx -s "Subject (TEST!!): ' + subject + '" ' + ' -c' + cc + ' ' + address

    os.system(cmd)
    mcf.rm_file(zspace)
def find_last_entry(dir, tail):
    """
    for a given file name (with a full path), extract date information
    input:  dir  --- the name of directory where the data are kept
            tail --- a suffix of the data file
    output: [year, mon, day]
    """
#
#--- find the last file created
#
    cmd = 'ls ' + dir + '/*' + tail + '| tail -1 > ' + zspace
    os.system(cmd)
#
#--- close the file handle explicitly (it was leaked before); also avoid
#--- shadowing the builtin "file"
#
    with open(zspace, 'r') as f:
        out = f.read()
    mcf.rm_file(zspace)
#
#--- extract the time part. assume that the file ends with <tail> and the time
#--- part is prepended to it
#
    ldate = ''
    for ent in re.split('/', out):
        if re.search(tail, ent) is not None:
            ldate = re.split('_', ent)[0]
            break
#
#--- assume that the time is in the format of yyyymmdd, e.g. 20140515
#
    year = int(float(ldate[0:4]))
    mon  = int(float(ldate[4:6]))
    day  = int(float(ldate[6:8]))

    return [year, mon, day]
def run_bad_pix_and_photon(outdir):
    """
    run the bad pixel table script and the photon table script
    input:  outdir --- output directory name
    output: files in outdir: bad_pix_list, photons
    """
    cmd1 = "/usr/bin/env PERL5LIB="
#
#--- run the photon script
#
    bash(cmd1 + 'perl /data/mta4/MTA/bin/weekly_obs2html.pl 8 photons', env=ascdsenv)
    mcf.rm_file(zspace)
#
#--- run the bad pixel script
#
    bash(cmd1 + 'perl ' + tdir + 'read_bad_pix_new.perl', env=ascdsenv)
    mcf.rm_file(zspace)

    os.system('mv photons bad_pix_list ' + outdir)
def find_the_last_data_created_date():
    """
    find the date of the last created fits data file.
    input:  none
    output: [year, mon, day] --- year, month, and day of the month
    """
    cmd = 'ls /data/aschrc1/GENHRC/RAW/HRC_ENG/hrc_rates*fits* >' + zspace
    os.system(cmd)
    with open(zspace, 'r') as f:
        data = [line.strip() for line in f.readlines()]
    mcf.rm_file(zspace)
#
#--- collect the date part (yyyymmdd) of each file name
#
    date = []
    for ent in data:
        atemp = re.split('hrc_rates_', ent)
        btemp = re.split('\.fits', atemp[1])
        date.append(btemp[0])

    date.sort()
    lent = str(date[-1])
#
#--- split yyyymmdd with string slices instead of the previous
#--- character-by-character concatenation
#
    year = int(float(lent[0:4]))
    mon  = int(float(lent[4:6]))
    day  = int(float(lent[6:8]))

    return [year, mon, day]
def read_orbit_data(tstart, tstop):
    """
    read altitude and sun angle data
    input:  tstart --- starting time in seconds from 1998.1.1
            tstop  --- stopping time in seconds from 1998.1.1
    output: data   --- a list of lists of [time, alt, sun_angle]
    """
#
#--- set up the input for dataseeker and extract the data
#
    fits = 'dataseek_avg.fits'
    os.system('touch test')

    cmd = '/usr/bin/env PERL5LIB= ' \
        + " dataseeker.pl infile=test outfile=" + fits + " " \
        + "search_crit='columns=pt_suncent_ang,sc_altitude timestart=" + str(tstart) \
        + " timestop=" + str(tstop) + "' loginFile=" + lfile
    bash(cmd, env=ascdsenv)
#
#--- read the fits file and extract the data
#
    data = read_fits_data(fits, ['time', 'sc_altitude', 'pt_suncent_ang'])
#
#--- clean up
#
    mcf.rm_file(fits)
    mcf.rm_file('test')

    return data
def read_data_file(ifile, sep='', remove=0, c_len=0):
    """
    read ascii data file
    input:  ifile  --- file name
            sep    --- split indicator. default: '' --- no splitting
            remove --- whether to remove the file after reading. default: 0 --- no
            c_len  --- numbers of columns to be read (col=0 to col=c_len).
                       default: 0 --- read all
    output: data   --- a list of lines, or a list of lists of column values
    """
    data = mcf.read_data_file(ifile)
    if remove > 0:
        mcf.rm_file(ifile)
#
#--- no separator given: return the raw lines
#
    if sep == '':
        return data
#
#--- otherwise split each line into columns; convert to float when possible
#
    if c_len == 0:
        c_len = len(re.split(sep, data[0]))

    save = [[] for k in range(0, c_len)]

    for ent in data:
        atemp = re.split(sep, ent)
        for k in range(0, c_len):
            try:
                save[k].append(float(atemp[k]))
            except:
                save[k].append(atemp[k])

    return save
def run_kplookup(time):
    """
    run idl script kplookup.pro which adjusts the data for the solar wind influence
    input:  time --- time in the format of <yy><ydate>
    output: DE<time>.EPH.dat0 updated for the solar wind influence
            return value --- 1 on success, 0 on failure
    """
#
#--- restore the try/except that an "if xxx == 999" placeholder had disabled,
#--- and run the idl driver only once (the old code executed it twice: once via
#--- subprocess.call and once more via os.system)
#
    try:
        line = "kplookup,'/data/mta/Script/Ephem/EPH_Data/DE" + str(time) + ".EPH.dat0'" + "\n"
        line = line + "exit\n"
        with open('./kp_run.pro', 'w') as fo:
            fo.write(line)

        os.environ['IDL_PATH'] = idl_path
        subprocess.call("idl kp_run.pro", shell=True)

        mcf.rm_file('./kp_run.pro')
        return 1
    except:
        return 0
def selectTableData(file, colname, interval, outname, extension=1, clobber='no'):
    """
    select data in a given interval of colname values and create a new table fits file
    Input:  file     --- input table fits file
            colname  --- column name
            interval --- the data interval in the format of <start>:<stop>
            outname  --- output file name
            extension--- table extension number. default: 1
            clobber  --- overwrite the file if exists. if not given, 'no'
    """
#
#--- remove the old output file when clobbering is requested
#
    if (re.search('y', clobber) is not None) or (re.search('Y', clobber) is not None):
        mcf.rm_file(outname)

    t     = fits.open(file)
    tdata = t[extension].data
#
#--- select rows with start <= colname <= stop
#
    atemp = re.split(':', interval)
    start = float(atemp[0])
    stop  = float(atemp[1])

    modt  = tdata[tdata.field(colname) >= start]
    modt2 = modt[modt.field(colname) <= stop]

    header = fits.getheader(file)
    fits.BinTableHDU(modt2, header).writeto(outname)
def get_cti_data(test=''):
    """
    get cti data
    input:  test --- if not blank, skip saving the previous data file
    output: cti_data.txt (moved to /data/mta4/www/DAILY/mta_rad/)
    """
#
#--- gather all mn ccd data
#
    os.system('cat /data/mta/Script/ACIS/CTI/Data/Det_Data_adjust/mn_ccd* > ' + zspace)
#
#--- if a previous data file exists (and this is not a test), keep a copy
#
    if os.path.isfile('./cti_data.txt') and test == '':
        os.system('mv -f cti_data.txt cti_data.txt~')
#
#--- sort the collected data and drop duplicated lines
#
    os.system('sort -u -k 1,1 ' + zspace + ' -o cti_data.txt')
    mcf.rm_file(zspace)
#
#--- set the permission and move the file to the web area
#
    os.system('chmod 755 cti_data.txt')
    os.system('mv -f ./cti_data.txt /data/mta4/www/DAILY/mta_rad/')
def get_sca_data():
    """
    extract ephsca.fits data file from dataseeker
    input:  none
    output: ephsca.fits (moved to /data/mta4/www/DAILY/mta_rad/)
    NOTE: sca00 is not updated anymore and discontinued.
    """
#
#--- create an empty "test" file required by dataseeker
#
    mcf.rm_file('./test')
    open('./test', 'w').close()
#
#--- run dataseeker
#
    cmd = '/usr/bin/env PERL5LIB=' \
        + ' dataseeker.pl infile=test outfile=ephsca.fits search_crit="columns=_sca00_avg" ' \
        + 'clobber=yes loginFile=/home/mta/loginfile'
    bash(cmd, env=ascdsenv)

    mcf.rm_file('./test')
    os.system('mv -f ephsca.fits /data/mta4/www/DAILY/mta_rad/.')
def convert_acistemp_into_c():
    """
    convert all acistemp fits files in K into that of C
    input:  none, but read from <data_dir>/Acistemp/*fits
    output: converted fits files in Compaciscent/*fits
    """
    outdir = data_dir2 + '/Compaciscent/'

    os.system('ls ' + data_dir + '/Acistemp/*fits* > ' + zspace)
    fits_list = mcf.readFile(zspace)
    mcf.rm_file(zspace)

    for fits in fits_list:
        fname = re.split('\/', fits)[-1]
        msid  = re.split('_', fname)[0]
        cols  = [msid] + bcols

        flist = pyfits.open(fits)
        fdata = flist[1].data
#
#--- K to C conversion on every column (numpy array arithmetic)
#
        for col in cols:
            fdata[col] = fdata[col] - 273.15
        flist[1].data = fdata

        outfile = outdir + fname
        mcf.rm_file(outfile)
        flist.writeto(outfile)
def run_ftp(tdir, dlist, ftp_address=ftp_site, outdir='./'):
    """
    retrieve files from ftp site
    input:  tdir        --- location of ftp file under "ftp_site"
            dlist       --- a list of file names you want to retrieve
            ftp_address --- ftp address, default: ftp_site (see the top of this script)
            outdir      --- a directory name where you want to deposit files. default: './'
    output: retrieved files in outdir
            count       --- the number of files retrieved
    """
#
#--- open ftp connection
#
    ftp = FTP(ftp_address)
    ftp.login('anonymous', '*****@*****.**')
    ftp.cwd(tdir)
#
#--- retrieve each file in the list
#
    count = 0
    for fname in dlist:
        local_file = os.path.join(outdir, fname)
        try:
#
#--- close the local file handle explicitly (it was leaked before)
#
            with open(local_file, 'wb') as out:
                ftp.retrbinary('RETR %s' % fname, out.write)
            count += 1
        except:
            pass
#
#--- if the retrieved file is empty, remove it; guard the stat call so a
#--- failed open does not raise here
#
        if os.path.isfile(local_file) and os.stat(local_file)[6] == 0:
            mcf.rm_file(local_file)

    ftp.quit()
    return count
def create_fp_list(): """ create a focal plane temperature file list Input: none, but read from /data/mta/Script/ACIS/Focal/Short_term/ Ooutput: fstart --- a list of start time of the files fstop --- a list of stop time of the files f_list --- a list of file names """ # temperature_file_list = mcf.create_list_from_dir('/data/mta/Script/ACIS/Focal/Short_term/*') cmd = 'ls /data/mta/Script/ACIS/Focal/Short_term/data_* > ' + zspace os.system(cmd) f = open(zspace, 'r') temperature_file_list = [line.strip() for line in f.readlines()] f.close() mcf.rm_file(zspace) fstart = [] fstop = [] f_list = [] for tfile in temperature_file_list: # #--- tfile has a format of "data_2012_285_0003_285_1130" # chk = re.search('data_', tfile) if chk is not None: atemp = re.split('data_', tfile) if len(atemp) > 0: btemp = re.split('_', atemp[1]) year = int(btemp[0]) year2 = year ydate = int(btemp[1]) ydate2 = int(btemp[3]) if ydate > ydate2: year2 += 1 ttemp = btemp[2] ttemp = list(ttemp) if len(ttemp) == 4: hour = ttemp[0] + ttemp[1] minutes = ttemp[2] + ttemp[3] line = btemp[0] + ':' + btemp[ 1] + ':' + hour + ':' + minutes + ':00' begin = tcnv.axTimeMTA(line) ttemp = btemp[4] ttemp = list(ttemp) hour = ttemp[0] + ttemp[1] minutes = ttemp[2] + ttemp[3] line = str(year2) + ':' + btemp[ 3] + ':' + hour + ':' + minutes + ':00' end = tcnv.axTimeMTA(line) fstart.append(begin) fstop.append(end) f_list.append(tfile) return (fstart, fstop, f_list)
def find_observation(start, stop, lev):
    """
    find information about observations in the time period
    input:  start --- starting time in the format of <yyyy>-<mm>-<dd>T<hh>:<mm>:<ss>
            stop  --- stopping time in the format of <yyyy>-<mm>-<dd>T<hh>:<mm>:<ss>
            lev   --- data level
    output: ./acis_obs --- a file listing the observation information
    """
#
#--- run arc5gl to get the observation list
#
    run_arc5gl_browse(start, stop, lev, zspace)
#
#--- create an obsid list from the arc5gl output
#
    data = read_file(zspace, remove=1)
    obsid_list = []
    for ent in data:
        if re.search('acisf', ent) is None:
            continue
        atemp = re.split('acisf', ent)
        obsid = re.split('_', atemp[1])[0]
#
#--- drop a version part (e.g. N002) if it is attached
#
        if re.search('N', obsid) is not None:
            obsid = re.split('N', obsid)[0]
        obsid_list.append(obsid)
#
#--- remove duplicates
#
    obsid_list = list(set(obsid_list))
#
#--- open the database and extract data for each obsid
#
    save  = {}
    tlist = []
    for obsid in obsid_list:
        out = get_data_from_db(obsid)
#
#--- NOTE(review): "NULL" is not a python builtin; it must be defined at the
#--- module level for this comparison to work --- confirm
#
        if out != NULL:
            [tsec, line] = out
            tlist.append(tsec)
            save[tsec] = line

    tlist.sort()
#
#--- write the results out in time order
#
    mcf.rm_file('./acis_obs')
    with open('./acis_obs', 'w') as fo:
        for tsec in tlist:
            fo.write(save[tsec])
def read_data(infile, remove=0):
    """
    read the content of a file and return a list of its lines
    input:  infile --- file name
            remove --- if 1, remove the file after reading. default: 0
    output: data   --- a list of stripped lines ([] if the file cannot be read)
    """
    try:
        with open(infile, 'r') as f:
            data = [line.strip() for line in f.readlines()]
    except:
        data = []

    if remove == 1:
        mcf.rm_file(infile)

    return data
def extract_stat_result(file):
    """
    extract stat information from an image fits file with dmstat
    Input:  file --- image fits file
    Output: [avg, minv, minp, maxv, maxp, dev] --- mean, min/max values and
            their positions, and sigma, all as strings
    """
    cmd = "/usr/bin/env PERL5LIB=" + ' dmstat infile=' + file + ' centroid=no >' + zspace
    bash(cmd, env=ascdsenv)

    with open(zspace, 'r') as f:
        data = [line.strip() for line in f.readlines()]
    mcf.rm_file(zspace)
#
#--- extract mean, dev, min, and max from the dmstat output
#
    for ent in data:
        atemp = re.split('\s+|\t+', ent)

        if re.search('mean', ent) is not None:
            avg = atemp[1]

        if re.search('min', ent) is not None:
            minv  = atemp[1]
            btemp = re.split('\(', ent)
            ctemp = re.split('\s+|\t+', btemp[1])
            minp  = '(' + ctemp[1] + ',' + ctemp[2] + ')'

        if re.search('max', ent) is not None:
            maxv  = atemp[1]
            btemp = re.split('\(', ent)
            ctemp = re.split('\s+|\t+', btemp[1])
            maxp  = '(' + ctemp[1] + ',' + ctemp[2] + ')'

        if re.search('sigma', ent) is not None:
            dev = atemp[1]

    return [avg, minv, minp, maxv, maxp, dev]
def find_entries(dir):
    """
    create a list of files/directories under the "dir"
    input:  dir  --- directory name under /data/mta/www/mta_script_list/
    output: data --- a list of files and directories
    """
    os.system('ls -d /data/mta/www/mta_script_list/' + dir + '/* >' + zspace)

    with open(zspace, 'r') as f:
        data = [line.strip() for line in f.readlines()]
    mcf.rm_file(zspace)

    return data
def test_run_ftp(self):
    """
    check that run_ftp retrieves a non-empty ace_pkp_15m.txt file
    """
    tdir  = '/pub/lists/costello'
    dlist = ['ace_pkp_15m.txt']
    run_ftp(tdir, dlist)

    if os.path.isfile('ace_pkp_15m.txt'):
#
#--- the file must exist and must not be empty
#
        if os.stat('ace_pkp_15m.txt')[6] == 0:
            yes = 0
        else:
            yes = 1
        mcf.rm_file('ace_pkp_15m.txt')
    else:
        yes = 0
#
#--- assertEqual: assertEquals is a deprecated alias
#
    self.assertEqual(yes, 1)
def check_dataseeker_entry(msid):
    """
    check whether msid is listed in the dataseeker database
    input:  msid --- msid
    output: True/False
    """
    cmd = 'grep -i ' + msid + ' ' + house_keeping + 'dataseeker_entry_list >' + zspace
    os.system(cmd)
#
#--- an empty grep output means the msid is not in the list
#
    found = os.stat(zspace).st_size != 0
    mcf.rm_file(zspace)

    return found
def read_file_data(infile, remove=0):
    """
    read the content of the file and return it
    input:  infile --- file name
            remove --- if 1, remove the input file after reading it. default: 0
    output: out    --- a list of the stripped lines of the file
    """
    with open(infile, 'r') as f:
        out = [line.strip() for line in f.readlines()]

    if remove == 1:
        mcf.rm_file(infile)

    return out
def read_file(file, remove=0):
    """
    read a file
    input:  file   --- a file to be read
            remove --- whether to remove the file after it was read. default=0 (no)
    output: data   --- a list of the stripped lines
    """
    with open(file, 'r') as f:
        data = [line.strip() for line in f.readlines()]

    if remove > 0:
        mcf.rm_file(file)

    return data
def test_get_cti_data(self):
    """
    check that get_cti_data (test mode) creates cti_data.txt in the current directory
    """
    get_cti_data(test='test')
#
#--- list the current directory and look for cti_data.txt
#
    cmd = 'ls > ' + zspace
    os.system(cmd)
    with open(zspace, 'r') as f:
        data = [line.strip() for line in f.readlines()]
    mcf.rm_file(zspace)

    yes = 0
    for ent in data:
        if ent == 'cti_data.txt':
            yes = 1
            break

    self.assertEquals(yes, 1)
def find_available_deph_data():
    """
    create a list of potential new data file names
    input:  none, but read from /dsops/GOT/aux/DEPH.dir/
    output: cdata --- a list of the data file names
    """
#
#--- find the current time; tyear is the 2-digit year
#
    ttemp = tcnv.currentTime()
    year  = int(ttemp[0])
    ydate = int(ttemp[7])
    syear = str(year)
    tyear = syear[2:4]
#
#--- during the first 20 days of the year, also check last year's input data
#
    if ydate < 20:
        lyear  = year - 1
        slyear = str(lyear)
        ltyear = slyear[2:4]
        cmd = 'ls /dsops/GOT/aux/DEPH.dir/DE' + ltyear + '*.EPH > ' + zspace
        os.system(cmd)
        cmd = 'ls /dsops/GOT/aux/DEPH.dir/DE' + tyear + '*.EPH >> ' + zspace
        try:
            os.system(cmd)
        except:
            pass
    else:
        cmd = 'ls /dsops/GOT/aux/DEPH.dir/DE' + tyear + '*.EPH > ' + zspace
        try:
            os.system(cmd)
        except:
            pass

    try:
        with open(zspace, 'r') as f:
            cdata = [line.strip() for line in f.readlines()]
    except:
        cdata = []

    mcf.rm_file(zspace)

    return cdata
def extract_hrc_evt2(obsid):
    """
    extract hrc evt2 file
    Input:  obsid --- obsid of the data
    Output: hrcf<obsid>*evt2.fits file name if the data is extracted. if not, 'na'
    """
#
#--- write the required arc5gl command file
#
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=hrc\n'
    line = line + 'level=2\n'
    line = line + 'filetype=evt2\n'
    line = line + 'obsid=' + str(obsid) + '\n'
    line = line + 'go\n'

    with open(zspace, 'w') as f:
        f.write(line)

    cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + ' > fits_list'
#
#--- run arc5gl
#
    os.system(cmd)
    mcf.rm_file(zspace)
#
#--- check that the data was actually extracted
#
    with open('fits_list', 'r') as f:
        data = [line.strip() for line in f.readlines()]
    mcf.rm_file('fits_list')

    for ent in data:
        if re.search('fits.gz', ent) is not None:
#
#--- bug fixes: the gzip command was previously built but never executed, and
#--- str.replace was called with the regex '\.gz' (replace takes a literal
#--- string), so the '.gz' suffix was never stripped from the returned name
#
            os.system('gzip -d ' + ent)
            fits = ent.replace('.gz', '')
            return fits

    return 'na'
def send_mail(tag, email_list):
    """
    sending email out
    Input:  tag        --- user and machine name in the form of c3po-v_mta
            email_list --- a string of addresses, delimited by ','
    Output: email sent out
    """
#
#--- send only when zspace actually has content
#
    if mcf.isFileEmpty(zspace) > 0:
        atemp = re.split('_', tag)
        cmd = 'cat ' + zspace + ' | mailx -s "Subject: Cron Error : ' + atemp[1] + ' on ' + atemp[0] + '" ' + email_list
        os.system(cmd)
        mcf.rm_file(zspace)