def acis_cti_get_data():
    """
    extract acis evt1 files which are not processed for CTI observations
    input:  none, but read from directory: /data/mta/www/mp_reports/photons/acis/cti/*
    output: <working_dir>/acisf<obsid>*evt1.fits
    """
#
#--- get a new data list
#
    obsid_list = find_new_entry()
#
#--- if there is no new data, just exit
#
    if len(obsid_list) > 0:
#
#--- create a temporary saving directory
#
        mcf.mk_empty_dir(working_dir)
#
#--- extract acis event1 file
#
        outdir = working_dir + '/new_entry'    #---- new_entry list will be used later
#
#--- count successful extractions across the whole list; previously the
#--- counter was reset inside the loop, so earlier successes were forgotten
#--- and the gzip step below could be skipped even though files existed
#
        cnt = 0
        with open(outdir, 'w') as f:
            for obsid in obsid_list:
                f.write(obsid)
                f.write('\n')

                chk = extract_acis_evt1(obsid)
                if chk != 'na':
                    cnt += 1
                    cmd  = 'mv *' + str(obsid) + '*.fits.gz ' + working_dir
                    os.system(cmd)

        if cnt > 0:
            cmd = 'gzip -d ' + working_dir + '*.gz'
            os.system(cmd)
    else:
        exit(1)
def test_run_flt_pipe(self):
    """
    run run_flt_pipe on a known evt1 file (obsid 52675) and check both the
    pipe return status and the cti values extracted from the result file
    """
#
#--- set up a clean comparison area with a known evt1 file
#
    mcf.mk_empty_dir(temp_comp_area)
    cmd = 'cp ./acisf52675_000N001_evt1.fits.gz ' + temp_comp_area
    os.system(cmd)
    cmd = 'gzip -d ' + temp_comp_area + '/*.gz'
    os.system(cmd)

    efile = temp_comp_area + '/acisf52675_000N001_evt1.fits'
    chk   = run_flt_pipe(efile)
#
#--- assertEquals is a deprecated alias (removed in python 3.12); use assertEqual
#
    self.assertEqual(chk, 0)
#------------------------------------------------------------
    test_list = ['0', '52675', '0', '2014-06-27T04:21:09', '2014-06-27T08:31:19',
                 ['5.390+-0.449', '6.721+-0.277', '-99999+-00000', '0.533+-0.229'],
                 ['2.695+-0.036', '1.303+-0.033', '1.371+-0.028', '1.191+-0.023'],
                 ['1.303+-0.050', '2.531+-0.024', '1.352+-0.033', '1.372+-0.036']]

    full_list = find_cti_values_from_file()
    self.assertEqual(full_list[0], test_list)
def update_detrend_factor_table():
    """
    extract information about amp_avg values and update <house_keeping>/amp_avg_list
    input:  none
    output: <house_keeping>/amp_avg_list
    """
#
#--- start from an empty temp_dir so only the new fits files are present
#
    mcf.mk_empty_dir(temp_dir)
#
#--- extract stat fits files
#
    new_entry = get_new_entry()
#
#--- nothing new was extracted; leave the tables untouched
#
    if not new_entry:
        return
#
#--- compute amp_avg values for the extracted obsids
#
    processed_list = update_amp_avg_list(new_entry)
#
#--- clean up "keep_entry" and amp_avg_list
#
    update_holding_list(new_entry, processed_list)
    cleanup_amp_list()
def get_new_value(year, month):
    """
    extract aorwspd values from dataseeker
    input:  year    --- year of the data period
            month   --- month of the data period
    output: [av1, av2, av3, av4, av5, av6] --- six values of aorwspd(1-6)
    """
#
#--- set month long time interval in sec from 1998.1.1; the interval runs
#--- from the 15th of the previous month to the 15th of the given month
#
    year2  = year
    month2 = month - 1
    if month2 < 1:
#
#--- bug fix: this line previously read "mont2 = 12" (typo), which left
#--- month2 at 0 whenever month == 1 (january)
#
        month2 = 12
        year2 -= 1

    ydate = tcnv.findYearDate(year, month, 15)
    t_in  = str(year) + ':' + str(ydate) + ':00:00:00'
    time1 = tcnv.axTimeMTA(t_in)

    ydate = tcnv.findYearDate(year2, month2, 15)
    t_in  = str(year2) + ':' + str(ydate) + ':00:00:00'
    time2 = tcnv.axTimeMTA(t_in)
#
#--- set command to call dataseeker
#
    f = open('./test', 'w')        #-- we need an empty "test" file to run dataseeker
    f.close()
    mcf.mk_empty_dir('param')      #-- make empty param directory

    line = 'columns=_aorwspd1_avg,'
    line = line + '_aorwspd2_avg,'
    line = line + '_aorwspd3_avg,'
    line = line + '_aorwspd4_avg,'
    line = line + '_aorwspd5_avg,'
    line = line + '_aorwspd6_avg'
    line = line + ' timestart=' + str(time2)
    line = line + ' timestop='  + str(time1)
#
#--- (a dead assignment building a "punlearn dataseeker; ..." string was
#---  removed here; it was immediately overwritten by the line below)
#
    cmd = 'dataseeker.pl infile=test outfile=ztemp.fits search_crit="'
    cmd = cmd + line + '" loginFile="' + loginfile + '"'
#
#--- run dataseeker
#
    bash("/usr/bin/env PERL5LIB='' " + cmd, env=ascdsenv)

    mcf.rm_file(zspace)
    mcf.rm_file('./test')
    os.system('rm -rf ./param')
#
#--- read fits file; on any failure fall back to empty columns so the
#--- averages below come out as 0.0
#
    try:
        dout = pyfits.getdata('./ztemp.fits')
        aw1  = dout.field('AORWSPD1_AVG')
        aw2  = dout.field('AORWSPD2_AVG')
        aw3  = dout.field('AORWSPD3_AVG')
        aw4  = dout.field('AORWSPD4_AVG')
        aw5  = dout.field('AORWSPD5_AVG')
        aw6  = dout.field('AORWSPD6_AVG')
    except:
        aw1 = aw2 = aw3 = aw4 = aw5 = aw6 = []

    mcf.rm_file("./ztemp.fits")
#
#--- create monthly "sum" of the reaction wheel rotations
#--- dataseeker gives 5 min avg of the value; one day is 24 hr x 60 min / 5 min = 288.
#
    out = []
    for aw in (aw1, aw2, aw3, aw4, aw5, aw6):
        total = 0.0
        for val in aw:
            total += abs(val)
        out.append(total / 288)

    return out
def use_arc5gl_acis_hist(year, month, end_year, end_month): ''' using arc5gl, extreact acis hist data input: year, month, end_year, end_month (the last two are in sec from 1.1.1998) output: acis hist data in fits files saved in exc_dir/Temp_dir ''' # #--- prep for output files # tdir = exc_dir + 'Temp_dir' mcf.mk_empty_dir(tdir) # #--- write a command file # with open(zspace, 'w') as f: f.write("operation=retrieve\n") f.write("dataset=flight\n") f.write("detector=acis\n") f.write("level=0\n") f.write("filetype=histogr\n") lmon = str(month) if month < 10: lmon = '0' + lmon lyear = str(year) line = 'tstart=' + lyear + '-' + lmon + '-01T00:00:00\n' f.write(line) lmon = str(int(end_month)) if end_month < 10: lmon = '0' + lmon lyear = str(end_year) line = 'tstop=' + lyear + '-' + lmon + '-01T00:00:00\n' f.write(line) f.write("go\n") # #--- run arc5gl # try: cmd = 'cd ' + exc_dir + '; /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace os.system(cmd) except: cmd1 = "/usr/bin/env PERL5LIB= " cmd2 = ' cd ' + exc_dir + '; /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace try: os.system(cmd2) except: cmd = cmd1 + cmd2 bash(cmd, env=ascdsenv) mcf.rm_files(zspace) # #--- check whether the files are extracted. if not just exit. # cmd = 'ls ' + exc_dir + '* > ' + zspace os.system(cmd) chk = open(zspace, 'r').read() mcf.rm_files(zspace) m = re.search('fits.gz', chk) if m is None: exit(1) if mcf.check_file_with_name(exc_dir, 'fits') == False: exit(1) cmd = 'mv ' + exc_dir + '*fits.gz ' + exc_dir + 'Temp_dir/.' os.system(cmd) cmd = 'gzip -d ' + exc_dir + 'Temp_dir/*.gz' os.system(cmd) # #--- make a list of extracted fits files # cmd = 'ls ' + exc_dir + 'Temp_dir/*fits > ' + zspace os.system(cmd) data = mcf.read_data_file(zspace, remove=1) return data
def clean_table():
    """
    sort and clean the table data in <data_dir>/Results
    input:  none but read from <data_dir>/Results/<elm>_ccd<ccd#>
    output: cleaned up <data_dir>/Results/<elm>_ccd<ccd#>
    """
#
#--- make a backup copy in a date-stamped directory (Save_<mm><dd><yy>)
#
    atime = tcnv.currentTime(format='UTC')
    tyear = str(atime[0])
    syear = tyear[2:4]
    smon  = str(atime[1]).zfill(2)
    sday  = str(atime[2]).zfill(2)

    cout = data_dir + '/Results/Save_' + smon + sday + syear
    mcf.mk_empty_dir(cout)

    cmd = 'cp -f ' + data_dir + '/Results/*_ccd* ' + cout + '/.'
    os.system(cmd)
#
#--- now clean up the data
#
    cmd = 'ls ' + data_dir + '/Results/*_ccd* > ' + zspace
    os.system(cmd)

    with open(zspace, 'r') as f:
        flist = [line.strip() for line in f.readlines()]
    mcf.rm_file(zspace)

    for tfile in flist:
        with open(tfile, 'r') as f:
            data = [line.strip() for line in f.readlines()]
#
#--- the original sort-then-skip-repeats loop produced a sorted, de-duplicated
#--- list but crashed with IndexError on an empty file (data[0]); sorted(set())
#--- gives the same result and handles the empty case
#
        cleaned = sorted(set(data))

        with open(tfile, 'w') as fo:
            for ent in cleaned:
                fo.write(ent + '\n')
def get_new_data(obsid_list, start_list, stop_list):
    """
    extract fits for the obsid, then extract data needed
    input:  obsid_list  --- a list of obsids
            start_list  --- a list of start time of the observation
            stop_list   --- a list of stop time of the observation
    output: data_set    --- a list of data sets
    """
#
#--- middle of the full time span; used as a time stamp for this data set
#
    tstart = start_list[0]
    tstop  = stop_list[-1]
    mtime  = int(tstart + 0.5 * (tstop - tstart))
#
#--- extract acis event1 files and collect the selected rows
#
    mcf.mk_empty_dir(working_dir)

    seen      = []
    fits_list = []
    data_set  = []
    for idx, obsid in enumerate(obsid_list):
        start = start_list[idx]
        stop  = stop_list[idx]
#
#--- extract each obsid only once; on a repeat, look the file up in fits_list
#
        if obsid in seen:
            fits = 'na'
            for cand in fits_list:
                if re.search(obsid, cand) is not None:
                    fits = cand
                    break
        else:
            seen.append(obsid)
            fits = extract_acis_evt1(obsid)
            if fits != 'na':
                fits_list.append(fits)

        if fits == 'na':
            continue
#
#--- "Table.read" opens fits file and reads fits table data
#
        tdata = Table.read(working_dir + fits, hdu=1)
        tdiff = stop - start
#
#--- keep the specified time range, pha range, and chipy
#
        sel = (tdata.field('time') >= start) & (tdata.field('time') <= stop) \
            & (tdata.field('pha') <= 4000) & (tdata.field('chipy') <= 20)
        tdata = tdata[sel]
        if len(tdata) < 1000:
            continue

        tdata = tdata[cselect]
#
#--- drop grades 1, 5, and anything above 6
#
        sel = (tdata.field('grade') <= 6) & (tdata.field('grade') != 1) \
            & (tdata.field('grade') != 5)
        data_set.append(tdata[sel])

    return data_set
def get_new_data(obsid_list, start_list, stop_list):
    """
    extract fits for the obsid, then extract data needed
    input:  obsid_list  --- a list of obsids
            start_list  --- a list of start time of the observation
            stop_list   --- a list of stop time of the observation
    output: data_set    --- a list of data sets
    """
    data_set = []
#
#--- middle of the time span; used as a time stamp for this data set
#
    tstart = start_list[0]
    tstop  = stop_list[-1]
    mtime  = int(tstart + 0.5 * (tstop - tstart))
#
#--- extract acis event1 file and combine all of them
#
    mcf.mk_empty_dir(working_dir)

    handled   = []
    fits_list = []
    for pos, obsid in enumerate(obsid_list):
        start = start_list[pos]
        stop  = stop_list[pos]
#
#--- each obsid is extracted only once; repeats re-use the name kept in fits_list
#
        if obsid not in handled:
            handled.append(obsid)
            fits = extract_acis_evt1(obsid)
            if fits != 'na':
                fits_list.append(fits)
        else:
            fits = 'na'
            for candidate in fits_list:
                if re.search(obsid, candidate):
                    fits = candidate
                    break

        if fits == 'na':
            continue
#
#--- "Table.read" opens fits file and reads fits table data
#
        rfits = working_dir + fits
        tdata = Table.read(rfits, hdu=1)
        tdiff = stop - start
#
#--- extract specified time range, pha range, and chipy
#
        mask = (tdata.field('time') >= start) & (tdata.field('time') <= stop) & (tdata.field('pha') <= 4000) & (tdata.field('chipy') <= 20)
        tdata = tdata[mask]
        if len(tdata) < 1000:
            continue

        tdata = tdata[cselect]
#
#--- grade selection: keep <= 6, excluding 1 and 5
#
        mask = (tdata.field('grade') <= 6) & (tdata.field('grade') != 1) & (tdata.field('grade') != 5)
        tdata = tdata[mask]

        data_set.append(tdata)

    return data_set