def run_bad_pix_and_photon(outdir):
    """
    run bad pixel table script and photon table script
    input:  outdir  --- output directory name
    output: files in outdir: bad_pix_list, photons
    """
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = 'perl /data/mta4/MTA/bin/weekly_obs2html.pl 8 photons'
    cmd = cmd1 + cmd2
#
#--- run the photon script
#
    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)

    cmd2 = 'perl ' + tdir + 'read_bad_pix_new.perl'
    cmd = cmd1 + cmd2
#
#--- run the bad pixel script
#
    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)

    cmd = 'mv photons bad_pix_list ' + outdir
    os.system(cmd)
def db_setup(self, wipe=False, tstart=None, tstop=None):
    """
    Initialize tables or delete them

    Use the make_new_tables script to initialize new tables and copy over
    rows from the "real" sybase tables as needed.

    :param tstart: start of range to copy from sybase
    :param tstop: stop of range to copy from sybase
    :param wipe: just delete the tables
    """
    make_new_tables = os.path.join('./make_new_tables.py')
    if wipe:
        make_new_tables += " --wipe "
    testdb = self.db_handle()
    db_str = self.db_cmd_str()
    cmd = "%s %s" % (make_new_tables, db_str)
    if tstart:
        cmd += " --tstart %s " % DateTime(tstart).date
    if tstop:
        cmd += " --tstop %s " % DateTime(tstop).date
    err.write("%s \n" % cmd)
    bash(cmd)
    self.db_initialized = True
def extract_point(obs_info, src, obsdir, point):
    print "Remaking {}".format(point)
    det = 'acis'
    radius = 6
    if obs_info['instrume'] == 'HRC':
        det = 'hrc'
        radius = 30
    tempdir = tempfile.mkdtemp(dir='/export/jeanconn/tempdir/')
    bash('echo "cd %s\n obsid=%d\n get %s2{evt2}\n" | arc5gl -stdin'
         % (tempdir, src['obsid'], det))
    reg = os.path.join(obsdir, 'center.reg')
    c = open(reg, 'w')
    regstring = "circle(%f, %f, %d)" % (src[0]['X'], src[0]['Y'], radius)
    c.write("%s\n" % regstring)
    c.close()
    evt2 = glob('%s/*evt2.fits*' % tempdir)[0]
    dmstring = '[cols time,ra,dec,x,y]'
    if det == 'acis':
        dmstring = dmstring + '[energy=300:7000]'
    #print("/proj/sot/ska/bin/doapp -ciao dmcopy %s'[(x,y)=%s]%s' %s" %
    #      (evt2, regstring, dmstring, point))
    status = bash("/proj/sot/ska/bin/doapp -ciao dmcopy %s'[(x,y)=%s]%s' %s clobber+"
                  % (evt2, regstring, dmstring, point))
    if not status:
        os.unlink(evt2)
def extract_acis_evt1(start, stop):
    """
    extract acis evt1 files
    input:  start   --- start time in the format of mm/dd/yy (e.g. 05/01/15)
            stop    --- stop time in the format of mm/dd/yy
    output: acisf*evt1.fits.gz
    """
#
#--- write required arc4gl command
#
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
#    line = line + 'version=last\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    f = open(zspace, 'w')
    f.write(line)
    f.close()

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
def run_arc4gl(start, stop, operation='retrieve', dataset='flight', detector='telem', level='raw'):
    """
    extract data from archive using arc4gl
    input:  start       --- starting time in the format of mm/dd/yy,hh/mm/ss. hh/mm/ss is optional
            stop        --- stopping time
            operation   --- operation command.  default = retrieve
            dataset     --- dataset name.       default = flight
            detector    --- detector name.      default = telem
            level       --- level.              default = raw
    output: extracted data set
    """
#
#--- write arc4gl command
#
    line = 'operation = ' + operation + '\n'
    line = line + 'dataset = ' + dataset + '\n'
    line = line + 'detector = ' + detector + '\n'
    line = line + 'level = ' + level + '\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'
    line = line + 'go\n'

    fo = open(zspace, 'w')
    fo.write(line)
    fo.close()
#
#--- run arc4gl
#
    cmd1 = '/usr/bin/env PERL5LIB=""'
    ####cmd2 = ' source /home/mta/bin/reset_param;'
    cmd2 = ' echo ' + hakama + '|arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
def run_arc5gl(line, out=''):
    """
    run arc5gl command
    input:  line    --- arc5gl command lines
            out     --- output file name; default: "" --- no output file
    output: results of the command
    """
    with open(zspace, 'w') as fo:
        fo.write(line)

    try:
        cmd = '/proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace
        if out != '':
            cmd = cmd + '> ' + out
        os.system(cmd)
    except:
        try:
            cmd = '/proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace
            if out != '':
                cmd = cmd + '> ' + out
            os.system(cmd)
        except:
            cmd1 = "/usr/bin/env PERL5LIB= "
            cmd2 = '/proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace
            if out != '':
                cmd2 = cmd2 + '> ' + out
            cmd = cmd1 + cmd2
            bash(cmd, env=ascdsenv)

    mcf.rm_files(zspace)
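#
#--- usage sketch: one way run_arc5gl() might be called; the detector, obsid,
#--- and output file name below are hypothetical, illustrative values only
#
def example_run_arc5gl_usage():
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=hrc\n'
    line = line + 'level=1\n'
    line = line + 'filetype=evt1\n'
    line = line + 'obsid=12345\n'           #--- hypothetical obsid
    line = line + 'go\n'

    run_arc5gl(line, out='./arc5gl_result')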
def acis_dose_test_run():
    """
    test ska shell access
    """
    start = '05/07/15,00:00:00'
    stop = '05/15/15,00:00:00'

    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    f = open('./zspace', 'w')
    f.write(line)
    f.close()

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i./zspace > ./zout'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    mtac.rm_file('./zspace')

    f = open('./zout', 'r')
    fitsList = [line.strip() for line in f.readlines()]
    f.close()
    mtac.rm_file('./zout')

    for ent in fitsList:
        print ent
def update_dea_rdb():
    """
    update DS deahk related rdb files
    input:  none but read from: <house_keeping>/today_dump_files
    output: <ds_dir>/deahk_temp
            <ds_dir>/deahk_elec
    """
#
#--- make backup first
#
    cmd = 'cp ' + ds_dir + 'deahk_temp.rdb ' + ds_dir + 'deahk_temp.rdb~'
    os.system(cmd)

    cmd = 'cp ' + ds_dir + 'deahk_elec.rdb ' + ds_dir + 'deahk_elec.rdb~'
    os.system(cmd)
#
#--- read today's dump list
#
    dfile = house_keeping + 'today_dump_files'
    data = mcf.read_data_file(dfile)

    for ent in data:
        ifile = '/dsops/GOT/input/' + ent + '.gz'
#
#--- run Peter Ford's scripts and pipe the output into deahk.pl
#
        cmd1 = "/usr/bin/env PERL5LIB='' "
        #cmd2 = '/bin/gzip -dc ' + ifile + '|' + bin_dir + 'getnrt -O | ' + bin_dir + 'deahk.py'
        cmd2 = '/bin/gzip -dc ' + ifile + '|' + bin_dir + 'getnrt -O | ' + bin_dir + 'deahk.pl'
        cmd = cmd1 + cmd2

        bash(cmd, env=ascdsenv)

        if os.path.isfile('./deahk_temp.tmp'):
            process_deahk('deahk_temp')

        if os.path.isfile('./deahk_elec.tmp'):
            process_deahk('deahk_elec')
def run_arc(inst, start, stop):
    """
    run arc4gl and extract evt2 data for "inst"
    input:  inst    --- instrument, acis or hrc
            start   --- interval start time in format of mm/dd/yy (e.g. 05/01/15)
            stop    --- interval stop time in format of mm/dd/yy
    """
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=' + inst + '\n'
    line = line + 'level=2\n'
    line = line + 'filetype=evt2\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    f = open(zspace, 'w')
    f.write(line)
    f.close()

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    cmd = cmd1 + cmd2
#
#--- run arc4gl
#
    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
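#
#--- usage sketch: run_arc() expects mm/dd/yy date strings; the instruments and
#--- dates below are hypothetical examples
#
def example_run_arc_usage():
    run_arc('acis', '05/01/15', '05/08/15')
    run_arc('hrc', '05/01/15', '05/08/15')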
def run_arc4gl(start, stop, operation='retrieve', dataset='flight', detector='telem', level='raw'):
    """
    extract data from archive using arc4gl
    input:  start       --- starting time in the format of mm/dd/yy,hh/mm/ss. hh/mm/ss is optional
            stop        --- stopping time
            operation   --- operation command.  default = retrieve
            dataset     --- dataset name.       default = flight
            detector    --- detector name.      default = telem
            level       --- level.              default = raw
    output: extracted data set
    """
#
#--- write arc4gl command
#
    line = 'operation = ' + operation + '\n'
    line = line + 'dataset = ' + dataset + '\n'
    line = line + 'detector = ' + detector + '\n'
    line = line + 'level = ' + level + '\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'
    line = line + 'go\n'

    fo = open(zspace, 'w')
    fo.write(line)
    fo.close()
#
#--- run arc4gl
#
    cmd1 = '/usr/bin/env PERL5LIB=""'
    cmd2 = ' echo ' + hakama + '|arc4gl -U' + dare + ' -Sarcocc -i' + zspace
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
def run_arc5gl(tstart, tstop, ftype):
    """
    run arc5gl
    input:  tstart  --- start time
            tstop   --- stop time
            ftype   --- file type
    output: extracted fits files
    """
    line = 'operation=retrieve\n'
    line = line + 'tstart=' + str(tstart) + '\n'
    line = line + 'tstop=' + str(tstop) + '\n'
    line = line + 'dataset=flight\n'
    line = line + 'level=0\n'
    line = line + 'detector=hrc\n'
    line = line + 'subdetector=eng\n'
    line = line + 'filetype=' + ftype + '\n'
    line = line + 'go\n'

    with open(zspace, 'w') as fo:
        fo.write(line)

    try:
        cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace
        os.system(cmd)
    except:
        cmd1 = "/usr/bin/env PERL5LIB="
        cmd2 = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace
        cmd = cmd1 + cmd2
        bash(cmd, env=ascdsenv)

    mcf.rm_files(zspace)
def run_arc5gl(tstart, tstop, ftype):
    """
    run arc5gl
    input:  tstart  --- start time
            tstop   --- stop time
            ftype   --- file type
    output: extracted fits files
    """
    fo = open(zspace, 'w')

    line = 'operation=retrieve\n'
    line = line + 'tstart=' + str(tstart) + '\n'
    line = line + 'tstop=' + str(tstop) + '\n'
    line = line + 'dataset=flight\n'
    line = line + 'level=0\n'
    line = line + 'detector=hrc\n'
    line = line + 'subdetector=eng\n'
    line = line + 'filetype=' + ftype + '\n'
    line = line + 'go\n'

    fo.write(line)
    fo.close()

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
def run_celldetect():
    """
    Run celldetect for each data set (*fits.gz, except ERs) in the current directory.
    Creates *src2.fits files for each dataset.
    """
    # Take all *fits.gz files except ERs (er_files, obsids 5****, 6****)
    all_files = glob('*.fits.gz')
    er_files = glob('acisf[5|6]*.fits.gz') + glob('hrcf[5|6]*.fits.gz')
    fits_files = list(set(all_files) - set(er_files))

    bash("/usr/bin/env PERL5LIB=")

    for infile in fits_files:
        # Handle only non-grating observations
        grating = read_header_value_from_file(infile, 'GRATING')
        if grating == 'NONE':
            outfile = infile.replace('evt', 'src')
            outfile = outfile.replace('.gz', '')

            # mode=h ??
            cmd_str = f'celldetect infile={infile} outfile={outfile} > /dev/null'

            # run celldetect
            try:
                bash(cmd_str, env=ascdsenv)
            except:
                logging.info(f'Celldetect failed on {infile} file')
                pass
def get_sca_data():
#
# NOTE: sca00 is not updated anymore and discontinued.
#
    """
    extract ephsca.fits data file from dataseeker
    input:  none
    output: ephsca.fits
    """
#
#--- create an empty "test" file
#
    mcf.rm_file('./test')
    fo = open('./test', 'w')
    fo.close()
#
#--- and run dataseeker
#
    cmd1 = '/usr/bin/env PERL5LIB='
    cmd2 = ' dataseeker.pl infile=test outfile=ephsca.fits search_crit="columns=_sca00_avg" '
    cmd3 = 'clobber=yes loginFile=/home/mta/loginfile'
    cmd = cmd1 + cmd2 + cmd3

    bash(cmd, env=ascdsenv)

    mcf.rm_file('./test')

    cmd = 'mv -f ephsca.fits /data/mta4/www/DAILY/mta_rad/.'
    os.system(cmd)
def combine_image(fits1, fits2):
    """
    combine two fits image files
    input:  fits1, fits2
    output: fits2   --- the combined fits file is moved to fits2
    """
    chk = mtac.chkFile('./', fits2)         #--- check whether the second fits file exists

    if chk == 0:
        cmd = 'mv ' + fits1 + ' ' + fits2
        os.system(cmd)
    else:
        try:
            cmd1 = "/usr/bin/env PERL5LIB="
            cmd2 = ' dmimgcalc infile=' + fits1 + ' infile2=' + fits2 + ' outfile=mtemp.fits operation=add clobber=yes'
            cmd = cmd1 + cmd2
            bash(cmd, env=ascdsenv)

            cmd = 'rm ' + fits1
            os.system(cmd)
#
#--- rename the combined fits image to "fits2"
#
            cmd = 'mv mtemp.fits ' + fits2
            os.system(cmd)
        except:
            cmd = 'rm ' + fits1
            os.system(cmd)
def read_orbit_data(tstart, tstop):
    """
    read altitude and sun angle data
    input:  tstart  --- starting time in seconds from 1998.1.1
            tstop   --- stopping time in seconds from 1998.1.1
    output: data    --- a list of lists of [time, alt, sun_angle]
    """
#
#--- set up the input for dataseeker and extract the data
#
    fits = 'dataseek_avg.fits'

    cmd = 'touch test'
    os.system(cmd)

    cmd1 = '/usr/bin/env PERL5LIB= '
    cmd2 = " dataseeker.pl infile=test outfile=" + fits + " "
    cmd2 = cmd2 + "search_crit='columns=pt_suncent_ang,sc_altitude timestart=" + str(tstart)
    cmd2 = cmd2 + " timestop=" + str(tstop) + "' loginFile=" + lfile
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- read fits file and extract the data
#
    cols = ['time', 'sc_altitude', 'pt_suncent_ang']
    data = read_fits_data(fits, cols)
#
#--- clean up
#
    mcf.rm_file(fits)
    mcf.rm_file('test')

    return data
def run_idl(dir):
    """
    process fits files with an updated idl script
    input:  dir --- the name of output directory
    output: dir/<stemp> --- a directory which contains the processed data
    """
#
#--- read a template and create the current command file
#
    file = bin_dir + 'house_keeping/pl_template'
    line = open(file, 'r').read()
    line = line.replace('#DIR#', dir)

    fo = open('./mk_idl_command.pl', 'w')
    fo.write(line)
    fo.close()
#
#--- make an output directory
#
    cmd = 'mkdir /data/mta4/Gratings/' + dir
    os.system(cmd)
#
#--- run a perl script to create an idl script
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' perl ./mk_idl_command.pl '
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- run the idl script to process fits files
#
    os.system('idl ./mkcommand.idl')
def run_flt_pipe(part_fits):
    """
    run mta flt_run_pipe to extract cti data
    Input:  part_fits   --- fits file name
    Output: <temp_comp_area>/photons/....  cti data
            0 if the operation was successful
            1 if the operation failed
    """
    try:
#
#--- create input information file for flt_run_pipe
#
        cmd = 'echo ' + part_fits + '> ' + temp_comp_area + 'zcomp_dat.lis'
        os.system(cmd)
#
#--- run the pipe
#
        pipe_cmd1 = '/usr/bin/env PERL5LIB='
        pipe_cmd2 = " flt_run_pipe -r zcomp -i" + temp_comp_area + " -o" + temp_comp_area + " -t mta_monitor_cti.ped -a \"genrpt=yes\" "
        pipe_cmd = pipe_cmd1 + pipe_cmd2

        bash(pipe_cmd, env=ascdsenv, logfile=open('log.txt', 'w'))
#
#--- check whether the computation actually worked; we assume that if the "photons" directory was created, it did
#
        return test_photon_dir()
    except:
        return 1
def run_idl(idir):
    """
    process fits files with an updated idl script
    input:  idir --- the name of output directory
    output: dir/<stemp> --- a directory which contains the processed data
    """
#
#--- read a template and create the current command file
#
    ifile = house_keeping + 'pl_template'
    with open(ifile, 'r') as f:
        line = f.read()

    line = line.replace('#DIR#', idir)

    with open('./mk_idl_command.pl', 'w') as fo:
        fo.write(line)
#
#--- make an output directory
#
    cmd = 'mkdir -p ' + exc_dir + 'Gratings/' + idir
    os.system(cmd)
#
#--- run a perl script to create an idl script
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' perl ./mk_idl_command.pl '
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- run the idl script to process fits files
#
    os.system('idl ./mkcommand.idl')

    os.system('rm -f ./mkcommand.idl ./mk_idl_command.pl')
def find_two_sigma_value(fits):
#
#-- make histogram
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimghist infile=' + fits + ' outfile=outfile.fits hist=1::1 strict=yes clobber=yes'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmlist infile=outfile.fits outfile=' + zspace + ' opt=data'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    f = open(zspace, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    mcf.rm_file(zspace)
#
#--- read bin # and its count rate
#
    hbin = []
    hcnt = []
    vsum = 0
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        if mcf.chkNumeric(atemp[0]):
            hbin.append(float(atemp[1]))
            val = int(atemp[4])
            hcnt.append(val)
            vsum += val
#
#--- checking one sigma and two sigma counts
#
    if len(hbin) > 0:
        v68 = int(0.68 * vsum)
        v95 = int(0.95 * vsum)
        v99 = int(0.997 * vsum)

        sigma1 = -999
        sigma2 = -999
        sigma3 = -999
        acc = 0
        for i in range(0, len(hbin)):
            acc += hcnt[i]
            if acc > v68 and sigma1 < 0:
                sigma1 = hbin[i]
            elif acc > v95 and sigma2 < 0:
                sigma2 = hbin[i]
            elif acc > v99 and sigma3 < 0:
                sigma3 = hbin[i]
                break

        return (sigma1, sigma2, sigma3)
    else:
        return (0, 0, 0)
def combine_image(fits1, fits2):
    """
    combine two fits image files
    input:  fits1, fits2
    output: fits2   --- a combined fits file is moved to fits2
    """
    if os.path.isfile(fits2):
        try:
            cmd1 = "/usr/bin/env PERL5LIB="
            cmd2 = ' dmimgcalc infile=' + fits1 + ' infile2=' + fits2
            cmd2 = cmd2 + ' outfile=mtemp.fits operation=add clobber=yes'
            cmd = cmd1 + cmd2
            bash(cmd, env=ascdsenv)

            mcf.rm_files(fits1)
#
#--- rename the combined fits image to "fits2"
#
            cmd = 'mv mtemp.fits ' + fits2
            os.system(cmd)
        except:
            mcf.rm_files(fits1)
            mcf.rm_files('mtemp.fits')
    else:
        cmd = 'mv ' + fits1 + ' ' + fits2
        os.system(cmd)
def run_focal_temp_data(start, stop):
    """
    run focal temp script and create a plot, read a table
    input:  start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
    output: fcnt    --- number of peaks observed
            fdata   --- table input
    """
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' /usr/local/bin/perl ' + wdir + 'get_ftemp_data.perl ' + str(start) + ' ' + str(stop)
    cmd = cmd1 + cmd2
#
#--- run the focal temp script to extract data
#
    bash(cmd, env=ascdsenv)

    mcf.rm_file('./test')

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' idl ./run_temp > out'
    cmd = cmd1 + cmd2
#
#--- run the focal temp script to create a plot
#
    bash(cmd, env=ascdsenv2)

    cmd = 'rm -rf ./*fits '
    os.system(cmd)
def img_calc(image1, image2='None', outfile='temp.fits', factor=1, method='add'):
    """
    function to run dm tool dmimgcalc
    input:  image1  --- input image fits file 1
            image2  --- input image fits file 2,        default: "None"
            outfile --- output image fits file name,    default: "temp.fits"
            factor  --- a scaling factor,               default: 1
            method  --- operation: add, div, etc.,      default: add
    output: <outfile>
    """
    cmd1 = '/usr/bin/env PERL5LIB=""'
    cmd2 = ' dmimgcalc ' + image1 + ' ' + image2 + ' ' + outfile + ' ' + method

    if factor != 1:
        cmd2 = cmd2 + ' weight=' + str(factor)

    cmd2 = cmd2 + ' clobber=yes'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
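#
#--- usage sketch: the file names below are hypothetical; "factor" is only passed
#--- to dmimgcalc as weight when it differs from 1
#
def example_img_calc_usage():
    img_calc('image_week1.fits', image2='image_week2.fits',
             outfile='image_combined.fits', factor=0.5, method='add')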
def extract_point(obs_info, src, obsdir, point):
    print "Remaking {}".format(point)
    det = 'acis'
    radius = 6
    if obs_info['instrume'] == 'HRC':
        det = 'hrc'
        radius = 30
    tempdir = tempfile.mkdtemp(dir='/export/jeanconn/tempdir/')
    bash('echo "cd %s\n obsid=%d\n get %s2{evt2}\n" | arc5gl -stdin'
         % (tempdir, src['obsid'], det))
    reg = os.path.join(obsdir, 'center.reg')
    c = open(reg, 'w')
    regstring = "circle(%f, %f, %d)" % (src[0]['X'], src[0]['Y'], radius)
    c.write("%s\n" % regstring)
    c.close()
    evt2 = glob('%s/*evt2.fits*' % tempdir)[0]
    dmstring = '[cols time,ra,dec,x,y]'
    if det == 'acis':
        dmstring = dmstring + '[energy=300:7000]'
    #print("/proj/sot/ska/bin/doapp -ciao dmcopy %s'[(x,y)=%s]%s' %s" %
    #      (evt2, regstring, dmstring, point))
    status = bash(
        "/proj/sot/ska/bin/doapp -ciao dmcopy %s'[(x,y)=%s]%s' %s clobber+"
        % (evt2, regstring, dmstring, point))
    if not status:
        os.unlink(evt2)
def run_arc4gl(start, stop):
    """
    extract acis and hrc evt1a.fits files using arc4gl
    input:  start   --- start time in the format of 03/01/15
            stop    --- stop time
    output: fits files (e.g., acisf17108_001N002_evt1a.fits.gz)
    """
#
#--- read a template and create the current command file
#
    file = bin_dir + 'house_keeping/arc_template'
    line = open(file, 'r').read()
    line = line.replace('#START#', start)
    line = line.replace('#STOP#', stop)

    fo = open('./run_arc', 'w')
    fo.write(line)
    fo.close()
#
#--- run arc4gl
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i./run_arc '
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- remove unwanted fits files
#
    os.system('rm *src1a*')
def run_arc5gl_process(cline):
    """
    run arc5gl process
    input:  cline   --- command lines
    output: f_list  --- a list of fits (either extracted or browsed)
            *fits   --- if the command asked to extract; resulted fits files
    """
    with open(zspace, 'w') as fo:
        fo.write(cline)

    try:
        cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace + ' > ./zout'
        os.system(cmd)
    except:
        try:
            cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + ' > ./zout'
            os.system(cmd)
        except:
            cmd1 = "/usr/bin/env PERL5LIB= "
            cmd2 = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + ' > ./zout'
            cmd = cmd1 + cmd2
            bash(cmd, env=ascdsenv)

    rm_files(zspace)

    out = read_data_file('./zout', remove=1)

    save = []
    for ent in out:
        mc = re.search('fits', ent)
        if mc is not None:
            atemp = re.split('\s+', ent)
            save.append(atemp[0])

    return save
def clip_at_nth(infits, cut=10):
    """
    set the upper limit at the nth brightest value and chop the image at that value
    input:  infits  --- fits file
            cut     --- nth brightest; default is 10th
    """
#
#--- trim the extreme values
#
    upper = find_nth(infits, cut)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimgthresh infile=' + infits + ' outfile=zout.fits cut="0:' + str(upper) + '" value=0 clobber=yes'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    outfile = infits.replace('.fits', '_full.fits')
    cmd = 'mv ' + infits + ' ' + outfile
    os.system(cmd)

    m = re.search('gz', infits)
    if m is not None:
        os.system('gzip zout.fits')
        cmd = 'mv zout.fits.gz ' + infits
        os.system(cmd)
    else:
        cmd = 'mv zout.fits ' + infits
        os.system(cmd)
def clip_at_nth(infits, cut=10):
    """
    set the upper limit at the nth brightest value and chop the image at that value
    input:  infits  --- fits file
            cut     --- nth brightest; default is 10th
    """
#
#--- trim the extreme values
#
    upper = find_nth(infits, cut)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimgthresh infile=' + infits + ' outfile=zout.fits cut="0:' + str(upper) + '" value=0 clobber=yes'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    outfile = infits.replace('.fits', '_full.fits')
    cmd = 'mv ' + infits + ' ' + outfile
    os.system(cmd)

    m = re.search('gz', infits)
    if m is not None:
        os.system('gzip zout.fits')
        cmd = 'mv zout.fits.gz ' + infits
        os.system(cmd)
    else:
        cmd = 'mv zout.fits ' + infits
        os.system(cmd)
def find_nth(fits_file='NA', cut=10):
    """
    find the nth brightest value
    input:  fits_file   --- fits file
            cut         --- upper limit (nth brightest)
    """
    if fits_file == 'NA':
        fits_file = raw_input('Fits file name: ')
        cut = raw_input('Where to Cut?: ')
#
#-- make histogram
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimghist infile=' + fits_file + ' outfile=outfile.fits hist=1::1 strict=yes clobber=yes'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmlist infile=outfile.fits outfile=./zout opt=data'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    f = open('./zout', 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    os.system('rm outfile.fits ./zout')
#
#--- read bin # and its count rate
#
    hbin = []
    hcnt = []
    for ent in data:
        try:
            atemp = re.split('\s+|\t+', ent)
            if (len(atemp) > 3) and mtac.chkNumeric(atemp[1]) and mtac.chkNumeric(atemp[2]) and (int(atemp[4]) > 0):
                hbin.append(float(atemp[1]))
                hcnt.append(int(atemp[4]))
        except:
            pass
#
#--- checking the nth brightest position
#
    limit = cut - 1
    try:
        j = 0
        for i in range(len(hbin) - 1, 0, -1):
            if j == limit:
                val = i
                break
            else:
                if hcnt[i] > 0:         #---- only when the value is larger than 0, record as count
                    j += 1

        return hbin[val]
    except:
        return 'I/INDEF'
def get_data_with_dataseeker(tstart, tstop, col_list):
    """
    extract data using dataseeker
    input:  tstart      --- starting time in seconds from 1998.1.1
            tstop       --- stopping time in seconds from 1998.1.1
            col_list    --- data names to be extracted (without the _ or _avg part)
    output: save        --- a list of lists of data, including a time list
    """
#
#--- check whether an empty command file exists; if not, create one
#
    if not os.path.isfile('test'):
        cmd = 'touch test'
        os.system(cmd)
#
#--- create dataseeker command
#
    cmd1 = '/usr/bin/env PERL5LIB="" '
    cmd2 = 'dataseeker.pl infile=test outfile=temp.fits '
    cmd2 = cmd2 + 'search_crit="columns='
#
#--- column names start with '_' and end with '_avg'
#
    for k in range(0, len(col_list)):
        col = col_list[k]
        if k == 0:
            acol = '_' + col + '_avg'
        else:
            acol = ',_' + col + '_avg'
        cmd2 = cmd2 + acol

    cmd2 = cmd2 + ' timestart=' + str(tstart) + ' timestop=' + str(tstop) + '"'
    cmd2 = cmd2 + ' loginFile=' + house_keeping + 'loginfile '
#
#--- run the dataseeker command under ascds environment
#
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)
#
#--- read the data and create a list of lists
#
    hrd = pyfits.open('temp.fits')
    data = hrd[1].data
    hrd.close()

    dtime = data['time']
    save = [dtime]
    for col in col_list:
        acol = col + '_avg'
        save.append(data[acol])
#
#--- clean up
#
    mcf.rm_files('test')
    mcf.rm_files('temp.fits')

    return save
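#
#--- usage sketch: column names are given without the leading '_' or trailing '_avg';
#--- the msid name and time range below are hypothetical
#
def example_get_data_with_dataseeker_usage():
    tstart = 5.0e8                          #--- hypothetical start time (sec from 1998.1.1)
    tstop = tstart + 86400.0                #--- one day later
    col_list = ['shevart']                  #--- hypothetical msid list

    out = get_data_with_dataseeker(tstart, tstop, col_list)
    dtime = out[0]                          #--- time column
    shevart = out[1]                        #--- shevart_avg column
    return [dtime, shevart]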
def find_nth(fits_file='NA', cut=10):
    """
    find the nth brightest value
    input:  fits_file   --- fits file
            cut         --- upper limit (nth brightest)
    """
    if fits_file == 'NA':
        fits_file = raw_input('Fits file name: ')
        cut = raw_input('Where to Cut?: ')
#
#-- make histogram
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimghist infile=' + fits_file + ' outfile=outfile.fits hist=1::1 strict=yes clobber=yes'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmlist infile=outfile.fits outfile=./zout opt=data'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    f = open('./zout', 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    os.system('rm outfile.fits ./zout')
#
#--- read bin # and its count rate
#
    hbin = []
    hcnt = []
    for ent in data:
        try:
            atemp = re.split('\s+|\t+', ent)
            if (len(atemp) > 3) and mtac.chkNumeric(atemp[1]) and mtac.chkNumeric(atemp[2]) and (int(atemp[4]) > 0):
                hbin.append(float(atemp[1]))
                hcnt.append(int(atemp[4]))
        except:
            pass
#
#--- checking the nth brightest position
#
    limit = cut - 1
    try:
        j = 0
        for i in range(len(hbin) - 1, 0, -1):
            if j == limit:
                val = i
                break
            else:
                if hcnt[i] > 0:         #---- only when the value is larger than 0, record as count
                    j += 1

        return hbin[val]
    except:
        return 'I/INDEF'
def run_telem_data(telem_command, daylist, outdir):
    """
    run telemetry data idl script and clean up the result
    input:  telem_command   --- idl command to extract data (e.g. weekly_telem,20150904,20150910)
            daylist         --- date header for telem.txt
            outdir          --- output directory name
    output: tdata           --- a table data created by the process
    """
    fo = open('./run_telem', 'w')
    fo.write("cd,'./Telem/'\n")
    fo.write(telem_command)
    fo.write('\n')
    #fo.write("cd,'../'\n")
    fo.write('exit\n')
    fo.close()

    fo = open('./Telem/header', 'w')
    fo.write(daylist)
    fo.write('\n')
    fo.close()

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' idl ./run_telem > out2'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv2)

    ##cmd = 'idl ./run_telem > out2'
    ##os.system(cmd)
    ##cmd = 'rm ./run_telem'
    ##os.system(cmd)

    cmd = 'cat ./Telem/header ./Telem/telem.txt > temp'
    os.system(cmd)
    cmd = 'mv temp ./Telem/telem.txt'
    os.system(cmd)

    cmd = 'cd ./Telem; perl ./telem.pl ./telem.txt ./telem.html > zzz'
    os.system(cmd)
    cmd = 'cd ./Telem; perl ./zx_find_error2.perl zzz > zx_error_list'
    os.system(cmd)
    cmd = 'cd ./Telem; perl ./zx_remove.perl > ./clean_list'
    os.system(cmd)
    cmd = 'cd ./Telem; perl ./reduce_entry.perl'
    os.system(cmd)
    cmd = 'cd ./Telem; perl ./telem.pl ./reduced_clean_list telem.html'
    os.system(cmd)

    f = open('./Telem/telem.html', 'r')
    tdata = f.read()
    f.close()

    cmd = 'mv ./Telem/telem.html ' + outdir
    os.system(cmd)

    cmd = ' cd ./Telem; rm -rf zzz zx_error_list ./clean_list ./reduced_clean_list ./header ./telem.txt ./run_telem'
    os.system(cmd)

    return tdata
def extract_stat_fits_file(obsid, out_dir='./'):
    """
    extract acis stat fits files using arc5gl
    Input:  obsid   --- obsid
            out_dir --- a directory in which the fits file is deposited; default is "./"
    Output: acis stat fits file (decompressed) in out_dir
            data    --- a list of fits files extracted
    """
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=expstats\n'
    line = line + 'obsid=' + str(obsid) + '\n'
    line = line + 'go\n'

    with open(zspace, 'w') as fo:
        fo.write(line)

    try:
        try:
            cmd = ' /proj/sot/ska/bin/arc5gl -user isobe -script ' + zspace
            os.system(cmd)
        except:
            try:
                cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace
                os.system(cmd)
            except:
                cmd1 = "/usr/bin/env PERL5LIB= "
                cmd2 = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace
                cmd = cmd1 + cmd2
                bash(cmd, env=ascdsenv)

        cmd = 'ls ' + exc_dir + '> ' + zspace
        os.system(cmd)

        with open(zspace, 'r') as f:
            test = f.read()

        mcf.rm_files(zspace)

        m1 = re.search('stat1.fits.gz', test)
        if m1 is not None:
            cmd = 'mv ' + exc_dir + '/*stat1.fits.gz ' + out_dir + '/.'
            os.system(cmd)

            cmd = 'gzip -d ' + out_dir + '/*stat1.fits.gz'
            os.system(cmd)

            cmd = 'ls ' + out_dir + '/*' + str(obsid) + '*stat1.fits > ' + zspace
            os.system(cmd)

            data = mcf.read_data_file(zspace, remove=1)

            return data
        else:
            return []
    except:
        mcf.rm_file(zspace)
        return []
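#
#--- usage sketch: the obsid and output directory below are hypothetical
#
def example_extract_stat_fits_file_usage():
    flist = extract_stat_fits_file(23456, out_dir='./Stat_dir')
    return len(flist)                       #--- 0 when no expstats file was extracted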
def find_two_sigma_value(fits):
#
#-- make histogram
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimghist infile=' + fits + ' outfile=outfile.fits hist=1::1 strict=yes clobber=yes'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmlist infile=outfile.fits outfile=' + zspace + ' opt=data'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)

    f = open(zspace, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    mcf.rm_file(zspace)
#
#--- read bin # and its count rate
#
    hbin = []
    hcnt = []
    vsum = 0
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        if mcf.chkNumeric(atemp[0]):
            hbin.append(float(atemp[1]))
            val = int(atemp[4])
            hcnt.append(val)
            vsum += val
#
#--- checking one sigma and two sigma counts
#
    if len(hbin) > 0:
        v68 = int(0.68 * vsum)
        v95 = int(0.95 * vsum)
        v99 = int(0.997 * vsum)

        sigma1 = -999
        sigma2 = -999
        sigma3 = -999
        acc = 0
        for i in range(0, len(hbin)):
            acc += hcnt[i]
            if acc > v68 and sigma1 < 0:
                sigma1 = hbin[i]
            elif acc > v95 and sigma2 < 0:
                sigma2 = hbin[i]
            elif acc > v99 and sigma3 < 0:
                sigma3 = hbin[i]
                break

        return (sigma1, sigma2, sigma3)
    else:
        return (0, 0, 0)
def run_ascds(cmd2):
    """
    set ascds environment and run the command
    input:  cmd2    --- command line
    output: results of the command
    """
    cmd1 = "/usr/bin/env PERL5LIB= "
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
def test_logfile(self):
    logfile = StringIO()
    cmd = 'echo line1; echo line2'
    bash(cmd, logfile=logfile)
    outlines = logfile.getvalue().splitlines()
    assert outlines[0].endswith(cmd)
    assert outlines[1] == 'line1'
    assert outlines[2] == 'line2'
    assert outlines[3].startswith('Bash')
def test_logfile(self):
    logfile = StringIO()
    bash('echo line1; echo line2', logfile=logfile)
    logfile.seek(0)
    outlines = logfile.read().splitlines()
    assert outlines[0].startswith('Bash-')
    assert outlines[1] == 'line1'
    assert outlines[2] == 'line2'
    assert outlines[3].startswith('Bash')
def run_ascds(cmd2):
    """
    run an ascds related command
    input:  cmd2    --- command to be run
    output: result of cmd2
    """
    cmd1 = '/usr/bin/env PERL5LIB= '
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
def run_focal_temp_data(outdir, start, stop, fptemp):
    """
    run focal temp script and create a plot, read a table
    input:  outdir  --- output directory name
            start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
            fptemp  --- plot name
    output: fcnt    --- number of peaks observed
            fdata   --- table input
    """
    cmd = 'cp -f Templates/test .'
    os.system(cmd)

    cmd1 = '/usr/bin/env PERL5LIB="" '
    #cmd2 = ' source /home/mta/bin/reset_param ; '
    cmd2 = ' /usr/local/bin/perl ' + outdir + 'get_ftemp_data.perl ' + str(start) + ' ' + str(stop)
    cmd = cmd1 + cmd2
#
#--- run the focal temp script to extract data
#
    bash(cmd, env=ascdsenv)

    cmd = 'cp -f *fits ./Focal/.'
    os.system(cmd)
    cmd = 'rm ./test'
    os.system(cmd)
    cmd = 'mv -f *fits ./Focal'
    os.system(cmd)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' idl ./Focal/run_temp > out'
    cmd = cmd1 + cmd2
#
#--- run the focal temp script to create a plot
#
    bash(cmd, env=ascdsenv2)

    cmd = 'mv -f ./Focal/*fits ' + outdir
    os.system(cmd)
    cmd = 'mv ./Focal/*.gif ' + outdir + fptemp
    os.system(cmd)
#
#--- read focal temp data
#
    [fcnt, fdata] = read_focal_temp_output()

    cmd = 'rm ./out'
    ###os.system(cmd)

    return [fcnt, fdata]
def extract_stat_fits_file(obsid, out_dir='./'):
    """
    extract acis stat fits files using arc4gl
    Input:  obsid   --- obsid
            out_dir --- a directory in which the fits file is deposited; default is "./"
    Output: acis stat fits file (decompressed) in out_dir
            data    --- a list of fits files extracted
    """
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=expstats\n'
    line = line + 'obsid=' + str(obsid) + '\n'
    line = line + 'go\n'

    f = open(zspace, 'w')
    f.write(line)
    f.close()

    try:
        cmd1 = '/usr/bin/env PERL5LIB=""'
        cmd2 = ' echo ' + hakama + '|arc4gl -U' + dare + ' -Sarcocc -i' + zspace
        cmd = cmd1 + cmd2

        bash(cmd, env=ascdsenv)

        mcf.rm_file(zspace)

        cmd = 'ls ' + exc_dir + '> ' + zspace
        os.system(cmd)

        test = open(zspace).read()
        mcf.rm_file(zspace)

        m1 = re.search('stat1.fits.gz', test)
        if m1 is not None:
            cmd = 'mv ' + exc_dir + '/*stat1.fits.gz ' + out_dir + '/.'
            os.system(cmd)

            cmd = 'gzip -d ' + out_dir + '/*stat1.fits.gz'
            os.system(cmd)

            cmd = 'ls ' + out_dir + '/*' + str(obsid) + '*stat1.fits > ' + zspace
            os.system(cmd)

            f = open(zspace, 'r')
            data = [line.strip() for line in f.readlines()]
            f.close()
            mcf.rm_file(zspace)

            return data
        else:
            return []
    except:
        mcf.rm_file(zspace)
        return []
def run_focal_temp_data(outdir, start, stop, fptemp):
    """
    run focal temp script and create a plot, read a table
    input:  outdir  --- output directory name
            start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
            fptemp  --- plot name
    output: fcnt    --- number of peaks observed
            fdata   --- table input
    """
    cmd = 'cp -f Templates/test .'
    os.system(cmd)

    cmd1 = '/usr/bin/env PERL5LIB="" '
    #cmd2 = ' source /home/mta/bin/reset_param ; '
    cmd2 = ' /usr/local/bin/perl ' + outdir + 'get_ftemp_data.perl ' + str(start) + ' ' + str(stop)
    cmd = cmd1 + cmd2
#
#--- run the focal temp script to extract data
#
    bash(cmd, env=ascdsenv)

    cmd = 'cp -f *fits ./Focal/.'
    os.system(cmd)
    cmd = 'rm ./test'
    os.system(cmd)
    cmd = 'mv -f *fits ./Focal'
    os.system(cmd)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' idl ./Focal/run_temp > out'
    cmd = cmd1 + cmd2
#
#--- run the focal temp script to create a plot
#
    bash(cmd, env=ascdsenv2)

    cmd = 'mv -f ./Focal/*fits ' + outdir
    os.system(cmd)
    cmd = 'mv ./Focal/*.gif ' + outdir + fptemp
    os.system(cmd)
#
#--- read focal temp data
#
    [fcnt, fdata] = read_focal_temp_output()

    cmd = 'rm ./out'
    ###os.system(cmd)

    return [fcnt, fdata]
def hrc_dose_conv_to_png_manual(indir, outdir, outdir2, year, month, scale='sqrt', color='sls', chk=0):
    """
    convert fits files into png images using ds9. this must be run manually on screen
    input:  indir   --- a directory where to find the data
            outdir  --- image output directory
            outdir2 --- image output directory for html page
            year    --- year
            month   --- month
            scale   --- scale, such as sqrt, log, or linear
            color   --- color map name
            chk     --- if it is > 0, a 99.5% cut will be applied to the data
    output: <img_dir>/<Inst>/<Month>/Hrc<inst>_<month>_<year>.png
            <img_dir>/<Inst>/<Month>/Hrc<inst>_08_1999_<month>_<year>.png
    """
    syear = str(year)
    smon = str(month)
    if month < 10:
        smon = '0' + smon

    hname = 'HRC*' + smon + '_' + syear + '*.fits*'

    for ifile in os.listdir(indir):
        if fnmatch.fnmatch(ifile, hname):
            btemp = re.split('\.fits', ifile)
            out = btemp[0]
            outfile = outdir + out + '.png'
            outfile2 = outdir2 + out + '.png'
            ifits = indir + ifile

            cmd = "/usr/bin/env PERL5LIB= "
            cmd = cmd + ' ds9 ' + ifits + ' -geometry 760x1024 -zoom to fit '
            if chk > 0:
                cmd = cmd + '-scale mode 99.5 -scale ' + scale + ' -cmap ' + color
            else:
                cmd = cmd + '-scale ' + scale + ' -cmap ' + color

            cmd = cmd + ' -colorbar yes -colorbar vertical -colorbar numerics yes -colorbar space value '
            cmd = cmd + ' -colorbar fontsize 12 -saveimage png ' + outfile + ' -exit'

            bash(cmd, env=ascdsenv)

            cmd = 'cp -f ' + outfile + ' ' + outfile2
            os.system(cmd)
        else:
            pass
def run_get_hrc_5_eng(tstart, year, mon, day):
    """
    extract hrc eng5 fits file and create a combined eng5 file
    input:  tstart  --- time in seconds from 1998.1.1
            year    --- year
            mon     --- month
            day     --- day of the month
    output: hrc_5_eng0_<yyyy><mm><dd>.fits
            hrc_rates_<yyyy><mm><dd>.fits
            mcptot_a_stats.rdb --- updated
            shield_a_stats.rdb --- updated
    """
    cyear = str(year)
    cmon = str(mon)
    cday = str(day)
    if mon < 10:
        cmon = '0' + cmon
    if day < 10:
        cday = '0' + cday
#
#--- set the data extract interval to a day
#
    tstop = tstart + 86400.0
#
#--- extract hrc5eng data with arc5gl
#
    run_arc5gl(tstart, tstop, 'hrc5eng')
#
#--- merge all hrc5eng data into one fits file
#
    cmd = 'ls hrcf*_5_eng0.fits.gz >dat.lis'
    os.system(cmd)

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmmerge infile=@dat.lis"[time=' + str(tstart) + ':' + str(tstop) + ',quality=0000000000000000000,mnf=0]" '
    cmd2 = cmd2 + 'outfile=hrc_5_eng0_' + cyear + cmon + cday + '.fits mode=h'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- move the fits files to the saving directory
#
    cmd = 'mv hrc_5_eng0*fits ' + outdir
    os.system(cmd)
#
#--- clean up the original files
#
    cmd = 'rm `cat dat.lis`'
    os.system(cmd)

    mcf.rm_file('./dat.lis')
def read_hrc_data(start, stop):
    """
    extract hrc shield rate for a given time span
    input:  start   --- starting time
            stop    --- stopping time
            vito.fits --- this fits file must exist, created by get_hrc_veto.perl
    output: time    --- time in sec from 1998.1.1
            rate    --- hrc shield rate
    """
#
#--- create a dummy file
#
    mcf.rm_file('./test')
    fo = open("./test", 'w')
    fo.close()
#
#--- call dataseeker
#
    cmd1 = '/usr/bin/env PERL5LIB="" '
    #cmd2 = ' source /home/mta/bin/reset_param; '
    cmd2 = ""
    cmd2 = cmd2 + ' /home/ascds/DS.release/bin/dataseeker.pl '
    cmd2 = cmd2 + 'infile=test outfile=ztemp.fits search_crit="columns=_shevart_avg timestart=' + str(start)
    cmd2 = cmd2 + ' timestop=' + str(stop) + '" loginFile=' + house_keeping + 'loginfile'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- read the data
#
    data = pyfits.getdata('./ztemp.fits')
    stime = data.field('time')
    shd = data.field('shevart_avg')

    time = []
    rate = []
    for i in range(0, len(stime)):
        if stime[i] >= start and stime[i] < stop:
            time.append(stime[i])
            rate.append(shd[i])
        elif stime[i] >= stop:
            break
        else:
            continue

    mcf.rm_file('./test')
    mcf.rm_file('./ztemp.fits')

    return [time, rate]
def cpd(asol, evtfile, x, y, radius, poly_degree, out):
    """
    Run the correct periscope drift tool.
    """
    bash("""./correct_periscope_drift infile= {asol} \
            evtfile= {evtfile} \
            x={x} y={y} radius={radius} \
            corr_poly_degree={poly_degree} \
            outfile= {out}_asol1.fits \
            corr_plot_root= {out} \
            clobber+""".format(testdir=TESTDIR, evtfile=evtfile, asol=asol, y=y, x=x,
                               radius=radius, poly_degree=poly_degree, out=out),
         env=ciaoenv)
def run_ascds(cmd, clean=0):
    """
    run the command in ascds environment
    input:  cmd     --- command line
            clean   --- if 1, it also resets parameters. default: 0
    output: command results
    """
    if clean == 1:
        acmd = '/usr/bin/env PERL5LIB="" source /home/mta/bin/reset_param ;' + cmd
    else:
        acmd = '/usr/bin/env PERL5LIB="" ' + cmd

    bash(acmd, env=ascdsenv)
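#
#--- usage sketch: any ascds/ciao command string can be passed; the file names and
#--- energy filter below are hypothetical
#
def example_run_ascds_usage():
    run_ascds('dmcopy "acis_evt2.fits[energy=300:7000]" acis_evt2_cut.fits clobber=yes')
    run_ascds('dmlist acis_evt2_cut.fits opt=header > header.txt', clean=1)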
def extract_stat_result(file):
    """
    extract stat information
    Input:  file    --- image fits file
    Output: [avg, minv, minp, maxv, maxp, dev]
    """
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmstat infile=' + file + ' centroid=no >' + zspace
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    f = open(zspace, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    mcf.rm_file(zspace)
#
#--- extract mean, dev, min, and max
#
    for ent in data:
        atemp = re.split('\s+|\t+', ent)
        m1 = re.search('mean', ent)
        m2 = re.search('min', ent)
        m3 = re.search('max', ent)
        m4 = re.search('sigma', ent)

        if m1 is not None:
            avg = atemp[1]

        if m2 is not None:
            minv = atemp[1]
            btemp = re.split('\(', ent)
            ctemp = re.split('\s+|\t+', btemp[1])
            minp = '(' + ctemp[1] + ',' + ctemp[2] + ')'

        if m3 is not None:
            maxv = atemp[1]
            btemp = re.split('\(', ent)
            ctemp = re.split('\s+|\t+', btemp[1])
            maxp = '(' + ctemp[1] + ',' + ctemp[2] + ')'

        if m4 is not None:
            dev = atemp[1]

    return [avg, minv, minp, maxv, maxp, dev]
def read_hrc_data(start, stop):
    """
    extract hrc shield rate for a given time span
    input:  start   --- starting time
            stop    --- stopping time
            vito.fits --- this fits file must exist, created by get_hrc_veto.perl
    output: time    --- time in sec from 1998.1.1
            rate    --- hrc shield rate
    """
#
#--- create a dummy file
#
    mcf.rm_file('./test')
    fo = open("./test", 'w')
    fo.close()
#
#--- call dataseeker
#
    cmd1 = '/usr/bin/env PERL5LIB="" '
    #cmd2 = ' source /home/mta/bin/reset_param; '
    cmd2 = ""
    cmd2 = cmd2 + ' /home/ascds/DS.release/bin/dataseeker.pl '
    cmd2 = cmd2 + 'infile=test outfile=ztemp.fits search_crit="columns=_shevart_avg timestart=' + str(start)
    cmd2 = cmd2 + ' timestop=' + str(stop) + '" loginFile=' + house_keeping + 'loginfile'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#--- read the data
#
    data = pyfits.getdata('./ztemp.fits')
    stime = data.field('time')
    shd = data.field('shevart_avg')

    time = []
    rate = []
    for i in range(0, len(stime)):
        if stime[i] >= start and stime[i] < stop:
            time.append(stime[i])
            rate.append(shd[i])
        elif stime[i] >= stop:
            break
        else:
            continue

    mcf.rm_file('./test')
    mcf.rm_file('./ztemp.fits')

    return [time, rate]
def filters_sim(unprocessed_data):
    """
    run acorn for sim filter
    input:  unprocessed_data    --- list of data
    output: various *.tl files
    """
    for ent in unprocessed_data:
        cmd = ' /home/ascds/DS.release/bin/acorn -nOC msids_sim.list -f ' + ent
        try:
            print 'Data: ' + ent
            bash(cmd, env=ascdsenv)
        except:
            pass
def filters_ccdm(unprocessed_data):
    """
    run acorn for ccdm filter
    input:  unprocessed_data    --- list of data
    output: various *.tl files
    """
    for ent in unprocessed_data:
        cmd = '/home/ascds/DS.release/bin/acorn -nOC msids.list -f ' + ent
        try:
            #os.system(cmd)
            bash(cmd, env=ascdsenv)
        except:
            pass
def create_image(line, outfile):
    """
    create an image file according to instruction "line"
    input:  line    --- instruction
            outfile --- output file name
    """
    # try:
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmcopy "' + line + '" out.fits option=image clobber=yes'
    cmd = cmd1 + cmd2
    bash(cmd, env=ascdsenv)
    # except:
    #     pass

    try:
        cmd1 = "/usr/bin/env PERL5LIB="
        cmd2 = ' dmstat out.fits centroid=no > stest'
        cmd = cmd1 + cmd2
        bash(cmd, env=ascdsenv)
    except:
        pass
#
#--- if there is actually data, condense the image so that it won't take too much space
#
    f = open('stest', 'r')
    sdata = [line.strip() for line in f.readlines()]
    f.close()
    os.system('rm stest')

    val = 'NA'
    for lent in sdata:
        m = re.search('mean', lent)
        if m is not None:
            atemp = re.split('\s+|\t+', lent)
            val = atemp[1]
            break

    if val != 'NA' and float(val) > 0:
        # line = 'out.fits[opt type=i2,null=-99,mem=80]'
        # cmd = 'dmcopy infile="' + line + '" outfile=' + outfile + ' clobber=yes'
        # os.system(cmd)
        cmd = 'mv out.fits ' + outfile
        os.system(cmd)

        return 1            #--- the image file was created
    else:
        return 0            #--- the image file was not created
def create_exposure_map(fits):
    """
    create an exposure map from a fits file using ds9
    input:  fits    --- fits file name
    output: out     --- png file
    """
    atemp = re.split('fits', fits)
    out = atemp[0] + 'png'

    cmd = 'ds9 ' + fits + ' -zoom to fit -scale histequ -cmap Heat -export png ' + out + ' -quit'

    try:
        bash(cmd, env=ascdsenv)
    except:
        pass
def getstat(fits):
    """
    compute stat for fits image
    input:  fits    --- fits file name
    output: (mean, std, min, max)
    """
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmstat ' + fits + ' centroid=no > ./ztemp'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    f = open('./ztemp', 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()

    for ent in data:
        m1 = re.search('min', ent)
        m2 = re.search('max', ent)
        m3 = re.search('mean', ent)
        m4 = re.search('sigma', ent)

        if m1 is not None:
            atemp = re.split('\s+|\t+', ent)
            min = float(atemp[1])

        if m2 is not None:
            atemp = re.split('\s+|\t+', ent)
            max = int(atemp[1])

        if m3 is not None:
            atemp = re.split('\s+|\t+', ent)
            mean = float(atemp[1])

        if m4 is not None:
            atemp = re.split('\s+|\t+', ent)
            std = float(atemp[1])

    return (mean, std, min, max)
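#
#--- usage sketch: getstat() returns (mean, std, min, max) of the image; the file
#--- name and the 3-sigma threshold below are hypothetical
#
def example_getstat_usage():
    (mean, std, vmin, vmax) = getstat('ACIS_07_2015.fits.gz')
    upper = mean + 3.0 * std
    return upper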
def run_idl_scripts():
    """
    run cell detect script list and then analyze the data and make plots
    input:  none but just run: "./run_script"
    output: *src2.fits
    """
#
#--- run celldetect script
#
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' run_script > /dev/null'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)
#
#--- run the rest of the idl scripts
#
    cmd = 'rm -rf *_evt2.fits.gz'
    os.system(cmd)

    cmd = 'ls *src2.fits* > src_mon.list'
    os.system(cmd)

    cmd = 'idl ' + tdir + 'Scripts/run'
    os.system(cmd)

    cmd = 'cat src_mon.tab >> src_mon.txt'      #----!!! move sc_mon.txt to house_keeping !!!
    os.system(cmd)

    cmd = 'idl ' + tdir + 'Scripts/run_txt'
    os.system(cmd)

    cmd = 'mv -f *.html *.gif ' + tdir + '/.'
    os.system(cmd)

    cmd = 'rm -f *.fits xafit* xtmpsrcdata'
    os.system(cmd)
def comp_stat(line, year, month, outfile, comp_test='NA'):
    """
    compute statistics and print them out
    input:  line    --- command line used by dmcopy to extract a specific location
                        Example: ACIS_04_2012.fits.gz[1:1024,1:256]
            year    --- year
            month   --- month
            outfile --- output file name
    """
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmcopy ' + line + ' temp.fits clobber="yes"'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#-- to avoid getting min from outside of the edge of a CCD
#
    ### try:
    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' dmimgthresh infile=temp.fits outfile=zcut.fits cut="0:1e10" value=0 clobber=yes'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)
#
#-- find avg, min, max and deviation
#
    [avg, minv, minp, maxv, maxp, dev] = extract_stat_result('zcut.fits')
#
#-- find the one sigma and two sigma count rate
#
    [sigma1, sigma2, sigma3] = find_two_sigma_value('zcut.fits')

    print_stat(avg, minv, minp, maxv, maxp, dev, sigma1, sigma2, sigma3, year, month, outfile, comp_test)
    ### except:
    ###     pass

    os.system('rm temp.fits')
def find_acis_evt1(start, stop):
    """
    find acis evt1 files for a given time period
    input:  start   --- start time in the format of mm/dd/yy (e.g. 05/01/15)
            stop    --- stop time in the format of mm/dd/yy
    output: acisf*evt1.fits.gz
    """
#
#--- write required arc4gl command
#
    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
#    line = line + 'version=last\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    f = open(zspace, 'w')
    f.write(line)
    f.close()

    cmd1 = "/usr/bin/env PERL5LIB="
    cmd2 = ' echo ' + hakama + ' |arc4gl -U' + dare + ' -Sarcocc -i' + zspace + '> ./ztemp'
    cmd = cmd1 + cmd2

    bash(cmd, env=ascdsenv)

    mcf.rm_file(zspace)

    f = open('./ztemp', 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    mcf.rm_file('./ztemp')

    return data