Example No. 1
def extract_archive_data(tstart, tstop):
    """
    extract fits data files from archive
    input:  tstart  --- starting time
            tstop   --- stopping time
    output: combined epheio fits file (ztemp.fits); returns False if no data are found
    """
    #
    #--- extract data for the given time period
    #
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=ephin\n'
    line = line + 'level=0\n'
    line = line + 'filetype=epheio\n'
    line = line + 'tstart=' + str(tstart) + '\n'
    line = line + 'tstop=' + str(tstop) + '\n'
    line = line + 'go\n'

    flist = mcf.run_arc5gl_process(line)

    if len(flist) > 0:
        combine_fits_files(flist)
        return 'ztemp.fits'
    else:
        return False
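Every example on this page builds a multi-line command string and hands it to mcf.run_arc5gl_process, which is not listed here. Below is a minimal, hypothetical stand-in for that helper, assuming arc5gl reads its command script on standard input (the '-stdin' flag is an assumption; check the local installation) and that the caller wants either the files a retrieve left in the working directory or the names a browse printed to stdout. The real mcf helper may differ.

import glob
import subprocess

def run_arc5gl_process(line):
    """
    hypothetical stand-in for mcf.run_arc5gl_process
    input:  line    --- multi-line arc5gl command string ending with 'go'
    output: a list of fits file names (retrieved files or browse results)
    """
    before = set(glob.glob('*.fits*'))
    #--- '-stdin' is assumed; a local arc5gl may expect a script file instead
    proc = subprocess.run(['arc5gl', '-stdin'], input=line,
                          text=True, capture_output=True)
    #--- files newly created by a 'retrieve'
    new_files = sorted(set(glob.glob('*.fits*')) - before)
    if new_files:
        return new_files
    #--- otherwise return the file names listed by a 'browse'
    return [row.split()[0] for row in proc.stdout.splitlines() if 'fits' in row]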
Example No. 2
def run_arc5gl(operation, dataset, detector, level, filetype, obsid):
    """
    run arc5gl to extract fits file
    input:  operation   --- operation: retrieve/browse
            dataset     --- data set, usually flight
            detector    --- detector
            level       --- level
            filetype    --- file type, eg evt1
            obsid       --- obsid
    output: fits        --- extracted fits file name
    """
    line = 'operation='       + operation + '\n'
    line = line + 'dataset='  + dataset   + '\n'
    line = line + 'detector=' + detector  + '\n'
    line = line + 'level='    + level     + '\n'
    line = line + 'filetype=' + filetype  + '\n'
    line = line + 'obsid='    + str(obsid)+ '\n'
    line = line + 'go\n'
    
    out  = mcf.run_arc5gl_process(line)
    try:
        fits = out[0]
    except IndexError:
        fits = ''

    return fits
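For illustration only, retrieving the level 1 acis evt1 file for a single obsid with the function above might look like the following; the obsid is a placeholder, and level is passed as a string because the function concatenates it into the command without str().

fits = run_arc5gl('retrieve', 'flight', 'acis', '1', 'evt1', 23456)
if fits == '':
    print("no file was extracted")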
Example No. 3
def call_arc5gl(op, detector, level, tstart='', tstop='', sub='', ifile=''):
    """
    using arc5gl to extract a fits file list or a file itself
    input:  op          --- operation: retrieve/browse
            detector    --- detector
            level       --- level
            tstart      --- starting time; ignored if a file name is provided
            tstop       --- stopping time; ignored if a file name is provided
            sub         --- sub detector name; default ""
            ifile       --- file name; default ""
    output: flist       --- a list of fits files: the browse results or the extracted file names
    """
    line = 'operation=' + op + '\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=' + detector + '\n'

    if sub != '':
        line = line + 'subdetector=' + sub + '\n'

    line = line + 'level=' + str(level) + '\n'

    if ifile == '':
        line = line + 'tstart=' + str(tstart) + '\n'
        line = line + 'tstop=' + str(tstop) + '\n'
    else:
        line = line + 'filename=' + ifile + '\n'

    line = line + 'go\n'

    flist = mcf.run_arc5gl_process(line)

    return flist
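Two hedged usage sketches for call_arc5gl: a browse over a time range, then a retrieve of a single file by name. The time stamps and the file name are placeholders.

#--- list hrc level 0 files for a (placeholder) one-day window
flist = call_arc5gl('browse', 'hrc', 0,
                    tstart='2021-01-01T00:00:00',
                    tstop='2021-01-02T00:00:00')

#--- retrieve one of the listed files by name (placeholder name)
out = call_arc5gl('retrieve', 'hrc', 0, ifile='hrcf_example_evt1.fits')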
Example No. 4
def run_arc5gl_browse(start, stop, lev='Lev1'):
    """
    run arc5gl to get a list of fits files in the given time period
    input:  start   --- starting time in the format of <yyyy>-<mm>-<dd>T<hh>:<mm>:<ss>
            stop    --- stopping time in the format of <yyyy>-<mm>-<dd>T<hh>:<mm>:<ss>
            lev     --- data level; 'Lev1' (default) selects evt1, anything else evt2
    output: f_list  --- a list of fits file names
    """

    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    if lev == 'Lev1':
        line = line + 'level=1\n'
        line = line + 'filetype=evt1\n'
    else:
        line = line + 'level=2\n'
        line = line + 'filetype=evt2\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    f_list = mcf.run_arc5gl_process(line)

    return f_list
Example No. 5
def find_pitch_angle(start, stop):
    """
    create a table of time and sun pitch angle
    input:  start   --- starting time in seconds from 1998.1.1
            stop    --- stopping time in seconds from 1998.1.1
    output: time    --- a list of time in seconds from 1998.1.1
            pitch   --- a list of pitch angles in degrees
    """
    #
    #--- extract pcad aspsol fits files for the given time period
    #
    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = pcad\n'
    line = line + 'subdetector = aca\n'
    line = line + 'level = 1\n'
    line = line + 'filetype =aspsol\n'
    line = line + 'tstart = ' + str(start) + '\n'
    line = line + 'tstop = ' + str(stop) + '\n'
    line = line + 'go\n'
    data_list = mcf.run_arc5gl_process(line)
    [cols, tbdata] = ecf.read_fits_file(data_list[0])

    for ent in data_list:
        cmd = 'rm -rf ' + ent
        os.system(cmd)

    time = tbdata['time']
    ra = tbdata['ra']
    dec = tbdata['dec']

    time_list = []
    pitch_list = []
    prev = 0.0
    m = 0
    for k in range(0, len(time)):
        #
        #--- select one data point about every 5 minutes
        #
        itime = int(time[k])
        if itime == prev:
            continue

        if m % 300 == 0:
            #
            #--- get the sun angle
            #
            pitch = find_chandra_pitch(time[k], ra[k], dec[k])
            time_list.append(itime)
            pitch_list.append(pitch)

        prev = itime
        m += 1

    time = numpy.array(time_list)
    pitch = numpy.array(pitch_list)

    return [time, pitch]
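The sampling loop above keeps about one point every 5 minutes by counting distinct integer seconds. A vectorized sketch of the same selection, assuming the time column is monotonically increasing (illustration only, not part of the original code):

import numpy

def select_every_n_seconds(time, step=300):
    """
    return indices of the first sample of every <step>-th distinct integer
    second; matches the m % 300 == 0 loop above for time-sorted data
    """
    isec = numpy.asarray(time).astype(int)
    _, first = numpy.unique(isec, return_index=True)
    return first[::step]

The returned indices can then be used to pull time, ra, and dec before calling find_chandra_pitch.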
Example No. 6
def get_ephin_data(start, stop, out_dir):
    """
    extract ephin data and create ephin_data file
    input:  start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
            out_dir --- output directory
    output: <out_dir>/ephin_data
    """
    #
    #--- first create a list of ephin fits files for the given period
    #
    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=ephin\n'
    line = line + 'level=1\n'
    line = line + 'filetype=ephrates\n'
    line = line + 'tstart=' + start + '\n'
    line = line + 'tstop=' + stop + '\n'
    line = line + 'go\n'

    data = mcf.run_arc5gl_process(line)
    #
    #--- extract ephin fits file one by one and analyze
    #
    for ent in data:
        mc = re.search('fits', ent)
        if mc is not None:
            atemp = re.split('\s+', ent)
            fits = atemp[0]
            line = 'operation=retrieve\n'
            line = line + 'dataset=flight\n'
            line = line + 'detector=ephin\n'
            line = line + 'level=1\n'
            line = line + 'filetype=ephrates\n'
            line = line + 'filename=' + fits + '\n'
            line = line + 'go\n'

            chk = mcf.run_arc5gl_process(line)

            cmd = 'gzip -d *fits.gz'
            os.system(cmd)

            extract_ephin_data(fits, out_dir)
Example No. 7
def get_data(start, stop, year, msid_list, out_dir):
    """
    update eph l1 related data for the given data period
    input:  start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
            year    --- data extracted year
            msid_list   --- list of msids
            out_dir --- output_directory
    output: <out_dir>/<msid>_full_data_<year>.fits
    """
    print(str(start) + '<-->' + str(stop))

    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ephin\n'
    line = line + 'level = 0\n'
    line = line + 'filetype =ephhk \n'
    line = line + 'tstart = ' + str(start) + '\n'
    line = line + 'tstop = ' + str(stop) + '\n'
    line = line + 'go\n'

    data_list = mcf.run_arc5gl_process(line)
    #
    #--- append the data to the local fits data files
    #
    for fits in data_list:

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = tbdata['time']

        for col in msid_list:
            #
            #--- ignore columns with "ST_" (standard dev) and time
            #
            mdata = tbdata[col]
            cdata = [time, mdata]
            ocols = ['time', col.lower()]

            if not os.path.isdir(out_dir):
                cmd = 'mkdir ' + out_dir
                os.system(cmd)

            ofits = out_dir + col.lower() + '_full_data_' + str(year) + '.fits'
            if os.path.isfile(ofits):
                ecf.update_fits_file(ofits, ocols, cdata)
            else:
                ecf.create_fits_file(ofits, ocols, cdata)

        mcf.rm_files(fits)
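ecf.create_fits_file and ecf.update_fits_file are not shown on this page. The sketch below is one plausible implementation using astropy.io.fits, assuming simple one-dimensional numeric columns; the real ecf helpers may handle column formats and headers differently.

import numpy
from astropy.io import fits

def create_fits_file(ofits, cols, cdata):
    """write <cols>/<cdata> out as a new binary table fits file"""
    fcols = [fits.Column(name=c, format='D', array=numpy.array(d))
             for c, d in zip(cols, cdata)]
    fits.BinTableHDU.from_columns(fcols).writeto(ofits, overwrite=True)

def update_fits_file(ofits, cols, cdata):
    """append the rows in <cdata> to an existing fits table"""
    with fits.open(ofits) as hin:
        old = hin[1].data
        merged = [numpy.append(old[c], numpy.array(d)) for c, d in zip(cols, cdata)]
    create_fits_file(ofits, cols, merged)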
Example No. 8
def run_script():
    """
    extract raw telemetry data day by day (1999:239 through 2014:316)
    and run the ccdm filter and sim data extraction on each day's files
    """
    for year in range(1999,2015):

        if mcf.is_leapyear(year):
            dend = 367
        else:
            dend = 366
        syear = str(year)
        lyear = syear[2] + syear[3]

        for yday in range(1,dend):
            if year == 1999 and yday < 239:
                continue

            if year == 2014 and yday > 316:
                break

            lyday = str(yday)
            if yday < 10:
                lyday = '00' + lyday
            elif yday < 100:
                lyday = '0'  + lyday
            dtime = str(year) + ':' + lyday

            start = dtime + ':00:00:00'
            stop  = dtime + ':23:59:59'

            line  = 'operation = retrieve\n'
            line  = line + 'dataset = flight\n'
            line  = line + 'detector = telem\n'
            line  = line + 'level = raw\n'
            line  = line + 'tstart = ' + start + '\n'
            line  = line + 'tstop  = ' + stop  + '\n'
            line  = line + 'go\n'

            out   = mcf.run_arc5gl_process(line)

            cmd = 'ls * > ' + zspace
            os.system(cmd)
            test = open(zspace, 'r').read()
            mc   = re.search('sto', test)
            if mc is not None:
                os.system('rm *log*')
                os.system('gzip -fd *gz')
                os.system('ls *.sto > xtmpnew')
                os.system('nice  ./filters_ccdm')
                esd.extract_sim_data()

            os.system('rm  -rf *.sto *.tl')
Example No. 9
def get_data(start, stop, year, msid_list, out_dir):
    """
    update ephin hk level 0 data for the given period
    input:  start   --- start time
            stop    --- stop time
            year    --- data extraction year
            msid_list   --- list of msids
            out_dir --- output directory
    output: <out_dir>/<msid>_full_data_<year>.fits
    """
    print(str(start) + '<-->' + str(stop))

    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ephin\n'
    line = line + 'level = 0\n'
    line = line + 'filetype =ephhk \n'
    line = line + 'tstart = ' + start + '\n'
    line = line + 'tstop = ' + stop + '\n'
    line = line + 'go\n'

    data_list = mcf.run_arc5gl_process(line)
    #
    #--- append the data to the local fits data files
    #
    for fits in data_list:

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = tbdata['time']

        for col in msid_list:
            #
            #--- ignore columns with "ST_" (standard dev) and time
            #
            mdata = tbdata[col]
            cdata = [time, mdata]
            ocols = ['time', col.lower()]

            if not os.path.isdir(out_dir):
                cmd = 'mkdir ' + out_dir
                os.system(cmd)

            ofits = out_dir + col.lower() + '_full_data_' + str(year) + '.fits'
            if os.path.isfile(ofits):
                update_fits_file(ofits, ocols, cdata)
            else:
                create_fits_file(ofits, ocols, cdata)

        mcf.rm_files(fits)
Example No. 10
def extract_dither_info_from_pacd(start, stop):
    """
    extract dither information from archived pcad data
    input:  start   --- start time
            stop    --- stop time
    output: t_list  --- a list of time
            d_list  --- a list of data (DISA/ENAB)
    """
#
#--- set arc5gl command
#
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=pcad\n'
    line = line + 'subdetector=eng\n'
    line = line + 'level=0\n'
    line = line + 'filetype=pcad8eng\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop='  + str(stop)  + '\n'
    line = line + 'go\n'
#
#--- run arc5gl
#
    f_list = mcf.run_arc5gl_process(line)
    if len(f_list) < 1:
        return []
#
#--- extract the data
#
    t_list = []
    d_list = []
    for ent in f_list:
        hout   = pyfits.open(ent)
        data   = hout[1].data
        t_list = t_list + list(data['time'])
        d_list = d_list + list(data['AODITHEN'])
        hout.close()

        mcf.rm_files(ent)
    t_list = [float(ent) for ent in t_list]

    return [t_list, d_list]
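The two lists returned above are raw samples of the AODITHEN state (DISA/ENAB). As a purely illustrative follow-up, not part of the original code, a small helper that turns them into (start, stop) pairs during which dither was disabled could look like this:

def find_disabled_periods(t_list, d_list):
    """
    collect [start, stop] time pairs (seconds from 1998.1.1)
    during which AODITHEN == 'DISA'
    """
    periods = []
    start = None
    for t, d in zip(t_list, d_list):
        if d == 'DISA' and start is None:
            start = t
        elif d == 'ENAB' and start is not None:
            periods.append([start, t])
            start = None
    #--- dither still disabled at the end of the data
    if start is not None:
        periods.append([start, t_list[-1]])
    return periods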
Example No. 11
def extract_evt1_data(start, stop):
    """
    extract all acis evt1 files for the given period
    input:  start   --- start time
            stop    --- stop time
    output: extracted acis evt1 fits files in the working directory
            out     --- a list of the extracted acis evt1 file names
    """
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'
    line = line + 'go\n'

    out = mcf.run_arc5gl_process(line)

    return out
Example No. 12
def extract_data(start, stop):
    """
    using arc5gl, extract hk data
    input:  start   --- start time (any format accepted by arc5gl)
            stop    --- stop time  (any format accepted by arc5gl)
    output: extracted hrc hk0 fits data files
            flist   --- a list of hk0 fits files
    """
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=hrc\n'
    line = line + 'level=0\n'
    line = line + 'filetype=hrchk\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'
    line = line + 'go\n'

    flist = mcf.run_arc5gl_process(line)

    return flist
Example No. 13
def extract_acis_count_rate(start, stop, dir_name):
    """
    extract acis count rate data
    input:  start       --- start time
            stop        --- stop time
            dir_name    --- output dir name
    output: <dir_name>/ccd<#ccd>
    """
    #
    #--- make a list of data fits file
    #
    data_list = get_data_list_from_archive(start, stop)

    if len(data_list) == 0:
        print("No data")
        exit(1)

    for ifile in data_list:
        #
        #--- extract the fits file with arc5gl
        #
        line = 'operation=retrieve\n'
        line = line + 'dataset=flight\n'
        line = line + 'detector=acis\n'
        line = line + 'level=1\n'
        line = line + 'filetype=evt1\n'
        line = line + 'filename=' + ifile + '\n'
        line = line + 'go\n'

        out = mcf.run_arc5gl_process(line)

        cmd = 'gzip -d ' + ifile + '.gz'
        os.system(cmd)
        #
        #--- extract data and update/create the count rate data
        #
        print("Extracting: " + ifile)
        extract_data(ifile, dir_name)

        mcf.rm_files(ifile)
Example No. 14
def get_data_list_from_archive(start, stop):
    """
    create a list of non-calibration acis evt1 files for the given period
    input:  start       --- start time in seconds from 1998.1.1
            stop        --- stop time in seconds from 1998.1.1
    output: file_list   --- a list of acis evt1 file names
    """
    #
    #--- create data list with arc5gl
    #
    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'
    line = line + 'go\n'

    data = mcf.run_arc5gl_process(line)
    #
    #--- keep only non-calibration data files (obsid < 50000)
    #
    file_list = []
    for ent in data:
        mc = re.search('acisf', ent)
        if mc is None:
            continue

        ftemp = re.split('\s+', ent)
        atemp = re.split('acisf', ftemp[0])
        btemp = re.split('_', atemp[1])
        ctemp = re.split('N', btemp[0])
        mark = int(ctemp[0])

        if mark < 50000:
            file_list.append(ftemp[0])

    return file_list
Example No. 15
def run_arc5gl(start, stop):
    """
    extract acis and hrc evt1a.fits files using arc5gl
    input:  start   --- start time in the format of 2018-01-01:00:00:00
            stop    --- stop time
    output: fits files (e.g., acisf17108_001N002_evt1a.fits.gz)
    """
#
#--- read a template and create the current command file
#
    ifile = house_keeping +'arc_template'
    with open(ifile, 'r') as f:
        line = f.read()

    line = line.replace('#START#', start)
    line = line.replace('#STOP#',  stop)

    flist = mcf.run_arc5gl_process(line)
#
#--- remove unwanted fits files
#
    os.system('rm *src1a*')
Example No. 16
def use_arc5gl(operation,
               dataset,
               detector,
               level,
               filetype,
               start,
               stop,
               deposit='./'):
    """
    extract data using arc5gl. 
    input:  operation   --- operation (e.g., retrieve)
            dataset     --- dataset (e.g., flight)
            detector    --- detector (e.g., hrc)
            level       --- level (e.g., 0, 1, 2)
            filetype    --- file type (e.g., evt1)
            start       --- starting time
            stop        --- stopping time
            deposit     --- where to deposit output fits file
    output: data        --- a list of fits file extracted
    """
    #
    #--- set arc5gl command
    #
    line = 'operation=' + operation + '\n'
    line = line + 'dataset=' + dataset + '\n'
    line = line + 'detector=' + detector + '\n'
    line = line + 'level=' + str(level) + '\n'
    line = line + 'filetype=' + filetype + '\n'

    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'

    line = line + 'go\n'

    data = mcf.run_arc5gl_process(line)

    return data
Example No. 17
def extract_hrchk(start, stop):
    """
    extract hrchk fits data files from archive
    input:  start   --- starting time in yyyy-mm-ddThh:mm:ss
            stop    --- stopping time in yyyy-mm-ddThh:mm:ss
    output: data    --- a list of fits files extracted
    """
    line = 'operation=retrieve\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=hrc\n'
    line = line + 'level=0\n'
    line = line + 'filetype=hrchk\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop='  + str(stop)  + '\n'
    line = line + 'go\n'

    cdata = mcf.run_arc5gl_process(line)

    if len(cdata) > 0:
        cmd = 'chmod 777 *fits.gz'
        os.system(cmd)

    return cdata
Example No. 18
def run_arc5gl(start, stop):
    """
    extract data from archive using arc5gl
    input:  start   --- starting time in the format of mm/dd/yy,hh/mm/ss. hh/mm/ss is optional
            stop    --- stopping time
    output: extracted data set
    """
#
#--- write arc5gl command
#
    line = 'operation=retrieve\n'
    line = line + 'dataset =flight\n'
    line = line + 'detector=telem\n'
    line = line + 'level =raw\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop='  + str(stop)  + '\n'
    line = line + 'go\n'
#
#--- extract data
#
    out  = mcf.run_arc5gl_process(line)

    return out
Example No. 19
def make_obsid_list(start, stop):
    """
    create a list of acis observations for a given period
    input:  start   --- starting time
            stop    --- stopping time
    output: a_list  --- a list of obsids
    """
    line = 'operation=browse\n'
    line = line + 'dataset=flight\n'
    line = line + 'detector=acis\n'
    line = line + 'level=1\n'
    line = line + 'filetype=evt1\n'
    line = line + 'tstart=' + str(start) + '\n'
    line = line + 'tstop=' + str(stop) + '\n'
    line = line + 'go\n'

    out = mcf.run_arc5gl_process(line)
    #
    #--- save obsids
    #
    a_list = []
    if len(out) > 0:
        for ent in out:
            atemp = re.split('acisf', ent)
            btemp = re.split('_', atemp[1])
            obsid = btemp[0]
            #
            #--- make sure that obsid is numeric
            #
            try:
                chk = float(obsid)
            except ValueError:
                continue

            a_list.append(obsid)

    return a_list
Example No. 20
def extract_data_arc5gl(detector, level, filetype, tstart, tstop, sub=''):
    """
    extract data using arc5gl
    input:  detector    --- detector name
            level       --- level
            filetype    --- file type
            tstart      --- starting time
            tstop       --- stopping time
            sub         --- subdetector name; default "" --- no sub detector
    output: cols        --- a list of col name
            tdata       --- a list of arrays of data
    """
    #
    #--- build the arc5gl command and extract the data
    #
    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ' + detector + '\n'

    if sub != '':
        line = line + 'subdetector = ' + sub + '\n'

    line = line + 'level = ' + level + '\n'
    line = line + 'filetype = ' + filetype + '\n'
    line = line + 'tstart = ' + str(tstart) + '\n'
    line = line + 'tstop = ' + str(tstop) + '\n'
    line = line + 'go\n'

    flist = mcf.run_arc5gl_process(line)

    if len(flist) < 1:
        print("\t\tNo data")
        return [[], []]
#
#--- combine them
#
    flen = len(flist)

    if flen == 0:
        return [[], []]

    elif flen == 1:
        cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
        os.system(cmd)

    else:
        mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
        if flen > 2:
            for k in range(2, flen):
                mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                cmd = 'mv out.fits ztemp.fits'
                os.system(cmd)
#
#--- remove individual fits files
#

    for ent in flist:
        cmd = 'rm -rf ' + ent
        os.system(cmd)
#
#--- return data
#
    [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

    cmd = 'rm -f ztemp.fits out.fits'
    os.system(cmd)

    return [cols, tbdata]
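mfo.appendFitsTable, used above to stack table extensions row by row, is also not listed on this page. A hedged astropy-based sketch of the same operation, assuming both files carry compatible columns in their first table extension (the real mta module may differ):

from astropy.table import Table, vstack

def append_fits_table(fits1, fits2, outfile):
    """row-wise concatenation of the first table extension of two fits files"""
    t1 = Table.read(fits1, hdu=1)
    t2 = Table.read(fits2, hdu=1)
    vstack([t1, t2]).write(outfile, format='fits', overwrite=True)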
Example No. 21
def update_simdiag_data(date=''):
    """
    collect sim diag msids
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
    #
    #--- read group names which need special treatment
    #
    #sfile = house_keeping + 'msid_list_simdiag'
    sfile = './msid_list_simsupple'
    data = mcf.read_data_file(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()

    day_list = []
    for year in range(1999, 2021):
        cyear = str(year)
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue
            if year == 2020:
                if mon > 1:
                    break

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon

            if mcf.is_leapyear(year):
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]

            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday

                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)

    for sday in day_list:
        if sday == '2020-01-17':
            break
        print("Date: " + sday)

        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'

        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = sim\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'

        flist = mcf.run_arc5gl_process(line)

        if len(flist) < 1:
            print("\t\tNo data")
            continue
#
#--- combine them
#
        flen = len(flist)

        if flen == 0:
            continue

        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)

        else:
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove individual fits files
#
        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)
#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')

        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
        #
        #--- get time data in the list form
        #
        dtime = list(tbdata.field('time'))

        for k in range(0, len(cols)):
            #
            #---- extract data in a list form
            #
            col = cols[k]
            data = list(tbdata.field(col))
            #
            #--- change col name to msid
            #
            msid = col.lower()
            #
            #--- get limit data table for the msid
            #
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0

            glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
            #
            #--- update database
            #
            tstart = convert_time_format(start)
            tstop = convert_time_format(stop)

            update_database(msid,
                            g_dir[msid],
                            dtime,
                            data,
                            glim,
                            pstart=tstart,
                            pstop=tstop)
Example No. 22
def acis_dose_get_data(startYear='', startMonth='', stopYear='', stopMonth=''):
    """
    extract ACIS evt1 data from a month and create combined image file. 
    input:  startYear   --- year of starting time
            startMonth  --- month of starting time
            stopYear    --- year of stopping time
            stopMonth   --- month of stopping time
    """
    if startYear == '' or startMonth == '' or stopYear == '' or stopMonth == '':

        startYear = int(float(input('Start Year: ')))
        startMonth = int(float(input('Start Month: ')))

        stopYear = int(float(input('Stop Year: ')))
        stopMonth = int(float(input('Stop Month: ')))
#
#--- start extracting the data for the year/month period
#
    for year in range(startYear, stopYear + 1):
        #
        #--- create a list of month appropriate for the year
        #
        month_list = expf.make_month_list(year, startYear, stopYear,
                                          startMonth, stopMonth)

        for month in month_list:
            smon = mcf.add_leading_zero(month)
            start = str(year) + '-' + smon + '-01T00:00:00'

            nextMonth = month + 1
            nyear = year
            if nextMonth > 12:
                nextMonth = 1
                nyear += 1
            smon = mcf.add_leading_zero(nextMonth)
            stop = str(nyear) + '-' + smon + '-01T00:00:00'
            #
            #--- using ar5gl, get a list of file names
            #
            line = 'operation=browse\n'
            line = line + 'dataset=flight\n'
            line = line + 'detector=acis\n'
            line = line + 'level=1\n'
            line = line + 'filetype=evt1\n'
            line = line + 'tstart=' + start + '\n'
            line = line + 'tstop=' + stop + '\n'
            line = line + 'go\n'

            fitsList = mcf.run_arc5gl_process(line)
            #
            #--- extract each evt1 file, extract the central part, and combine them into one file
            #
            for fits in fitsList:
                print("FITS File: " + fits)
                line = 'operation=retrieve\n'
                line = line + 'dataset=flight\n'
                line = line + 'detector=acis\n'
                line = line + 'level=1\n'
                line = line + 'filetype=evt1\n'
                line = line + 'filename=' + fits + '\n'
                line = line + 'go\n'

                out = mcf.run_arc5gl_process(line)
                #
                #--- check whether the fits file was actually extracted and, if so, gunzip the file
                #
                if len(out) < 1:
                    continue
                cmd = 'gzip -d ' + out[0]
                os.system(cmd)

                line = fits + '[EVENTS][bin tdetx=2800:5200:1, tdety=1650:4150:1][option type=i4]'
                #
                #--- create an image file
                #
                ichk = expf.create_image(line, 'ztemp.fits')
                #
                #--- combine the images
                #
                if ichk > 0:
                    expf.combine_image('ztemp.fits', 'total.fits')

                mcf.rm_files(fits)
                mcf.rm_files('ztemp.fits')
#
#--- rename the file
#
            lyear = str(year)
            lmon = mcf.add_leading_zero(month)
            outfile = './ACIS_' + lmon + '_' + lyear + '_full.fits'
            cmd = 'mv total.fits ' + outfile
            os.system(cmd)
            #
            #--- trim the extreme values
            #
            upper = find_10th(outfile)
            outfile2 = './ACIS_' + lmon + '_' + lyear + '.fits'
            if mcf.is_neumeric(upper):
                cmd = ' dmimgthresh infile=' + outfile + ' outfile=' + outfile2
                cmd = cmd + ' cut="0:' + str(upper) + '" value=0 clobber=yes'
                expf.run_ascds(cmd)
            else:
                cmd = 'cp -f ' + outfile + ' ' + outfile2
                os.system(cmd)

            cmd = 'gzip ' + outfile
            os.system(cmd)
            #
            #--- move the full image to the data dir; keep the other in <exc_dir> to be used to create cumulative files
            #
            cmd = 'mv ' + outfile + '* ' + mon_acis_dir + '/.'
            os.system(cmd)
Example No. 23
def update_eph_data(date=''):
    """
    collect eph data for trending
    input:  date    --- the date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to eph
    """
    #
    #--- read the group names which need special treatment
    #
    sfile = 'eph_list'
    glist = mcf.read_data_file(sfile)
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()

    day_list = []
    for year in range(2000, 2019):  #--- adjust the year range as needed
        lyear = year
        for mon in range(1, 13):
            #if year == 2018 and mon > 1:
            #    break
            #if year == 2017 and mon < 11:
            #    continue

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon

            nmon = mon + 1
            if nmon > 12:
                nmon = 1
                lyear += 1

            clmon = str(nmon)
            if nmon < 10:
                clmon = '0' + clmon

            start = str(year) + '-' + cmon + '-01T00:00:00'
            stop = str(lyear) + '-' + clmon + '-01T00:00:00'

            print "Period: " + str(start) + "<--->" + str(stop)

            for group in glist:
                print "Group: " + group
                #
                #---CHANGE THE DETECTOR/FILETYPE BEFORE RUNNING IF IT IS DIFFERENT FROM EPHHK
                #
                line = 'operation=retrieve\n'
                line = line + 'dataset=flight\n'
                line = line + 'detector=ephin\n'
                line = line + 'level=0\n'
                line = line + 'filetype=ephhk\n'
                line = line + 'tstart=' + start + '\n'
                line = line + 'tstop=' + stop + '\n'
                line = line + 'go\n'

                flist = mcf.run_arc5gl_process(line)

                if len(flist) < 1:
                    print "\t\tNo data"
                    continue
#
#--- combine them
#
                flen = len(flist)

                if flen == 0:
                    continue

                elif flen == 1:
                    cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                    os.system(cmd)

                else:
                    mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                    if flen > 2:
                        for k in range(2, flen):
                            mfo.appendFitsTable('ztemp.fits', flist[k],
                                                'out.fits')
                            cmd = 'mv out.fits ztemp.fits'
                            os.system(cmd)
#
#--- remove individual fits files
#

                for ent in flist:
                    cmd = 'rm -rf ' + ent
                    os.system(cmd)
#
#--- read out the data
#
                [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

                cmd = 'rm -f ztemp.fits out.fits'
                os.system(cmd)
                #
                #--- get time data in the list form
                #
                dtime = list(tbdata.field('time'))

                for k in range(1, len(cols)):
                    #
                    #--- select col name without ST_ (which is standard dev)
                    #
                    col = cols[k]
                    mc = re.search('ST_', col)
                    if mc is not None:
                        continue
                    mc = re.search('quality', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('mjf', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('gap', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('dataqual', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('tlm_fmt', col, re.IGNORECASE)
                    if mc is not None:
                        continue
#
#---- extract data in a list form
#
                    data = list(tbdata.field(col))
                    #
                    #--- change col name to msid
                    #
                    msid = col.lower()
                    #
                    #--- get limit data table for the msid
                    #
                    try:
                        tchk = convert_unit_indicator(udict[msid])
                    except:
                        tchk = 0

                    glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
                    #
                    #--- update database
                    #
                    update_database(msid, group, dtime, data, glim)
Example No. 24
def process_evt(lev='Lev1'):
    """
    process lev1 or 2  ACIS SIB data
    input:  lev --- which level data to process; default: 'Lev1'
            it also reads acis_obs file to find fits file names
    output: processed fits files in <out_dir>/lres/
    """
    ldir = cor_dir + lev + '/'
    indir = ldir + 'Input/'
    outdir = ldir + 'Outdir/'
    repdir = ldir + 'Reportdir/'

    data = mcf.read_data_file('./acis_obs')

    for obs in data:
        atemp = re.split('\s+', obs)
        obsid = atemp[0].strip()
        print("OBSID: " + str(obsid))
        #
        #--- extract evt 1/2 file
        #
        line = 'operation=retrieve\n'
        line = line + 'dataset=flight\n'
        line = line + 'detector=acis\n'
        if lev == 'Lev1':
            line = line + 'level=1\n'
            line = line + 'filetype=evt1\n'
        else:
            line = line + 'level=2\n'
            line = line + 'filetype=evt2\n'
        line = line + 'obsid=' + str(obsid) + '\n'
        line = line + 'go\n'

        flist = mcf.run_arc5gl_process(line)

        for fits in flist:
            #
            #--- exclude bright sources from the file
            #
            es.exclude_sources(fits)

            cmd = 'mv *cleaned*fits ' + indir + '/. 2>/dev/null'
            os.system(cmd)

        cmd = 'rm -rf *fits*'
        os.system(cmd)
        #
        #--- extract acis evt1 files from the archive and compute SIB
        #--- on occasion the process dies for an unknown reason, so repeat twice
        #--- to cover the failed case
        #
        try:
            scf.sib_corr_comp_sib(lev)
        except:
            try:
                scf.sib_corr_comp_sib(lev)
            except:
                pass
#
#--- clean up the files
#
        cmd = 'rm -rf ' + indir + '/*fits ' + outdir + '/*fits ' + outdir + '/*ped* '
        os.system(cmd)
Example No. 25
def recover_hrcveto_data():
    """
    recover hrc veto data
    input:  none
    output: updated hrc veto trending data
    """
#
#--- read group names which need special treatment
#
    #sfile = 'eph_list'
    #glist = mcf.read_data_file(sfile)
    glist  = ['Hrcveto']
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()


    day_list = []
    for year in range(1999, 2018):
        lyear = year
        cyear = str(year)
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue

            if year == 2017:
                if mon > 10:
                    break

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon

            nmon = mon + 1
            if nmon > 12:
                nmon = 1
                lyear += 1

            cnmon = str(nmon)
            if nmon < 10:
                cnmon = '0' + cnmon


            start = str(year)  + '-' + cmon  + '-01T00:00:00'
            stop  = str(lyear) + '-' + cnmon  + '-01T00:00:00'
    
            for group in glist:
                print "Group: " + group + ' : ' + str(start) + '<-->' + str(stop)
    
                line = 'operation=retrieve\n'
                line = line + 'dataset = flight\n'
                line = line + 'detector = hrc\n'
                line = line + 'level = 0\n'
                line = line + 'filetype = hrcss\n'
                line = line + 'tstart = '   + start + '\n'
                line = line + 'tstop = '    + stop  + '\n'
                line = line + 'go\n'
    
                flist = mcf.run_arc5gl_process(line)
    
                if len(flist) < 1:
                    print "\t\tNo data"
                    continue
#
#--- combine them
#
                flen = len(flist)
     
                if flen == 0:
                    continue
     
                elif flen == 1:
                    cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                    os.system(cmd)
     
                else:
                    mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                    if flen > 2:
                        for k in range(2, flen):
                            mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                            cmd = 'mv out.fits ztemp.fits'
                            os.system(cmd)
#
#--- remove individual fits files
#
    
                for ent in flist:
                    cmd = 'rm -rf ' + ent 
                    os.system(cmd)

#
#--- read out the data for the full day
#
                [cols, tbdata] = ecf.read_fits_file('ztemp.fits')
    
                cols = ['TLEVART', 'VLEVART', 'SHEVART']
     
                cmd = 'rm -f ztemp.fits out.fits'
                os.system(cmd)
#
#--- get time data in the list form
#
                dtime = list(tbdata.field('time'))
     
                for col in cols:
#
#---- extract data in a list form
#
                    data = list(tbdata.field(col))
#
#--- change col name to msid
#
                    msid = col.lower()
#
#--- get limit data table for the msid
#
                    try:
                        tchk  = convert_unit_indicator(udict[msid])
                    except:
                        tchk  = 0
     
                    glim  = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
                    update_database(msid, group, dtime, data, glim)
Example No. 26
def hrc_dose_get_data(startYear='NA',
                      startMonth='NA',
                      stopYear='NA',
                      stopMonth='NA'):
    """
    extract HRC evt1 data from a month and create cumulative data fits file. 
    input:  startYear   --- start year 
            startMonth  --- start month 
            stopYear    --- stop year 
            stopMonth   --- stop month
    output: image fits files for the month and cumulative cases
    """
    #
    #--- if the dates are not given, set them to the last month
    #
    if startYear == 'NA':
        [stopYear, stopMonth, day] = mcf.today_date()

        startYear = stopYear
        stopMonth -= 1
        startMonth = stopMonth
        if startMonth < 1:
            startMonth = 12
            startYear -= 1
#
#--- start extracting the data for the year/month period
#
    for year in range(startYear, stopYear + 1):
        lyear = str(year)
        syear = lyear[2] + lyear[3]
        #
        #--- create a list of month appropriate for the year
        #
        month_list = expf.make_month_list(year, startYear, stopYear,
                                          startMonth, stopMonth)

        for month in month_list:
            smonth = mcf.add_leading_zero(month)
            #
            #--- output file name settings
            #
            outfile_i = './HRCI_' + str(smonth) + '_' + str(lyear) + '.fits'
            outfile_s = './HRCS_' + str(smonth) + '_' + str(lyear) + '.fits'
            #
            #--- using ar5gl, get file names
            #
            smonth = mcf.add_leading_zero(month)
            syear = str(year)
            start = syear + '-' + smonth + '-01T00:00:00'

            nextMonth = month + 1
            if nextMonth > 12:
                lyear = year + 1
                nextMonth = 1
            else:
                lyear = year

            smonth = mcf.add_leading_zero(nextMonth)
            syear = str(lyear)
            stop = str(lyear) + '-' + smonth + '-01T00:00:00'

            line = 'operation=browse\n'
            line = line + 'dataset=flight\n'
            line = line + 'detector=hrc\n'
            line = line + 'level=1\n'
            line = line + 'filetype=evt1\n'
            line = line + 'tstart=' + start + '\n'
            line = line + 'tstop=' + stop + '\n'
            line = line + 'go\n'

            fitsList = mcf.run_arc5gl_process(line)
            #
            #--- extract each evt1 file, extract the central part, and combine them into one file
            #
            #--- set counters for how many hrc-i and hrc-s are extracted
            #
            hrci_cnt = 0
            hrcs_cnt = 0
            for fitsName in fitsList:
                print("Fits file: " + fitsName)
                m = re.search('fits', fitsName)
                if m is None:
                    continue
                try:
                    line = 'operation=retrieve\n'
                    line = line + 'dataset=flight\n'
                    line = line + 'detector=hrc\n'
                    line = line + 'level=1\n'
                    line = line + 'filetype=evt1\n'
                    line = line + 'filename=' + fitsName + '\n'
                    line = line + 'go\n'
                    out = mcf.run_arc5gl_process(line)

                    if len(out) < 1:
                        continue

                    ofits = out[0]
                    cmd = 'gzip -d ' + ofits
                    os.system(cmd)

                    ofits = ofits.replace('.gz', '')
                except:
                    continue
#
#--- checking which HRC (S or I)
#
                hout = pyfits.open(ofits)
                data = hout[1].header
                detector = data['DETNAM']
                hout.close()
                #
                #--- creating the center part image
                #
                line = set_cmd_line(ofits, detector)
                ichk = expf.create_image(line, 'ztemp.fits')
                #
                #--- for HRC S
                #
                if detector == 'HRC-S' and ichk > 0:
                    expf.combine_image('ztemp.fits', 'total_s.fits')
                    hrcs_cnt += 1
#
#--- for HRC I
#
                elif detector == 'HRC-I' and ichk > 0:
                    expf.combine_image('ztemp.fits', 'total_i.fits')
                    hrci_cnt += 1

                mcf.rm_files('out.fits')
                mcf.rm_files(ofits)
#
#--- move the file to a depository
#
            if hrcs_cnt > 0:
                cmd = 'mv total_s.fits ' + web_dir + 'Month_hrc/' + outfile_s
                os.system(cmd)
                cmd = 'gzip ' + web_dir + '/Month_hrc/*.fits'
                os.system(cmd)

            if hrci_cnt > 0:
                cmd = 'mv total_i.fits ' + web_dir + 'Month_hrc/' + outfile_i
                os.system(cmd)
                cmd = 'gzip ' + web_dir + '/Month_hrc/*.fits'
                os.system(cmd)

            createCumulative(year, month, 'HRC-S', web_dir)
            createCumulative(year, month, 'HRC-I', web_dir)
Example No. 27
def get_data(tstart, tstop, year, grad_list, out_dir):
    """
    update msid data in msid_list for the given data period
    input:  tstart  --- start time in seconds from 1998.1.1
            tstop   --- stop time in seconds from 1998.1.1
            year    --- the year in which data is extracted
            grad_list   --- a list of  group name in grads
            out_dir --- output_directory
    """
    print("Period: " + str(tstart) + '<-->' + str(tstop) + ' in Year: ' +
          str(year))
    #
    #--- extract each group's data
    #
    for group in grad_list:
        print(group)

        line = 'operation=retrieve\n'
        line = line + 'dataset = mta\n'
        line = line + 'detector = grad\n'
        line = line + 'level = 0.5\n'
        line = line + 'filetype = ' + group + '\n'
        line = line + 'tstart = ' + str(tstart) + '\n'
        line = line + 'tstop = ' + str(tstop) + '\n'
        line = line + 'go\n'

        data_list = mcf.run_arc5gl_process(line)
        #
        #---  read the first fits file and prep for the data list
        #
        [cols, tbdata] = ecf.read_fits_file(data_list[0])
        col_list = []
        for ent in cols:
            if ent.lower() == 'time':
                continue
            mc = re.search('st_', ent.lower())
            if mc is not None:
                continue

            col_list.append(ent)

        mcf.rm_files(data_list[0])
        tdata = tbdata['time']
        mdata = []
        for col in col_list:
            mdata.append(tbdata[col])
#
#--- read the rest of the data
#
        clen = len(col_list)
        for k in range(1, len(data_list)):
            fits = data_list[k]
            [cols, tbdata] = ecf.read_fits_file(fits)
            tdata = numpy.append(tdata, tbdata['time'])

            for m in range(0, clen):
                cdata = tbdata[col_list[m]]
                mdata[m] = numpy.append(mdata[m], cdata)

            mcf.rm_files(fits)

        dout = out_dir + group.capitalize() + '/'

        if not os.path.isdir(dout):
            cmd = 'mkdir ' + dout
            os.system(cmd)
#
#--- write out the data to fits file
#
        for k in range(0, clen):
            col = col_list[k]
            ocols = ['time', col.lower()]
            cdata = [tdata, mdata[k]]

            ofits = dout + col.lower() + '_full_data_' + str(year) + '.fits'

            if os.path.isfile(ofits):
                ecf.update_fits_file(ofits, ocols, cdata)
            else:
                ecf.create_fits_file(ofits, ocols, cdata)

#
#--- zip the fits file from the last year at the beginning of the year
#
        ecf.check_zip_possible(dout)
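A hedged usage sketch for get_data above; the grad group name, the time range (seconds from 1998.1.1), and the output directory are placeholders rather than values from the original script:

#--- illustration only: group name, times, and output path are placeholders
grad_list = ['gradablk']
get_data(694224000, 694310400, 2020, grad_list, './Grad_save/')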