def get_data(msid, start, stop):
    """
    extract data for the given data period
    input:  msid    --- msid
            start   --- starting time in seconds from 1998.1.1
            stop    --- stopping time in seconds from 1998.1.1
    output: data    --- a list of msid values; [] when the extraction fails,
                        no data are returned, or dataseeker does not cover
                        the requested period
    """
    #
    #--- extract data with dataseeker
    #
    try:
        ecf.data_seeker(start, stop, msid)
        [col, tbdata] = ecf.read_fits_file('temp_out.fits')
        mcf.rm_file('temp_out.fits')
    except Exception:
        #
        #--- dataseeker run or fits reading failed; nothing to return
        #
        return []

    time = tbdata.field('time')
    #
    #--- no rows extracted: avoid an IndexError on time[-1] below
    #
    if len(time) == 0:
        return []
    #
    #--- if the dataseeker's data is not filled for the given data period
    #--- stop any further data processing
    #
    if time[-1] < 0.95 * stop:
        data = []
    else:
        try:
            #
            #--- dataseeker keeps averaged values in the <msid>_avg column
            #
            name = msid + '_avg'
            data = tbdata.field(name)
        except Exception:
            data = []

    return data
def get_data(msid, start, stop):
    """
    extract data for the given data period
    input:  msid    --- msid
            start   --- starting time in seconds from 1998.1.1
            stop    --- stopping time in seconds from 1998.1.1
    output: data    --- a list of msid values; [] when the extraction fails,
                        no data are returned, or dataseeker does not cover
                        the requested period
    """
#
#--- extract data with dataseeker
#
    try:
        ecf.data_seeker(start, stop, msid)
        [col, tbdata] = ecf.read_fits_file('temp_out.fits')
        mcf.rm_file('temp_out.fits')
    except Exception:
#
#--- dataseeker run or fits reading failed; nothing to return
#
        return []

    time = tbdata.field('time')
#
#--- no rows extracted: avoid an IndexError on time[-1] below
#
    if len(time) == 0:
        return []
#
#--- if the dataseeker's data is not filled for the given data period
#--- stop any further data processing
#
    if time[-1] < 0.95 * stop:
        data = []
    else:
        try:
#
#--- dataseeker keeps averaged values in the <msid>_avg column
#
            name = msid + '_avg'
            data = tbdata.field(name)
        except Exception:
            data = []

    return data
# Esempio n. 3
# 0
def find_pitch_angle(start, stop):
    """
    create a table of time and sun pitch angle
    input:  start   --- starting time in seconds from 1998.1.1
            stop    --- stopping time in seconds from 1998.1.1
    output: time    --- a list of time in seconds from 1998.1.1
            pitch   --- a list of pitch in degree
    """
    #
    #--- extract pcad aspsol fits files for the given time period
    #
    line = '\n'.join(['operation=retrieve',
                      'dataset = flight',
                      'detector = pcad',
                      'subdetector = aca',
                      'level = 1',
                      'filetype =aspsol',
                      'tstart = ' + str(start),
                      'tstop = ' + str(stop),
                      'go']) + '\n'
    data_list = mcf.run_arc5gl_process(line)
    [cols, tbdata] = ecf.read_fits_file(data_list[0])
    #
    #--- the extracted fits files are no longer needed once read
    #
    for fits in data_list:
        os.system('rm -rf ' + fits)

    time = tbdata['time']
    ra = tbdata['ra']
    dec = tbdata['dec']

    time_list = []
    pitch_list = []
    prev = 0.0
    m = 0
    #
    #--- keep at most one entry per integer second; of those, compute the
    #--- sun pitch only every 300th entry (roughly one sample per 5 mins)
    #
    for sec, sra, sdec in zip(time, ra, dec):
        itime = int(sec)
        if itime == prev:
            continue

        if m % 300 == 0:
            time_list.append(itime)
            pitch_list.append(find_chandra_pitch(sec, sra, sdec))

        prev = itime
        m += 1

    return [numpy.array(time_list), numpy.array(pitch_list)]
# Esempio n. 4
# 0
def get_data(start, stop, year, msid_list):
    """
    extract ephin hk data with arc5gl and append it to local fits files
    input:  start   --- start time string understood by arc5gl
            stop    --- stop time string understood by arc5gl
            year    --- the year of the extracted data
            msid_list   --- a list of msids (fits column names) to extract
    output: <out_dir>/<msid>_full_data_<year>.fits (updated or created)
    """
    print(str(start) + '<-->' + str(stop))

    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ephin\n'
    line = line + 'level = 0\n'
    line = line + 'filetype =ephhk \n'
    line = line + 'tstart = ' + start + '\n'
    line = line + 'tstop = ' + stop + '\n'
    line = line + 'go\n'

    with open(zspace, 'w') as fo:
        fo.write(line)
    #
    #--- run arc5gl; try the ska copy first, then fall back to the simul copy.
    #--- NOTE: os.system does not raise on a failing command, so the except
    #--- branch only fires if os.system itself cannot run; kept for safety
    #
    try:
        cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
        os.system(cmd)
    except Exception:
        cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
        os.system(cmd)

    mcf.rm_file(zspace)
    #
    #--- first line of the arc5gl output is a header; drop it
    #
    data_list = ecf.read_file_data('ztemp_out')
    data_list = data_list[1:]
    #
    #--- make sure the output directory exists (checked once, outside the loop)
    #
    if not os.path.isdir(out_dir):
        cmd = 'mkdir ' + out_dir
        os.system(cmd)
    #
    #--- append the data to the local fits data files
    #
    for fits in data_list:

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = tbdata['time']

        for col in msid_list:
            mdata = tbdata[col]
            cdata = [time, mdata]
            ocols = ['time', col.lower()]

            ofits = out_dir + col.lower() + '_full_data_' + str(year) + '.fits'
            if os.path.isfile(ofits):
                update_fits_file(ofits, ocols, cdata)
            else:
                create_fits_file(ofits, ocols, cdata)

        mcf.rm_file(fits)
# Esempio n. 5
# 0
def get_data(start, stop, year, msid_list, out_dir):
    """
    update eph l1 related data for the given data period
    input:  start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
            year    --- data extracted year
            msid_list   --- list of msids
            out_dir --- output_directory
    output: <out_dir>/<msid>_full_data_<year>.fits
    """
    print(str(start) + '<-->' + str(stop))

    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ephin\n'
    line = line + 'level = 0\n'
    line = line + 'filetype =ephhk \n'
    line = line + 'tstart = ' + str(start) + '\n'
    line = line + 'tstop = ' + str(stop) + '\n'
    line = line + 'go\n'

    data_list = mcf.run_arc5gl_process(line)
    #
    #--- make sure the output directory exists; this is loop-invariant, so
    #--- check it once here instead of once per msid per fits file
    #
    if not os.path.isdir(out_dir):
        cmd = 'mkdir ' + out_dir
        os.system(cmd)
    #
    #--- append the data to the local fits data files
    #
    for fits in data_list:

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = tbdata['time']

        for col in msid_list:
            mdata = tbdata[col]
            cdata = [time, mdata]
            ocols = ['time', col.lower()]

            ofits = out_dir + col.lower() + '_full_data_' + str(year) + '.fits'
            if os.path.isfile(ofits):
                ecf.update_fits_file(ofits, ocols, cdata)
            else:
                ecf.create_fits_file(ofits, ocols, cdata)

        mcf.rm_files(fits)
# Esempio n. 6
# 0
def get_data(start, stop, year, msid_list):
    """
    extract ephin hk data and append it to the local fits data files
    input:  start   --- start time string understood by arc5gl
            stop    --- stop time string understood by arc5gl
            year    --- the year of the extracted data
            msid_list   --- a list of msids (fits column names) to extract
    output: <out_dir>/<msid>_full_data_<year>.fits (updated or created)
    """
    print(str(start) + '<-->' + str(stop))

    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ephin\n'
    line = line + 'level = 0\n'
    line = line + 'filetype =ephhk \n'
    line = line + 'tstart = ' + start + '\n'
    line = line + 'tstop = ' + stop + '\n'
    line = line + 'go\n'

    data_list = mcf.run_arc5gl_process(line)
    #
    #--- make sure the output directory exists; this is loop-invariant, so
    #--- check it once here instead of once per msid per fits file
    #
    if not os.path.isdir(out_dir):
        cmd = 'mkdir ' + out_dir
        os.system(cmd)
    #
    #--- append the data to the local fits data files
    #
    for fits in data_list:

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = tbdata['time']

        for col in msid_list:
            mdata = tbdata[col]
            cdata = [time, mdata]
            ocols = ['time', col.lower()]

            ofits = out_dir + col.lower() + '_full_data_' + str(year) + '.fits'
            if os.path.isfile(ofits):
                update_fits_file(ofits, ocols, cdata)
            else:
                create_fits_file(ofits, ocols, cdata)

        mcf.rm_files(fits)
# Esempio n. 7
# 0
def update_eph_data(date=''):
    """
    collect grad and  comp data for trending
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
                         NOTE(review): "date" is currently unused --- the loops
                         below walk a hard-coded year/month range instead
    output: fits file data related to grad and comp
    NOTE: written for Python 2 (print statements, bare except clauses)
    """
    #
    #--- read group names which need special treatment
    #
    #sfile = 'eph_list'
    #glist = ecf.read_file_data(sfile)
    #
    # NOTE(review): the two lines above are commented out, so "glist" used in
    # the group loop below must come from module scope; otherwise this
    # function raises NameError --- confirm before running
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = read_cross_check_table()

    day_list = []       # NOTE(review): never used after this point
    for year in range(1999, 2018):  #---- CHANGE CHANGE CHAGE!!!!!
        lyear = year
        #
        #--- only process 2016-09 through 2017-10
        #
        for mon in range(1, 13):
            if year == 2016 and mon < 9:
                continue
            if year == 2017 and mon > 10:
                continue
            #
            #--- zero-pad the month for the time stamp
            #
            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
            #
            #--- the stop date is the first of the following month;
            #--- roll the year over after December
            #
            nmon = mon + 1
            if nmon > 12:
                nmon = 1
                lyear += 1

            clmon = str(nmon)
            if nmon < 10:
                clmon = '0' + clmon

            start = str(year) + '-' + cmon + '-01T00:00:00'
            stop = str(lyear) + '-' + clmon + '-01T00:00:00'

            print "Period: " + str(start) + "<--->" + str(stop)

            for group in glist:
                print "Group: " + group
                #
                #---CHANGE THE DETECTOR/FILETYPE BEFORE RUNNING IF IT IS DIFFERENT FROM EPHHK
                #
                line = 'operation=retrieve\n'
                line = line + 'dataset=flight\n'
                line = line + 'detector=ephin\n'
                line = line + 'level=0\n'
                line = line + 'filetype=epheio\n'
                line = line + 'tstart=' + start + '\n'
                line = line + 'tstop=' + stop + '\n'
                line = line + 'go\n'

                fo = open(zspace, 'w')
                fo.write(line)
                fo.close()
                #
                #--- run arc5gl (ska copy first, simul copy as fallback);
                #--- note: os.system does not raise on command failure
                #
                try:
                    cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
                    os.system(cmd)
                except:
                    cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
                    os.system(cmd)

                mcf.rm_file(zspace)
                #
                #--- find the names of the fits files of the day of the group
                #--- (first line of the arc5gl output is a header; drop it)
                #
                try:
                    flist = ecf.read_file_data('ztemp_out', remove=1)
                    flist = flist[1:]
                except:
                    print "\t\tNo data"
                    continue

                if len(flist) < 1:
                    print "\t\tNo data"
                    continue
#
#--- combined them
#
                flen = len(flist)

                if flen == 0:
                    continue

                elif flen == 1:
                    cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                    os.system(cmd)

                else:
                    #
                    #--- append all fits tables into a single ztemp.fits
                    #
                    mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                    if flen > 2:
                        for k in range(2, flen):
                            mfo.appendFitsTable('ztemp.fits', flist[k],
                                                'out.fits')
                            cmd = 'mv out.fits ztemp.fits'
                            os.system(cmd)
#
#--- remove indivisual fits files
#

                for ent in flist:
                    cmd = 'rm -rf ' + ent
                    os.system(cmd)

#
#--- read out the data
#
                [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

                cmd = 'rm -f ztemp.fits out.fits'
                os.system(cmd)
                #
                #--- get time data in the list form
                #
                dtime = list(tbdata.field('time'))
                #
                #--- cols[0] is skipped (presumably the time column --- verify)
                #
                for k in range(1, len(cols)):
                    #
                    #--- select col name without ST_ (which is standard dev)
                    #--- and skip the various house-keeping columns
                    #
                    col = cols[k]
                    mc = re.search('ST_', col)
                    if mc is not None:
                        continue
                    mc = re.search('quality', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('mjf', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('gap', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('dataqual', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('tlm_fmt', col, re.IGNORECASE)
                    if mc is not None:
                        continue
#
#---- extract data in a list form
#
                    data = list(tbdata.field(col))
                    #
                    #--- change col name to msid
                    #
                    msid = col.lower()
                    #
                    #--- get limit data table for the msid
                    #
                    try:
                        tchk = convert_unit_indicator(udict[msid])
                    except:
                        tchk = 0

                    glim = get_limit(msid, tchk, mta_db, mta_cross)
                    #
                    #--- update database
                    #
                    update_database(msid, group, dtime, data, glim)
# Esempio n. 8
# 0
def update_compdatabase():
    """
    update comp related data using mp data
    input: none but read from /data/mta_www/mp_reports and <house_keeping>/<comp_group>_past
    output: <out_dir>/<comp msid>_full_data.fits
    """
    for comp_group in comp_entry:
        #
        #--- the last input fits file processed in the previous run
        #
        past = mcf.read_data_file(house_keeping + comp_group + '_past')
        last = past[-1]
        #
        #--- find today's data entry
        #
        cmd = 'ls /data/mta_www/mp_reports/*/' + comp_group + '/data/mta*fits* >' + zspace
        os.system(cmd)
        current = mcf.read_data_file(zspace)
        #
        #--- save the current listing as the new "past" record
        #
        cmd = 'mv ' + zspace + ' ' + house_keeping + comp_group + '_past'
        os.system(cmd)
        #
        #--- only entries after the previously processed one are new;
        #--- if the last processed entry is not found, nothing is new
        #
        if last in current:
            new_fits = current[current.index(last) + 1:]
        else:
            new_fits = []
        #
        #--- append the data to the local fits data files
        #
        for fits in new_fits:
            [cols, tbdata] = ecf.read_fits_file(fits)

            time = tbdata['time']

            for col in cols:
                lcol = col.lower()
                #
                #--- ignore the time column and "st_" (standard dev) columns
                #
                if lcol == 'time' or re.search('st_', lcol):
                    continue

                ocols = ['time', lcol]
                cdata = [time, tbdata[col]]

                ofits = out_dir + lcol + '_full_data.fits'
                if os.path.isfile(ofits):
                    update_fits_file(ofits, ocols, cdata)
                else:
                    create_fits_file(ofits, ocols, cdata)
# Esempio n. 9
# 0
def extract_hrcveto_data():
    """
    extract hrc veto data
    input:  none
    output: fits file data related to grad and comp
    NOTE: written for Python 2 (print statements, bare except clauses)
    """
    #
    #--- read group names which need special treatment
    #
    glist = ['Hrcveto']
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = read_cross_check_table()
    #
    #--- find the date to be filled
    #
    day_list = find_the_last_entry_time()

    for sday in day_list:
        print "Date: " + sday
        #
        #--- extract one full day of data
        #
        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'

        for group in glist:
            print "Group: " + group

            line = 'operation=retrieve\n'
            line = line + 'dataset = flight\n'
            line = line + 'detector = hrc\n'
            line = line + 'level = 0\n'
            line = line + 'filetype = hrcss\n'
            line = line + 'tstart = ' + start + '\n'
            line = line + 'tstop = ' + stop + '\n'
            line = line + 'go\n'

            fo = open(zspace, 'w')
            fo.write(line)
            fo.close()
            #
            #--- run arc5gl (ska copy first, simul copy as fallback);
            #--- note: os.system does not raise on command failure
            #
            try:
                cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
                os.system(cmd)
            except:
                cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
                os.system(cmd)

            mcf.rm_file(zspace)
            #
            #--- find the names of the fits files of the day of the group
            #--- (first line of the arc5gl output is a header; drop it)
            #
            try:
                flist = ecf.read_file_data('ztemp_out', remove=1)
                flist = flist[1:]
            except:
                print "\t\tNo data"
                continue

            if len(flist) < 1:
                print "\t\tNo data"
                continue
#
#--- combined them
#
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                #
                #--- append all fits tables into a single ztemp.fits
                #
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- remove indivisual fits files
#

            for ent in flist:
                cmd = 'rm -rf ' + ent
                os.system(cmd)

#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')
            #
            #--- the column list read from the fits file is discarded and
            #--- replaced with the three veto counters of interest
            #
            cols = ['TLEVART', 'VLEVART', 'SHEVART']

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
            #
            #--- get time data in the list form
            #
            dtime = list(tbdata.field('time'))

            for col in cols:
                #
                #---- extract data in a list form
                #
                data = list(tbdata.field(col))
                #
                #--- change col name to msid
                #
                msid = col.lower()
                #
                #--- get limit data table for the msid
                #
                try:
                    tchk = convert_unit_indicator(udict[msid])
                except:
                    tchk = 0

                glim = get_limit(msid, tchk, mta_db, mta_cross)
                #
                #--- update database
                #
                update_database(msid, group, dtime, data, glim)
# Esempio n. 10
# 0
def get_data(tstart, tstop, year, grad_list, out_dir):
    """
    update msid data in msid_list for the given data period
    input:  tstart  --- start time in seconds from 1998.1.1
            tstop   --- stop time in seconds from 1998.1.1
            year    --- the year in which data is extracted
            grad_list   --- a list of group name in grads
            out_dir --- output_directory
    """
    print("Period: {}<-->{} in Year: {}".format(tstart, tstop, year))
    #
    #--- extract each group data
    #
    for group in grad_list:
        print(group)

        line = '\n'.join(['operation=retrieve',
                          'dataset = mta',
                          'detector = grad',
                          'level = 0.5',
                          'filetype = ' + group,
                          'tstart = ' + str(tstart),
                          'tstop = ' + str(tstop),
                          'go']) + '\n'

        data_list = mcf.run_arc5gl_process(line)
        #
        #--- read the first fits file and prep for the data list; drop the
        #--- time column and the "st_" (standard dev) columns
        #
        [cols, tbdata] = ecf.read_fits_file(data_list[0])
        col_list = [ent for ent in cols
                    if ent.lower() != 'time' and not re.search('st_', ent.lower())]

        mcf.rm_files(data_list[0])
        tdata = tbdata['time']
        mdata = [tbdata[col] for col in col_list]
        #
        #--- read the rest of the files, appending to the accumulated arrays
        #
        for fits in data_list[1:]:
            [cols, tbdata] = ecf.read_fits_file(fits)
            tdata = numpy.append(tdata, tbdata['time'])

            for m, col in enumerate(col_list):
                mdata[m] = numpy.append(mdata[m], tbdata[col])

            mcf.rm_files(fits)

        dout = out_dir + group.capitalize() + '/'

        if not os.path.isdir(dout):
            os.system('mkdir ' + dout)
        #
        #--- write out the data to fits file
        #
        for col, vals in zip(col_list, mdata):
            ocols = ['time', col.lower()]
            cdata = [tdata, vals]

            ofits = dout + col.lower() + '_full_data_' + str(year) + '.fits'

            if os.path.isfile(ofits):
                ecf.update_fits_file(ofits, ocols, cdata)
            else:
                ecf.create_fits_file(ofits, ocols, cdata)
        #
        #--- zip the fits file from the last year at the beginning of the year
        #
        ecf.check_zip_possible(dout)
# Esempio n. 11
# 0
def update_grad_and_comp_data(date=''):
    """
    collect grad and  comp data for trending
    input:  date    ---- the data collection end date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to grad and comp
    NOTE: written for Python 2 (print statements, bare except clauses)
    """
    #
    #--- read group names which need special treatment
    #
    sfile = house_keeping + 'mp_process_list'
    glist = ecf.read_file_data(sfile)
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()
    #
    #--- find date to read the data
    #
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = find_the_last_entry_time(yesterday)

    else:
        date_list = [date]

    for day in date_list:
        #
        #--- find the names of the fits files of the day of the group
        #
        print "Date: " + str(day)

        for group in glist:
            print "Group: " + str(group)
            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/mta*fits* > ' + zspace
            os.system(cmd)

            flist = ecf.read_file_data(zspace, remove=1)
            #
            #--- combined them
            #
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                #
                #--- append all fits tables into a single ztemp.fits
                #
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
            #
            #--- get time data in the list form
            #
            dtime = list(tbdata.field('time'))
            #
            #--- cols[0] is skipped (presumably the time column --- verify)
            #
            for k in range(1, len(cols)):
                #
                #--- select col name without ST_ (which is standard dev)
                #
                col = cols[k]
                mc = re.search('ST_', col)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
                #
                #--- change col name to msid
                #
                msid = col.lower()
                #
                #--- get limit data table for the msid
                #
                try:
                    tchk = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk = 0

                glim = get_limit(msid, tchk, mta_db, mta_cross)
                #
                #--- update database
                #
                update_database(msid, group, dtime, data, glim)
# Esempio n. 12
# 0
def update_mta_comp_database():
    """
    update database of mta computed msids
    input:  none but read from /data/mta4/Deriv/*fits files
    output: updated data file: <data_dir>/<msid>_data
    NOTE: written for Python 2 (print statement, bare except clauses)
    """
#
#--- get a list of data fits file names
#
    infile = house_keeping + 'mta_comp_fits_files'
    data   = ecf.read_file_data(infile)

    for fits in data:
#
#--- hrc has 4 different cases (all data, hrc i, hrc s, and off). tail contain which one this one is
#--- if this is not hrc (or hrc all), tail = 2
#
        mc = re.search('hrc', fits)
        if mc is not None:
            atemp = re.split('_', fits)
            # NOTE(review): '.fits' is a regex here; the unescaped '.' matches
            # any character --- works in practice but '\.fits' is intended
            btemp = re.split('.fits', atemp[1])
            tail  =  btemp[0]
        else:
            tail  = 2

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = []
        for ent in tbdata.field('time'):
            stime = float(ent)
#
#--- check whether the time is in dom 
#   
            if stime < 31536000:
                stime = ecf.dom_to_stime(float(ent))

            time.append(stime)

        for col in cols:
            col = col.lower()
#
#--- we need only *_avg columns
#
            mc = re.search('_avg', col)
            if mc is not None:

                vals = tbdata.field(col)
             
                ctime = []
                cvals = []
                for m in range(0, len(time)):
#
#--- skip the data value "nan" and dummy values (-999, -998, -99, 99, 998, 999)
#
                    if str(vals[m]) in  ['nan', 'NaN', 'NAN']:
                        continue

                    nval = float(vals[m])
                    if nval in [-999, -998, -99, 99, 998, 999]:
                        continue
                    else:
                        ctime.append(time[m])
                        cvals.append(nval)
    
                atemp = re.split('_', col)
                msid  = atemp[-2]
                #
                #--- numeric tail means "not an hrc sub-case": plain msid name
                #
                if mcf.chkNumeric(tail):
                    oname = msid
                else:
                    oname = msid + '_' + tail
    
                print "MSID: " + str(oname)
                #
                #--- remove the old output before rebuilding it
                #
                cmd = 'rm ' + data_dir + oname + '_data'
                os.system(cmd)
#
#--- read limit table for the msid
#
                l_list   = ecf.set_limit_list(msid)
                if len(l_list) == 0:
                    try:
                        # NOTE(review): mta_db is not defined in this function;
                        # unless it exists at module scope this always raises
                        # and silently falls through to l_list = []
                        l_list = mta_db[msid]
                    except:
                        l_list = []
    
                update_data(msid, l_list, dset = tail, time=ctime, vals=cvals)
# Esempio n. 13
# 0
def update_simsuppl_data(date=''):
    """
    collect sim diag msids
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to simdiag
    NOTE: written for Python 2 (print statements, bare except clauses)
    """
    #
    #--- read the msid list; each line holds <msid> <group directory>
    #
    sfile = house_keeping + 'msid_list_simactu_supple'
    data = ecf.read_file_data(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = read_cross_check_table()

    #
    #--- find date to read the data
    #
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = find_the_last_entry_time(yesterday)
    else:
        date_list = [date]

    for sday in date_list:
        print "Date: " + sday
        #
        #--- extract one full day of data
        #
        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'

        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = sim\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'

        fo = open(zspace, 'w')
        fo.write(line)
        fo.close()
        #
        #--- run arc5gl (ska copy first, simul copy as fallback);
        #--- note: os.system does not raise on command failure
        #
        try:
            cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)
        except:
            cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)

        mcf.rm_file(zspace)
        #
        #--- find the names of the fits files of the day of the group
        #--- (first line of the arc5gl output is a header; drop it)
        #
        try:
            flist = ecf.read_file_data('ztemp_out', remove=1)
            flist = flist[1:]
        except:
            print "\t\tNo data"
            continue

        if len(flist) < 1:
            print "\t\tNo data"
            continue
#
#--- combined them
#
        flen = len(flist)

        if flen == 0:
            continue

        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)

        else:
            #
            #--- append all fits tables into a single ztemp.fits
            #
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove indivisual fits files
#

        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)

#
#--- read out the data for the full day; the column list from the fits
#--- file is ignored --- only the msids read from the msid list are used
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')

        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
        #
        #--- get time data in the list form
        #
        dtime = list(tbdata.field('time'))

        for k in range(0, len(cols)):
            #
            #--- select col name without ST_ (which is standard dev)
            #
            col = cols[k]
            #
            #---- extract data in a list form
            #
            data = list(tbdata.field(col))
            #
            #--- change col name to msid
            #
            msid = col.lower()
            #
            #--- get limit data table for the msid
            #
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0

            glim = get_limit(msid, tchk, mta_db, mta_cross)
            #
            #--- update database
            #
            update_database(msid, g_dir[msid], dtime, data, glim)
# Esempio n. 14
# 0
def recover_hrcveto_data():
    """
    recover hrc veto data for the full mission period
    input:  none --- the data period is hard coded: 1999-08 through 2017-10
    output: updated trending database entries for the Hrcveto group
            (written out through update_database)
    """
#
#--- read group names which need special treatment
#--- (currently hard coded to the single Hrcveto group)
#
    #sfile = 'eph_list'
    #glist = ecf.read_file_data(sfile)
    glist = ['Hrcveto']
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = read_cross_check_table()

#
#--- NOTE(review): day_list is never used in this function
#
    day_list = []
#
#--- loop over one-calendar-month periods of the mission
#
    for year in range(1999, 2018):
        lyear = year
        cyear = str(year)
        for mon in range(1, 13):
#
#--- mission data start in 1999-08 and this recovery stops after 2017-10
#
            if year == 1999:
                if mon < 8:
                    continue

            if year == 2017:
                if mon > 10:
                    break
#
#--- zero-padded month of the period start
#
            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
#
#--- the period ends on the 1st of the following month; December rolls
#--- over into January of the next year (lyear)
#
            nmon = mon + 1
            if nmon > 12:
                nmon = 1
                lyear += 1

            cnmon = str(nmon)
            if nmon < 10:
                cnmon = '0' + cnmon

            start = str(year) + '-' + cmon + '-01T00:00:00'
            stop = str(lyear) + '-' + cnmon + '-01T00:00:00'

            for group in glist:
                print "Group: " + group + ' : ' + str(start) + '<-->' + str(
                    stop)
#
#--- arc5gl command script: hrc level 0 hrcss files for the month
#
                line = 'operation=retrieve\n'
                line = line + 'dataset = flight\n'
                line = line + 'detector = hrc\n'
                line = line + 'level = 0\n'
                line = line + 'filetype = hrcss\n'
                line = line + 'tstart = ' + start + '\n'
                line = line + 'tstop = ' + stop + '\n'
                line = line + 'go\n'

                fo = open(zspace, 'w')
                fo.write(line)
                fo.close()
#
#--- run arc5gl; fall back to the simul copy if the ska copy fails
#--- (os.system rarely raises, so the except path is mostly a safeguard)
#
                try:
                    cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
                    os.system(cmd)
                except:
                    cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
                    os.system(cmd)

                mcf.rm_file(zspace)
                #
                #--- find the names of the fits files extracted for the period;
                #--- the first line of ztemp_out is a header and is dropped
                #
                try:
                    flist = ecf.read_file_data('ztemp_out', remove=1)
                    flist = flist[1:]
                except:
                    print "\t\tNo data"
                    continue

                if len(flist) < 1:
                    print "\t\tNo data"
                    continue
#
#--- combine the extracted fits files into a single ztemp.fits
#
                flen = len(flist)

                if flen == 0:
                    continue

                elif flen == 1:
                    cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                    os.system(cmd)

                else:
                    mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                    if flen > 2:
                        for k in range(2, flen):
                            mfo.appendFitsTable('ztemp.fits', flist[k],
                                                'out.fits')
                            cmd = 'mv out.fits ztemp.fits'
                            os.system(cmd)
#
#--- remove individual fits files
#

                for ent in flist:
                    cmd = 'rm -rf ' + ent
                    os.system(cmd)

#
#--- read out the combined data for the period
#
                [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

#
#--- only these three veto-rate columns are trended
#
                cols = ['TLEVART', 'VLEVART', 'SHEVART']

                cmd = 'rm -f ztemp.fits out.fits'
                os.system(cmd)
                #
                #--- get time data in the list form
                #
                dtime = list(tbdata.field('time'))

                for col in cols:
                    #
                    #---- extract data in a list form
                    #
                    data = list(tbdata.field(col))
                    #
                    #--- change col name to msid
                    #
                    msid = col.lower()
                    #
                    #--- get limit data table for the msid; fall back to 0
                    #--- (no unit conversion) when the msid has no unit entry
                    #
                    try:
                        tchk = convert_unit_indicator(udict[msid])
                    except:
                        tchk = 0

                    glim = get_limit(msid, tchk, mta_db, mta_cross)
                    #
                    #--- update database
                    #
                    update_database(msid, group, dtime, data, glim)
Esempio n. 15
0
def dea_full_data_update(chk):
    """
    update deahk search database
    input:  chk --- whether to request full data update: chk == 1:yes
    output: <deposit_dir>/Deahk_save/<group>/<msid>_full_data_<year>.fits
    """
    tyear = int(float(time.strftime("%Y", time.gmtime())))
#
#--- collect all weekly deahk fits files
#
    cmd = 'ls ' + data_dir + 'Deahk_*/*_week_data.fits > ' + zspace
    os.system(cmd)
    data = mcf.read_data_file(zspace, remove=1)

    for ent in data:
#
#--- path form: <data_dir>/Deahk_<group>/<msid>_week_data.fits
#
        atemp = re.split('\/', ent)
        group = atemp[-2]
        btemp = re.split('_', atemp[-1])
        msid = btemp[0]
        print("MSID: " + str(msid) + ' in ' + group)

        [cols, tbdata] = ecf.read_fits_file(ent)
#
#--- name the local "dtime", not "time": a local named "time" would shadow
#--- the time module and make time.strftime above raise UnboundLocalError
#
        dtime = tbdata['time']
        tdata = tbdata[msid]
        cols = ['time', msid]
#
#--- regular data update
#
        if chk == 0:
#
#--- normal daily data update: append to this year's file
#
            ofits = deposit_dir + 'Deahk_save/' + group + '/' + msid + '_full_data_'
            ofits = ofits + str(tyear) + '.fits'
            if os.path.isfile(ofits):
                ltime = ecf.find_the_last_entry_time(ofits)
#
#--- compare in seconds from 1998.1.1, not as a raw date string
#
                ctime = Chandra.Time.DateTime(str(tyear + 1) + ':001:00:00:00').secs
                nchk = 0
#
#--- if the data is over the year boundary, fill up the last year and
#--- create a new file for the new year
#
            else:
                ofits = deposit_dir + 'Deahk_save/' + group + '/' + msid
                ofits = ofits + '_full_data_' + str(tyear - 1) + '.fits'
                nfits = deposit_dir + 'Deahk_save/' + group + '/' + msid
                nfits = nfits + '_full_data_' + str(tyear) + '.fits'

                ltime = ecf.find_the_last_entry_time(ofits)
                ctime = Chandra.Time.DateTime(str(tyear) + ':001:00:00:00').secs
                nchk = 1
#
#--- append only data newer than the last recorded entry
#
            select = (dtime > ltime) & (dtime < ctime)
            stime = dtime[select]
            sdata = tdata[select]
            cdata = [stime, sdata]
            ecf.update_fits_file(ofits, cols, cdata)
#
#--- data past the year boundary start the new year's file
#
            if nchk > 0:
                select = dtime >= ctime
                stime = dtime[select]
                sdata = tdata[select]
                cdata = [stime, sdata]
                ecf.create_fits_file(nfits, cols, cdata)
#
#--- start from beginning (year 1999): rebuild one fits file per year
#
        else:
            for year in range(1999, tyear + 1):
                tstart = str(year) + ':001:00:00:00'
                tstart = Chandra.Time.DateTime(tstart).secs
                tstop = str(year + 1) + ':001:00:00:00'
                tstop = Chandra.Time.DateTime(tstop).secs

                select = (dtime >= tstart) & (dtime < tstop)
                stime = dtime[select]
                sdata = tdata[select]
                cdata = [stime, sdata]

                out = deposit_dir + 'Deahk_save/' + group + '/'
                if not os.path.isdir(out):
                    cmd = 'mkdir ' + out
                    os.system(cmd)      # fix: the mkdir command was built but never run

                out = out + msid + '_full_data_' + str(year) + '.fits'

                ecf.create_fits_file(out, cols, cdata)
Esempio n. 16
0
def get_data(start, stop, year, msid_list, out_dir):
    """
    update eph l1 related data for the given data period
    input:  start   --- start time in seconds from 1998.1.1
            stop    --- stop time in seconds from 1998.1.1
            year    --- data extracted year
            msid_list   --- list of msids
            out_dir --- output_directory
    output: <out_dir>/<msid>_full_data_<year>.fits
    """

    print str(start) + '<-->' + str(stop)
#
#--- arc5gl command script: ephin hk level 0 files for the period
#
    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ephin\n'
    line = line + 'level = 0\n'
    line = line + 'filetype =ephhk \n'
    line = line + 'tstart = ' + str(start) + '\n'
    line = line + 'tstop = '  + str(stop)  + '\n'
    line = line + 'go\n'

    fo = open(zspace, 'w')
    fo.write(line)
    fo.close()
#
#--- run arc5gl; fall back to the simul copy if the ska copy fails
#
    try:
        cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
        os.system(cmd)
    except:
        cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
        os.system(cmd)

    mcf.rm_file(zspace)
#
#--- list of extracted fits files; the first line of ztemp_out is a header
#
    data_list = ecf.read_file_data('ztemp_out', remove=1)
    data_list = data_list[1:]
#
#--- append the data to the local fits data files
#
    for fits in data_list:

        [cols, tbdata] = ecf.read_fits_file(fits)

        time  = tbdata['time']

        for col in msid_list:
#
#--- extract one column per requested msid
#--- (msid_list is assumed to exclude "ST_" std-dev columns and time)
#
            mdata = tbdata[col]
            cdata = [time, mdata]
            ocols = ['time', col.lower()]

            if not os.path.isdir(out_dir):
                cmd = 'mkdir ' + out_dir
                os.system(cmd)
#
#--- append if the yearly file already exists; otherwise create it
#
            ofits = out_dir + col.lower()+ '_full_data_' + str(year) +'.fits'
            if os.path.isfile(ofits):
                ecf.update_fits_file(ofits, ocols, cdata)
            else:
                ecf.create_fits_file(ofits, ocols, cdata)
#
#--- remove the extracted archive fits file once processed
#
        mcf.rm_file(fits)
Esempio n. 17
0
def update_mta_comp_database():
    """
    update database of mta computed msids
    input:  none but read from /data/mta4/Deriv/*fits files
    output: updated data file: <data_dir>/<msid>_data
    """
    #
    #--- get a list of data fits file names
    #
    infile = house_keeping + 'mta_comp_fits_files'
    data = ecf.read_file_data(infile)

    for fits in data:
        #
        #--- hrc has 4 different cases (all data, hrc i, hrc s, and off). tail contain which one this one is
        #--- if this is not hrc (or hrc all), tail = 2
        #
        mc = re.search('hrc', fits)
        if mc is not None:
            atemp = re.split('_', fits)
            btemp = re.split('.fits', atemp[1])
            tail = btemp[0]
        else:
            tail = 2

        [cols, tbdata] = ecf.read_fits_file(fits)

        time = []
        for ent in tbdata.field('time'):
            stime = float(ent)
            #
            #--- check whether the time is in dom; values below one year's
            #--- worth of seconds are taken as dom and converted to seconds
            #
            if stime < 31536000:
                stime = ecf.dom_to_stime(float(ent))

            time.append(stime)

        for col in cols:
            col = col.lower()
            #
            #--- we need only *_avg columns
            #
            mc = re.search('_avg', col)
            if mc is not None:

                vals = tbdata.field(col)

                ctime = []
                cvals = []
                for m in range(0, len(time)):
                    #
                    #--- skip the data value "nan" and dummy values (-999, -998, -99, 99, 998, 999)
                    #
                    if str(vals[m]) in ['nan', 'NaN', 'NAN']:
                        continue

                    nval = float(vals[m])
                    if nval in [-999, -998, -99, 99, 998, 999]:
                        continue
                    else:
                        ctime.append(time[m])
                        cvals.append(nval)
                #
                #--- column name form: <...>_<msid>_avg  --> msid
                #
                atemp = re.split('_', col)
                msid = atemp[-2]
                #
                #--- numeric tail (2) means non-hrc; otherwise append the
                #--- hrc case tail to the output name
                #
                if mcf.chkNumeric(tail):
                    oname = msid
                else:
                    oname = msid + '_' + tail

                print "MSID: " + str(oname)
                #
                #--- remove the old data file before update_data rewrites it
                #
                cmd = 'rm ' + data_dir + oname + '_data'
                os.system(cmd)
                #
                #--- read limit table for the msid
                #--- NOTE(review): mta_db is not defined in this function;
                #--- presumably a module-level global --- verify
                #
                l_list = ecf.set_limit_list(msid)
                if len(l_list) == 0:
                    try:
                        l_list = mta_db[msid]
                    except:
                        l_list = []

                update_data(msid, l_list, dset=tail, time=ctime, vals=cvals)
Esempio n. 18
0
def update_ephhk_data():
    """
    update eph hk related msid data
    input:  none
    output: updated data fits files
    """
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()
    #
    #--- find the data period
    #
    [tstart, tstop] = find_data_period()
    #
    #--- extract fits files from archive
    #
    ofits = extract_archive_data(tstart, tstop)
    #
    #--- if no data is extracted, stop (exits the entire process)
    #
    if ofits == False:
        exit(1)
#
#--- read out the data
#
    [cols, tbdata] = ecf.read_fits_file(ofits)

    cmd = 'rm -f out.fits ' + ofits
    os.system(cmd)
    #
    #--- get time data in the list form
    #
    dtime = list(tbdata.field('time'))
    #
    #--- find useable colnames
    #
    col_list = find_col_names(cols)

    for col in col_list:
        #
        #---- extract data in a list form
        #
        data = list(tbdata.field(col))
        #
        #--- change col name to msid
        #
        msid = col.lower()
        #
        #--- get limit data table for the msid; fall back to 0 (no unit
        #--- conversion) when the msid has no unit entry
        #
        try:
            tchk = convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
        #
        #--- update database
        #
        update_database(msid, 'ephhk', dtime, data, glim)
Esempio n. 19
0
def update_simdiag_data(date=''):
    """
    collect sim diag msids
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
                        NOTE(review): the argument is currently unused; the day
                        range is hard coded below --- verify intent
    output: fits file data related to grad and comp
    """
    #
    #--- read msid list; column 0 is the msid, column 1 its group directory
    #
    #sfile = house_keeping + 'msid_list_simdiag'
    sfile = './msid_list_simsupple'
    data = mcf.read_data_file(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()
#
#--- build the list of days to process: 1999-08-01 through 2020-01-31
#
    day_list = []
    for year in range(1999, 2021):
        cyear = str(year)
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue
            if year == 2020:
                if mon > 1:
                    break

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
#
#--- number of days in the month, leap years included
#
            if mcf.is_leapyear(year):
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]

            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday

                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)

    for sday in day_list:
#
#--- hard stop of the recovery run at 2020-01-17
#
        if sday == '2020-01-17':
            break
        print("Date: " + sday)

        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'
#
#--- arc5gl command script: sim level 0 files for the day
#
        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = sim\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'

        flist = mcf.run_arc5gl_process(line)

        if len(flist) < 1:
            print("\t\tNo data")
            continue
#
#--- combine the extracted fits files into a single ztemp.fits
#
        flen = len(flist)

        if flen == 0:
            continue

        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)

        else:
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove individual fits files
#
        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)
#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')

        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
        #
        #--- get time data in the list form
        #
        dtime = list(tbdata.field('time'))

        for k in range(0, len(cols)):
            #
            #---- extract data in a list form
            #
            col = cols[k]
            data = list(tbdata.field(col))
            #
            #--- change col name to msid
            #
            msid = col.lower()
            #
            #--- get limit data table for the msid; fall back to 0 (no unit
            #--- conversion) when the msid has no unit entry
            #
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0

            glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
            #
            #--- update database for this day's period
            #
            tstart = convert_time_format(start)
            tstop = convert_time_format(stop)

            update_database(msid,
                            g_dir[msid],
                            dtime,
                            data,
                            glim,
                            pstart=tstart,
                            pstop=tstop)
Esempio n. 20
0
def update_eph_data_from_comm(date = ''):
    """
    collect eph data for trending
    input:  date    ---- the data collection end date in yyyymmdd format. 
                        if not given, yesterday's date is used
    output: fits file data related to grad and comp; an email is sent out
            when update_database reports problems
    """
#
#--- read group names which need special treatment
#--- (currently hard coded to the single ephhk group)
#
    #sfile = house_keeping + 'eph_list'
    #glist = mcf.read_data_file(sfile)
    glist = ['ephhk',]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- find date to read the data: default is the stretch of days ending
#--- yesterday as built by create_date_list
#
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = create_date_list(yesterday)

    else:
        date_list = [date]

    error_message = ''
    for day in date_list:
#
#--- find the names of the fits files of the day of the group
#
        dline = "Date: " + str(day)
        print(dline)

        for group in glist:
            print("Group: " + str(group))
            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/* > ' + zspace
            os.system(cmd)
#
#--- keep only the static eio0 hk fits files for this group
#
            tlist = mcf.read_data_file(zspace, remove=1)
            flist = []
            for ent in tlist:
                mc = re.search('_STephhk_static_eio0.fits',  ent)
                if mc is not None:
                    flist.append(ent)
#
#--- combined them
#
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                mcf.rm_files('ztemp.fits')
                mfo. appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo. appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
#
#--- get time data in the list form
#
            dtime = list(tbdata.field('time'))
#
#--- column 0 is time itself, so start from column 1
#
            for k in range(1, len(cols)):
#
#--- select col name without ST_ (which is standard dev) and without quality
#
                col = cols[k]
                mc  = re.search('ST_', col)
                if mc is not None:
                    continue
                mc  = re.search('quality', col, re.IGNORECASE)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
#
#--- change col name to msid
#
                msid = col.lower()
#
#--- get limit data table for the msid; fall back to 0 (no unit conversion)
#--- when the msid has no unit entry
#
                try:
                    tchk  = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk  = 0

                glim  = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database; a non-empty return value signals a problem
#
                wline = uds.update_database(msid, group.capitalize(), dtime, data, glim)

                if wline != "":
                    error_message = error_message + dline + '\n' + wline
#
#--- if there are errors, sending error message
#
    if error_message != "":
        error_message = 'MTA limit trend EPH got problems: \n' + error_message

        fo = open(zspace, 'w')
        fo.write(error_message)
        fo.close()
        cmd  = 'cat ' + zspace + ' | mailx -s "Subject: EPH data update problem "'
        cmd  = cmd    + '*****@*****.**'
        os.system(cmd)
        mcf.rm_files(zspace)
Esempio n. 21
0
def update_simdiag_data(date=''):
    """
    collect sim diag msids
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
                        NOTE(review): the argument is currently unused; the day
                        range is hard coded below --- verify intent
    output: fits file data related to grad and comp
    """
    #
    #--- read msid list; column 0 is the msid, column 1 its group directory
    #
    #sfile = house_keeping + 'msid_list_simdiag'
    sfile = './msid_list_ephkey'
    data = ecf.read_file_data(sfile)
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = read_cross_check_table()
#
#--- build the list of days to process: 2017-07-01 through 2018-01-31
#
    day_list = []
    for year in range(2017, 2019):
        cyear = str(year)
        for mon in range(1, 13):
            if year == 2017:
                if mon < 7:
                    continue
            if year == 2018:
                if mon > 1:
                    break

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
#
#--- number of days in the month, leap years included
#
            if tcnv.isLeapYear(year) == 1:
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]

            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday

                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)
#
#--- skip days until the start marker is seen, then process up to the break
#--- NOTE(review): '2018-07-17' never occurs in the generated day range
#--- (2017-07-01 .. 2018-01-31), so chk stays 0 and the loop body never
#--- runs; the start marker was probably meant to be '2017-07-17' --- verify
#
    chk = 0
    for sday in day_list:
        if sday == '2018-07-17':
            chk = 1
        if chk == 0:
            continue
        if sday == '2018-01-21':
            break
        print "Date: " + sday

        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'
#
#--- arc5gl command script: ephin hk level 0 files for the day
#
        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = ephin\n'
        line = line + 'level = 0\n'
        line = line + 'filetype =ephhk\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'

        fo = open(zspace, 'w')
        fo.write(line)
        fo.close()
#
#--- run arc5gl; fall back to the simul copy if the ska copy fails
#
        try:
            cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)
        except:
            cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
            os.system(cmd)

        mcf.rm_file(zspace)
        #
        #--- find the names of the fits files of the day of the group;
        #--- the first line of ztemp_out is a header and is dropped
        #
        try:
            flist = ecf.read_file_data('ztemp_out', remove=1)
            flist = flist[1:]
        except:
            print "\t\tNo data"
            continue

        if len(flist) < 1:
            print "\t\tNo data"
            continue
#
#--- combine the extracted fits files into a single ztemp.fits
#
        flen = len(flist)

        if flen == 0:
            continue

        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)

        else:
            appendFitsTable_ascds(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    appendFitsTable_ascds('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove individual fits files
#

        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)

#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')

        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
        #
        #--- get time data in the list form
        #
        dtime = list(tbdata.field('time'))

        for k in range(0, len(cols)):
            #
            #---- extract data in a list form
            #
            col = cols[k]
            data = list(tbdata.field(col))
            #
            #--- change col name to msid
            #
            msid = col.lower()
            #
            #--- get limit data table for the msid; fall back to 0 (no unit
            #--- conversion) when the msid has no unit entry
            #
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0

            glim = get_limit(msid, tchk, mta_db, mta_cross)
            #
            #--- update database for this day's period
            #
            tstart = convert_time_format(start)
            tstop = convert_time_format(stop)

            update_database(msid,
                            g_dir[msid],
                            dtime,
                            data,
                            glim,
                            pstart=tstart,
                            pstop=tstop)
Esempio n. 22
0
def extract_data_arc5gl(detector, level, filetype, tstart, tstop, sub=''):
    """
    extract data using arc5gl
    input:  detector    --- detector name
            level       --- level
            filetype    --- file name
            tstart      --- starting time
            tstop       --- stopping time
            sub         --- subdetector name; default "" --- no sub detector
    output: cols        --- a list of col name
            tdata       --- a list of arrays of data
                            ([[], []] is returned when no data are found)
    """

    #
    #--- build the arc5gl command script for the requested file type
    #
    line = 'operation=retrieve\n'
    line = line + 'dataset = flight\n'
    line = line + 'detector = ' + detector + '\n'

    if sub != '':
        line = line + 'subdetector = ' + sub + '\n'

    line = line + 'level = ' + level + '\n'
    line = line + 'filetype = ' + filetype + '\n'
    line = line + 'tstart = ' + str(tstart) + '\n'
    line = line + 'tstop = ' + str(tstop) + '\n'
    line = line + 'go\n'

    fo = open(zspace, 'w')
    fo.write(line)
    fo.close()
    #
    #--- run arc5gl; fall back to the simul copy if the ska copy fails
    #
    try:
        cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
        os.system(cmd)
    except:
        cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
        os.system(cmd)

    mcf.rm_file(zspace)
    #
    #--- find the names of the extracted fits files;
    #--- the first line of ztemp_out is a header and is dropped
    #
    try:
        flist = ecf.read_file_data('ztemp_out', remove=1)
        flist = flist[1:]
    except:
        print "\t\tNo data"
        #continue
        return [[], []]

    if len(flist) < 1:
        print "\t\tNo data"
        #continue
        return [[], []]
#
#--- combine the extracted fits files into a single ztemp.fits
#
    flen = len(flist)

    if flen == 0:
        #continue
        return [[], []]

    elif flen == 1:
        cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
        os.system(cmd)

    else:
        mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
        if flen > 2:
            for k in range(2, flen):
                mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                cmd = 'mv out.fits ztemp.fits'
                os.system(cmd)
#
#--- remove individual fits files
#

    for ent in flist:
        cmd = 'rm -rf ' + ent
        os.system(cmd)
#
#--- read out and return the combined data; clean up the work files
#
    [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

    cmd = 'rm -f ztemp.fits out.fits'
    os.system(cmd)

    return [cols, tbdata]
Esempio n. 23
0
def extract_data_arc5gl(detector, level, filetype, tstart, tstop, sub=''):
    """
    extract data using arc5gl
    input:  detector    --- detector name
            level       --- level
            filetype    --- file name
            tstart      --- starting time
            tstop       --- stopping time
            sub         --- subdetector name; default "" --- no sub detector
    output: cols        --- a list of col name
            tdata       --- a list of arrays of data
                            ([[], []] is returned when no data are found)
    """
#
#--- assemble the arc5gl command script line by line
#
    parts = ['operation=retrieve\n',
             'dataset = flight\n',
             'detector = ' + detector + '\n']

    if sub != '':
        parts.append('subdetector = ' + sub + '\n')

    parts.append('level = ' + level + '\n')
    parts.append('filetype = ' + filetype + '\n')
    parts.append('tstart = ' + str(tstart) + '\n')
    parts.append('tstop = ' + str(tstop) + '\n')
    parts.append('go\n')

    flist = mcf.run_arc5gl_process(''.join(parts))
#
#--- nothing extracted: report and return empty results
#
    if len(flist) < 1:
        print("\t\tNo data")
        return [[], []]
#
#--- combine all extracted fits files into a single ztemp.fits
#
    if len(flist) == 1:
        os.system('cp ' + flist[0] + ' ./ztemp.fits')
    else:
        mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
        for fits in flist[2:]:
            mfo.appendFitsTable('ztemp.fits', fits, 'out.fits')
            os.system('mv out.fits ztemp.fits')
#
#--- remove individual fits files
#
    for fits in flist:
        os.system('rm -rf ' + fits)
#
#--- read out the combined table, clean up, and return
#
    [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

    os.system('rm -f ztemp.fits out.fits')

    return [cols, tbdata]
Esempio n. 24
0
def update_grad_and_comp_data(date=''):
    """
    collect grad and  comp data for trending
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
    #
    #--- read group names which need special treatment
    #
    sfile = 'grad_special_list'
    glist = ecf.read_file_data(sfile)
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = read_cross_check_table()

    day_list = []
    for year in range(1999, 2019):
        cyear = str(year)

        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:

                    continue
            if year == 2018:
                if mon > 2:
                    break

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon

            if tcnv.isLeapYear(year) == 1:
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]

            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday

                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)

    for sday in day_list:
        print "Date: " + sday

        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'

        for group in glist:
            print "Group: " + group

            line = 'operation=retrieve\n'
            line = line + 'dataset = mta\n'
            line = line + 'detector = grad\n'
            line = line + 'level = 0.5\n'
            line = line + 'filetype = ' + group + '\n'
            line = line + 'tstart = ' + start + '\n'
            line = line + 'tstop = ' + stop + '\n'
            line = line + 'go\n'

            fo = open(zspace, 'w')
            fo.write(line)
            fo.close()

            try:
                cmd = ' /proj/sot/ska/bin/arc5gl  -user isobe -script ' + zspace + '> ztemp_out'
                os.system(cmd)
            except:
                cmd = ' /proj/axaf/simul/bin/arc5gl -user isobe -script ' + zspace + '> ztemp_out'
                os.system(cmd)

            mcf.rm_file(zspace)
            #
            #--- find the names of the fits files of the day of the group
            #
            try:
                flist = ecf.read_file_data('ztemp_out', remove=1)
                flist = flist[1:]
            except:
                print "\t\tNo data"
                continue

            if len(flist) < 1:
                print "\t\tNo data"
                continue
#
#--- combined them
#
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- remove indivisual fits files
#

            for ent in flist:
                cmd = 'rm -rf ' + ent
                os.system(cmd)

#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
            #
            #--- get time data in the list form
            #
            dtime = list(tbdata.field('time'))

            for k in range(1, len(cols)):
                #
                #--- select col name without ST_ (which is standard dev)
                #
                col = cols[k]
                mc = re.search('ST_', col)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
                #
                #--- change col name to msid
                #
                msid = col.lower()
                #
                #--- get limit data table for the msid
                #
                try:
                    tchk = convert_unit_indicator(udict[msid])
                except:
                    tchk = 0

                glim = get_limit(msid, tchk, mta_db, mta_cross)
                #
                #--- update database
                #
                update_database(msid, group, dtime, data, glim)