示例#1
0
def compute_acis_power():
    """
    compute acis power from existing msid values and update database

    input:  none, but read data from archive
    output: <msid>_data.fits/<msid>_short_data.fits/<msid>_week_data.fits
    """
#
#--- group name and the msids belonging to it
#
    group     = 'Compacispwr'
    msid_list = ['1dppwra', '1dppwrb']
#
#--- each entry: [<msid>, <voltage msid>, <current msid>, '*']
#--- (presumably power = voltage * current --- confirm in uds)
#
    msid_sub  = [['1dppwra', '1dp28avo', '1dpicacu', '*'], ['1dppwrb', '1dp28bvo', '1dpicbcu', '*']]
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- note: the original also read the unit list and computed "tchk" here,
#--- but neither value was ever used; that dead code is removed
#
    for msid in msid_list:
#
#--- get limit data table for the msid
#
        glim  = get_limit_for_acis_power(msid, mta_db)
#
#--- update database
#
        uds.update_week_database(msid, group,  glim, msid_sub = msid_sub)
示例#2
0
def compute_sim_flex():
    """
    compute the difference between sim flex temp and set point

    input:  none, but read data from archive
    output: <msid>_data.fits/<msid>_short_data.fits/<msid>_week_data.fits
    """
    #
    #--- group name and the msids belonging to it
    #
    group     = 'Compsimoffset'
    msid_list = ['flexadif', 'flexbdif', 'flexcdif']
    #
    #--- each entry: [<msid>, <flex temp msid>, <set point msid>, '-']
    #
    msid_sub  = [['flexadif', '3faflaat', '3sflxast', '-'],
                 ['flexbdif', '3faflbat', '3sflxbst', '-'],
                 ['flexcdif', '3faflcat', '3sflxcst', '-']]
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- note: the original computed "tchk" from "udict", but udict was never
    #--- defined in this function (the lookup always raised NameError into a
    #--- bare except) and tchk was never used; that dead code is removed
    #
    for msid in msid_list:
        #
        #--- get limit data table for the msid
        #
        glim = get_limit_for_acis_power(msid, mta_db)
        #
        #--- update database
        #
        uds.run_update_with_ska(msid, group, msid_sub_list=msid_sub, glim=glim)
示例#3
0
def get_data(msid, start, stop):
    """
    extract time and value lists of a given msid from the ska archive
    input:  msid    --- msid
            start   --- start time
            stop    --- stop time
    output: ttime   --- a list of time data
            tdata   --- a list of data
    """
#
#--- note: the original also read the unit list / mta database and computed
#--- "glim" and "chk", none of which were used by this function (the unit
#--- conversion branch even referenced an undefined name "uchk"); that dead
#--- code is removed.  the stale docstring ("create an interactive html
#--- page") is corrected as well
#
#--- extract data from archive; return empty lists when the fetch fails
#
    try:
        out   = fetch.MSID(msid, start, stop)
        tdata = out.vals
        ttime = out.times
    except:
        tdata = []
        ttime = []

    return [ttime, tdata]
示例#4
0
def update_simdiag_data(date=''):
    """
    collect sim diag msids
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
                         NOTE(review): this argument is never referenced below;
                         the period is hard-coded to 1999-08-01 .. 2020-01-16 --- confirm intent
    output: fits file data related to grad and comp
    """
    #
    #--- read group names which need special treatment
    #
    #sfile = house_keeping + 'msid_list_simdiag'
    sfile = './msid_list_simsupple'
    data = mcf.read_data_file(sfile)
    #
    #--- cols:  column (msid) names to extract;  g_dir: msid ---> group name map
    #
    cols = []
    g_dir = {}
    for ent in data:
        atemp = re.split('\s+', ent)
        cols.append(atemp[0])
        g_dir[atemp[0]] = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()

    #
    #--- build the list of days (yyyy-mm-dd) covering 1999-08 through 2020-01
    #
    day_list = []
    for year in range(1999, 2021):
        cyear = str(year)
        for mon in range(1, 13):
            #--- data start in Aug 1999; stop after Jan 2020
            if year == 1999:
                if mon < 8:
                    continue
            if year == 2020:
                if mon > 1:
                    break

            #--- zero-padded month string
            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon

            #--- number of days in this month (leap-year aware)
            if mcf.is_leapyear(year):
                lday = mday_list2[mon - 1]
            else:
                lday = mday_list[mon - 1]

            for day in range(1, lday + 1):
                cday = str(day)
                if day < 10:
                    cday = '0' + cday

                sday = cyear + '-' + cmon + '-' + cday
                day_list.append(sday)

    for sday in day_list:
        #--- hard stop at 2020-01-17 (exclusive)
        if sday == '2020-01-17':
            break
        print("Date: " + sday)

        start = sday + 'T00:00:00'
        stop = sday + 'T23:59:59'

        #
        #--- arc5gl retrieval script for one full day of sim level 0 data
        #
        line = 'operation=retrieve\n'
        line = line + 'dataset = flight\n'
        line = line + 'detector = sim\n'
        line = line + 'level = 0\n'
        line = line + 'filetype = sim\n'
        line = line + 'tstart = ' + start + '\n'
        line = line + 'tstop = ' + stop + '\n'
        line = line + 'go\n'

        flist = mcf.run_arc5gl_process(line)

        if len(flist) < 1:
            print("\t\tNo data")
            continue
#
#--- combined them
#
        flen = len(flist)

        if flen == 0:
            continue

        elif flen == 1:
            cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
            os.system(cmd)

        else:
            #--- append all extracted fits tables into one file: ztemp.fits
            mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
            if flen > 2:
                for k in range(2, flen):
                    mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                    cmd = 'mv out.fits ztemp.fits'
                    os.system(cmd)
#
#--- remove indivisual fits files
#
        for ent in flist:
            cmd = 'rm -rf ' + ent
            os.system(cmd)
#
#--- read out the data for the full day
#
        [cols_xxx, tbdata] = ecf.read_fits_file('ztemp.fits')

        cmd = 'rm -f ztemp.fits out.fits'
        os.system(cmd)
        #
        #--- get time data in the list form
        #
        dtime = list(tbdata.field('time'))

        for k in range(0, len(cols)):
            #
            #---- extract data in a list form
            #
            col = cols[k]
            data = list(tbdata.field(col))
            #
            #--- change col name to msid
            #
            msid = col.lower()
            #
            #--- get limit data table for the msid
            #--- NOTE(review): unprefixed convert_unit_indicator --- presumably
            #--- a local sibling (elsewhere ecf.convert_unit_indicator); a
            #--- NameError here is silently turned into tchk = 0 by the bare except
            #
            try:
                tchk = convert_unit_indicator(udict[msid])
            except:
                tchk = 0

            glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
            #
            #--- update database
            #--- NOTE(review): convert_time_format / update_database are also
            #--- unprefixed --- presumably local siblings; confirm
            #
            tstart = convert_time_format(start)
            tstop = convert_time_format(stop)

            update_database(msid,
                            g_dir[msid],
                            dtime,
                            data,
                            glim,
                            pstart=tstart,
                            pstop=tstop)
def update_grad_and_comp_data(date=''):
    """
    collect grad and comp data for trending
    input:  date    ---- the data collection end date in yyyymmdd format. if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
    #
    #--- read group names which need special treatment
    #
    sfile = house_keeping + 'mp_process_list'
    glist = ecf.read_file_data(sfile)
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()
    #
    #--- find date to read the data
    #
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = find_the_last_entry_time(yesterday)

    else:
        date_list = [date]

    for day in date_list:
        #
        #--- find the names of the fits files of the day of the group
        #--- (print statements converted to function form: works the same
        #--- under python 2 and python 3 for a single argument)
        #
        print("Date: " + str(day))

        for group in glist:
            print("Group: " + str(group))
            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/mta*fits* > ' + zspace
            os.system(cmd)

            flist = ecf.read_file_data(zspace, remove=1)
            #
            #--- combined them
            #
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
            #
            #--- get time data in the list form
            #
            dtime = list(tbdata.field('time'))

            for k in range(1, len(cols)):
                #
                #--- select col name without ST_ (which is standard dev)
                #
                col = cols[k]
                mc = re.search('ST_', col)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
                #
                #--- change col name to msid
                #
                msid = col.lower()
                #
                #--- get limit data table for the msid
                #
                try:
                    tchk = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk = 0
                #
                #--- NOTE(review): unprefixed get_limit --- presumably a local
                #--- sibling (elsewhere ecf.get_limit); confirm
                #
                glim = get_limit(msid, tchk, mta_db, mta_cross)
                #
                #--- update database
                #
                update_database(msid, group, dtime, data, glim)
示例#6
0
def update_eph_data_from_comm(date = ''):
    """
    collect eph data for trending
    input:  date    ---- the data collection end date in yyyymmdd format.
                        if not given, yesterday's date is used
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    #sfile = house_keeping + 'eph_list'
    #glist = mcf.read_data_file(sfile)
    glist = ['ephhk',]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- find date to read the data
#
    if date == '':
        yesterday = datetime.date.today() - datetime.timedelta(1)
        yesterday = str(yesterday).replace('-', '')
        date_list = create_date_list(yesterday)

    else:
        date_list = [date]

    error_message = ''
    for day in date_list:
#
#--- find the names of the fits files of the day of the group
#
        dline = "Date: " + str(day)
        print(dline)

        for group in glist:
            print("Group: " + str(group))
            cmd = 'ls /data/mta_www/mp_reports/' + day + '/' + group + '/data/* > ' + zspace
            os.system(cmd)
#
#--- keep only the static eio0 fits files
#
            tlist = mcf.read_data_file(zspace, remove=1)
            flist = []
            for ent in tlist:
                mc = re.search('_STephhk_static_eio0.fits',  ent)
                if mc is not None:
                    flist.append(ent)
#
#--- combined them
#
            flen = len(flist)

            if flen == 0:
                continue

            elif flen == 1:
                cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                os.system(cmd)

            else:
                mcf.rm_files('ztemp.fits')
                mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                if flen > 2:
                    for k in range(2, flen):
                        mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                        cmd = 'mv out.fits ztemp.fits'
                        os.system(cmd)
#
#--- read out the data for the full day
#
            [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

            cmd = 'rm -f ztemp.fits out.fits'
            os.system(cmd)
#
#--- get time data in the list form
#
            dtime = list(tbdata.field('time'))

            for k in range(1, len(cols)):
#
#--- select col name without ST_ (which is standard dev); skip quality columns
#
                col = cols[k]
                mc  = re.search('ST_', col)
                if mc is not None:
                    continue
                mc  = re.search('quality', col, re.IGNORECASE)
                if mc is not None:
                    continue
#
#---- extract data in a list form
#
                data = list(tbdata.field(col))
#
#--- change col name to msid
#
                msid = col.lower()
#
#--- get limit data table for the msid
#
                try:
                    tchk  = ecf.convert_unit_indicator(udict[msid])
                except:
                    tchk  = 0

                glim  = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database; a non-empty return value is an error report
#
                wline = uds.update_database(msid, group.capitalize(), dtime, data, glim)

                if wline != "":
                    error_message = error_message + dline + '\n' + wline
#
#--- if there are errors, sending error message
#
    if error_message != "":
        error_message = 'MTA limit trend EPH got problems: \n' + error_message

        with open(zspace, 'w') as fo:
            fo.write(error_message)
#
#--- bug fix: a space was missing between the quoted subject and the
#--- address, which glued them into a single shell token (no recipient)
#
        cmd  = 'cat ' + zspace + ' | mailx -s "Subject: EPH data update problem " '
        cmd  = cmd    + '*****@*****.**'
        os.system(cmd)
        mcf.rm_files(zspace)
示例#7
0
def update_msid_data(msid_list='msid_list_fetch'):
    """
    update all msids listed in msid_list
    input:  msid_list   --- the name of a file listing msids to be processed.
                            default: msid_list_fetch
    output: <msid>_data.fits/<msid>_short_data.fits
    """
    start_time = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()
    #
    #--- read msid list
    #
    mfile = house_keeping + msid_list
    data = mcf.read_data_file(mfile)

    for ent in data:
        #
        #--- skip comment lines
        #
        if re.search('#', ent) is not None:
            continue
        #
        #--- find msid and group name (first two white-space separated fields).
        #--- bug fix: the original called msid.strip()/group.strip() without
        #--- keeping the result --- str.strip returns a new string, so those
        #--- calls were no-ops; the result is now assigned
        #
        atemp = re.split(r'\s+', ent)
        msid  = atemp[0].strip()
        group = atemp[1].strip()
        #
        #--- get limit data table for the msid
        #--- NOTE(review): unprefixed convert_unit_indicator/get_limit ---
        #--- presumably local siblings (elsewhere ecf.*); confirm
        #
        try:
            tchk = convert_unit_indicator(udict[msid])
        except:
            tchk = 0
        #
        #--- msids in sp_limt_case_c force tchk = 1
        #
        if msid in sp_limt_case_c:
            tchk = 1

        glim = get_limit(msid, tchk, mta_db, mta_cross)
        #
        #--- update database
        #
        update_database(msid, group, glim)
#
#--- test entry to check how long it took
#
    end_time = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())

    line = "trending plotting: \n"
    line = line + "Started: " + start_time + '\n'
    line = line + "Ended: " + end_time + '\n'

    print(line)
示例#8
0
def recover_hrcveto_data():
    """
    recover hrc veto data
    input:  none
    output: fits file data related to grad and comp
    """
#
#--- read group names which need special treatment
#
    #sfile = 'eph_list'
    #glist = mcf.read_data_file(sfile)
    glist  = ['Hrcveto']
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- loop over month-long periods from 1999-08 through 2017-10
#
    for year in range(1999, 2018):
        lyear = year
        for mon in range(1, 13):
            if year == 1999:
                if mon < 8:
                    continue

            if year == 2017:
                if mon > 10:
                    break

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
#
#--- the period ends on the first day of the following month
#
            nmon = mon + 1
            if nmon > 12:
                nmon = 1
                lyear += 1

            cnmon = str(nmon)
            if nmon < 10:
                cnmon = '0' + cnmon

            start = str(year)  + '-' + cmon  + '-01T00:00:00'
            stop  = str(lyear) + '-' + cnmon + '-01T00:00:00'

            for group in glist:
                print("Group: " + group + ' : ' + str(start) + '<-->' + str(stop))
#
#--- arc5gl retrieval script for one month of hrc ss level 0 data
#
                line = 'operation=retrieve\n'
                line = line + 'dataset = flight\n'
                line = line + 'detector = hrc\n'
                line = line + 'level = 0\n'
                line = line + 'filetype = hrcss\n'
                line = line + 'tstart = '   + start + '\n'
                line = line + 'tstop = '    + stop  + '\n'
                line = line + 'go\n'
#
#--- bug fix: the result used to be assigned to "flinst" while all the code
#--- below reads "flist", which raised NameError on every iteration
#
                flist = mcf.run_arc5gl_process(line)

                if len(flist) < 1:
                    print("\t\tNo data")
                    continue
#
#--- combine them
#
                flen = len(flist)

                if flen == 1:
                    cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                    os.system(cmd)

                else:
                    mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                    if flen > 2:
                        for k in range(2, flen):
                            mfo.appendFitsTable('ztemp.fits', flist[k], 'out.fits')
                            cmd = 'mv out.fits ztemp.fits'
                            os.system(cmd)
#
#--- remove indivisual fits files
#
                for ent in flist:
                    cmd = 'rm -rf ' + ent
                    os.system(cmd)
#
#--- read out the data for the full period
#
                [cols, tbdata] = ecf.read_fits_file('ztemp.fits')
#
#--- only these three columns are trended
#
                cols = ['TLEVART', 'VLEVART', 'SHEVART']

                cmd = 'rm -f ztemp.fits out.fits'
                os.system(cmd)
#
#--- get time data in the list form
#
                dtime = list(tbdata.field('time'))

                for col in cols:
#
#---- extract data in a list form
#
                    data = list(tbdata.field(col))
#
#--- change col name to msid
#
                    msid = col.lower()
#
#--- get limit data table for the msid
#--- NOTE(review): unprefixed convert_unit_indicator --- presumably a local
#--- sibling; a NameError here is silently turned into tchk = 0
#
                    try:
                        tchk  = convert_unit_indicator(udict[msid])
                    except:
                        tchk  = 0

                    glim  = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
                    update_database(msid, group, dtime, data, glim)
def create_interactive_page(msid, group, mtype, start, stop, step):
    """
    create an interactive html page for a given msid
    input:  msid    --- msid
            group   --- group name
            mtype   --- mid, mde, min, or max
            start   --- start time
            stop    --- stop time
            step    --- bin size in seconds
    """
    start = ecf.check_time_format(start)
    stop  = ecf.check_time_format(stop)
#
#--- note: the original also read the unit list / mta database and computed
#--- "tchk", none of which were used (glim comes from make_glim below); the
#--- dead code even referenced an undefined name "uchk" --- removed
#
#--- get limit data table for the msid
#
    glim = make_glim(msid)
    #
    #--- extract data from archive
    #
    chk = 0
    try:
        [ttime, tdata] = rf.get_data(msid, start, stop)
    except:
        #
        #--- if no data in archive, try mta local database
        #
        try:
            [ttime, tdata] = get_mta_fits_data(msid, group, start, stop)
        #
        #--- if it is also failed, return the empty data set
        #
        except:
            chk = 1
            ttime = []
            tdata = []
#
#--- only short_p can change step size (by setting "step")
#
    if chk == 0:
        data_p = process_day_data(msid, ttime, tdata, glim, step=step)
    else:
        data_p = 'na'
#
#--- create interactive html page
#
    create_html_page(msid, group, data_p, mtype, step)
def update_eph_data(date=''):
    """
    collect grad and comp data for trending
    input:  date    ---- the date in yyyymmdd format. if not given, yesterday's date is used
                         (NOTE(review): currently unused; the period below is hard-coded)
    output: fits file data related to grad and comp
    """
    #
    #--- read group names which need special treatment
    #
    #sfile = 'eph_list'
    #glist = mcf.read_data_file(sfile)
    #
    #--- FIXME: glist is referenced in the loop below but is never defined ---
    #--- the lines above which used to set it are commented out, so this
    #--- function raises NameError unless glist is restored
    #
    #--- create msid <---> unit dictionary
    #
    [udict, ddict] = ecf.read_unit_list()
    #
    #--- read mta database
    #
    mta_db = ecf.read_mta_database()
    #
    #--- read mta msid <---> sql msid conversion list
    #
    mta_cross = ecf.read_cross_check_table()

    day_list = []
    for year in range(2000, 2019):  #---- CHANGE CHANGE CHAGE!!!!!
        lyear = year
        for mon in range(1, 13):
            #if year == 2018 and mon > 1:
            #    break
            #if year == 2017 and mon < 11:
            #    continue

            cmon = str(mon)
            if mon < 10:
                cmon = '0' + cmon
            #
            #--- the period ends on the first day of the following month
            #
            nmon = mon + 1
            if nmon > 12:
                nmon = 1
                lyear += 1

            clmon = str(nmon)
            if nmon < 10:
                clmon = '0' + clmon

            start = str(year) + '-' + cmon + '-01T00:00:00'
            stop = str(lyear) + '-' + clmon + '-01T00:00:00'
            #
            #--- print statements converted to function form for python 2/3
            #--- compatibility
            #
            print("Period: " + str(start) + "<--->" + str(stop))

            for group in glist:
                print("Group: " + group)
                #
                #---CHANGE THE DETECTOR/FILETYPE BEFORE RUNNING IF IT IS DIFFERENT FROM EPHHK
                #
                line = 'operation=retrieve\n'
                line = line + 'dataset=flight\n'
                line = line + 'detector=ephin\n'
                line = line + 'level=0\n'
                line = line + 'filetype=ephhk\n'
                line = line + 'tstart=' + start + '\n'
                line = line + 'tstop=' + stop + '\n'
                line = line + 'go\n'

                flist = mcf.run_arc5gl_process(line)

                if len(flist) < 1:
                    print("\t\tNo data")
                    continue
#
#--- combined them
#
                flen = len(flist)

                if flen == 0:
                    continue

                elif flen == 1:
                    cmd = 'cp ' + flist[0] + ' ./ztemp.fits'
                    os.system(cmd)

                else:
                    mfo.appendFitsTable(flist[0], flist[1], 'ztemp.fits')
                    if flen > 2:
                        for k in range(2, flen):
                            mfo.appendFitsTable('ztemp.fits', flist[k],
                                                'out.fits')
                            cmd = 'mv out.fits ztemp.fits'
                            os.system(cmd)
#
#--- remove indivisual fits files
#
                for ent in flist:
                    cmd = 'rm -rf ' + ent
                    os.system(cmd)
#
#--- read out the data
#
                [cols, tbdata] = ecf.read_fits_file('ztemp.fits')

                cmd = 'rm -f ztemp.fits out.fits'
                os.system(cmd)
                #
                #--- get time data in the list form
                #
                dtime = list(tbdata.field('time'))

                for k in range(1, len(cols)):
                    #
                    #--- select col name without ST_ (which is standard dev)
                    #--- and skip the various house-keeping columns
                    #
                    col = cols[k]
                    mc = re.search('ST_', col)
                    if mc is not None:
                        continue
                    mc = re.search('quality', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('mjf', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('gap', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('dataqual', col, re.IGNORECASE)
                    if mc is not None:
                        continue
                    mc = re.search('tlm_fmt', col, re.IGNORECASE)
                    if mc is not None:
                        continue
#
#---- extract data in a list form
#
                    data = list(tbdata.field(col))
                    #
                    #--- change col name to msid
                    #
                    msid = col.lower()
                    #
                    #--- get limit data table for the msid
                    #
                    try:
                        tchk = convert_unit_indicator(udict[msid])
                    except:
                        tchk = 0

                    glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
                    #
                    #--- update database
                    #
                    update_database(msid, group, dtime, data, glim)
示例#11
0
def create_interactive_page(msid, group, start, stop, step):
    """
    create an interactive html page for a given msid
    input:  msid    --- msid
            group   --- group name
            start   --- start time
            stop    --- stop time
            step    --- bin size in seconds
    """
    start = ecf.check_time_format(start)
    stop  = ecf.check_time_format(stop)
#
#--- msid <---> unit dictionary, mta database, and mta <---> sql conversion list
#
    [udict, ddict] = ecf.read_unit_list()
    mta_db         = ecf.read_mta_database()
    mta_cross      = ecf.read_cross_check_table()
#
#--- limit table for the msid; fall back to 0 when the unit cannot be resolved
#
    try:
        tchk = ecf.convert_unit_indicator(udict[msid])
    except:
        tchk = 0

    glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- extract data: try the ska archive first, then the mta local database
#
    found = True
    try:
        out   = fetch.MSID(msid, start, stop)
        tdata = out.vals
        ttime = out.times
    except:
        try:
            [ttime, tdata] = uds.get_mta_fits_data(msid, start, stop)
#
#--- if both failed, fall through with an empty result
#
        except:
            found = False
#
#--- only short_p can change step size (by setting "step")
#
    if found:
        [week_p, short_p, long_p] = uds.process_day_data(msid,
                                                         ttime,
                                                         tdata,
                                                         glim,
                                                         step=step)
#
#--- try to build the fits file feeding the interactive plot; mta special
#--- cases fail here and are given 'na'
#
        try:
            fits_data = create_inter_fits(msid, short_p)
        except:
            fits_data = 'na'
    else:
        fits_data = 'na'
#
#--- create interactive html page
#
    create_html_page(msid, fits_data, step)
#
#--- remove the temporary fits file
#
    if fits_data != 'na':
        os.system('rm -rf ' + fits_data)
示例#12
0
def update_ephkey_l1_data(date = ''):
    """
    update ephkey L1 data
    input:  date    ---- the date to fill (used as '<yyyy>:<ddd>' below ---
                         NOTE(review): the original docstring said yyyymmdd; confirm).
                         if not given, every day from the day after the last
                         entry up to today is processed
    output: fits file data related to grad and comp
    """
#
#--- read the msids (and the group name) which need special treatment
#
    ifile = house_keeping + 'msid_list_ephkey'
    with open(ifile, 'r') as f:
        data = [line.strip() for line in f.readlines()]

    msid_list = []
    for ent in data:
        atemp = re.split(r'\s+', ent)
        msid_list.append(atemp[0])
#
#--- all msids share the (last listed) group name
#
        group = atemp[1]
#
#--- create msid <---> unit dictionary
#
    [udict, ddict] = ecf.read_unit_list()
#
#--- read mta database
#
    mta_db = ecf.read_mta_database()
#
#--- read mta msid <---> sql msid conversion list
#
    mta_cross = ecf.read_cross_check_table()
#
#--- create date list from the day after the last entry to today
#
    if date == '':
#
#--- the date of the last entry
#
        stemp = ecf.find_the_last_entry_time(test_fits)
        stemp = Chandra.Time.DateTime(stemp).date
        atemp = re.split(':', stemp)
        syear = int(float(atemp[0]))
        sday  = int(float(atemp[1]))
#
#--- if the data is missing more than 6 hours, fill that day again
#
        shh   = int(float(atemp[2]))
        if shh < 18:
            sday -= 1
            if sday < 0:
                syear -= 1
                if tcnv.isLeapYear(syear) == 1:
                    sday = 366
                else:
                    sday = 365
#
#--- find today's date
#
        stemp = time.strftime("%Y:%j", time.gmtime())
        atemp = re.split(':', stemp)
        lyear = int(float(atemp[0]))
        lday  = int(float(atemp[1]))

        date_list = []
        if syear == lyear:
            for day in range(sday+1, lday):
#
#--- bug fix: the original reused "lday" for the zero-padded day string,
#--- clobbering the loop bound that is still needed below
#
                cday = ecf.add_lead_zeros(day, 2)
                date_list.append(str(syear) + ':' + cday)
        else:
#
#--- the period straddles a year boundary: finish syear, then run lyear
#
            if tcnv.isLeapYear(syear) == 1:
                base = 367
            else:
                base = 366

            for day in range(sday+1, base):
                cday = ecf.add_lead_zeros(day, 2)
                date_list.append(str(syear) + ':' + cday)

            for day in range(1, lday):
                cday = ecf.add_lead_zeros(day, 2)
                date_list.append(str(lyear) + ':' + cday)
    else:
#
#--- bug fix: date_list was never initialized when a date was given,
#--- so the original raised NameError on "date_list.append(date)"
#
        date_list = [date]

    for date in (date_list):
        tstart = date + ':00:00:00'
        tstop  = date + ':23:59:59'
#
#--- NOTE(review): the full date_list is passed on every iteration ---
#--- confirm run_update_with_archive expects the list rather than "date"
#
        uds.run_update_with_archive(msid_list, group, date_list, 'ephin', '0', 'ephhk', tstart, tstop)
示例#13
0
def update_ephhk_data():
    """
    update eph hk related msid data
    input:  none
    output: updated data fits files
    """
#
#--- unit dictionary, mta limit database, and mta <---> sql conversion table
#
    [udict, ddict] = ecf.read_unit_list()
    mta_db         = ecf.read_mta_database()
    mta_cross      = ecf.read_cross_check_table()
#
#--- find the data period and extract fits files from archive
#
    [tstart, tstop] = find_data_period()
    ofits = extract_archive_data(tstart, tstop)
#
#--- if no data is extracted, stop
#
    if ofits == False:
        exit(1)
#
#--- read out the extracted data, then drop the temporary fits files
#
    [cols, tbdata] = ecf.read_fits_file(ofits)
    os.system('rm -f out.fits ' + ofits)
#
#--- time column in the list form
#
    dtime = list(tbdata.field('time'))
#
#--- process every usable column
#
    for cname in find_col_names(cols):
#
#---- extract data in a list form; the lower-cased column name is the msid
#
        vals = list(tbdata.field(cname))
        msid = cname.lower()
#
#--- unit indicator for the msid; 0 when it cannot be determined
#
        try:
            tchk = convert_unit_indicator(udict[msid])
        except:
            tchk = 0

        glim = ecf.get_limit(msid, tchk, mta_db, mta_cross)
#
#--- update database
#
        update_database(msid, 'ephhk', dtime, vals, glim)