Example #1
def plot_repeat_star_mags(repeats):
    plt.close(1)
    plt.figure(1, figsize=(6, 4))

    aca_db = DBI(server='sybase', dbi='sybase', user='******')
    for agasc_id in repeats['id']:
        print(agasc_id)
        if agasc_id in cache:
            obsdata = cache[agasc_id]
        else:
            obsdata = aca_db.fetchall("select * from trak_stats_data where id = {} "
                                      "order by kalman_tstart".format(agasc_id))
            cache[agasc_id] = obsdata
        years = DateTime(obsdata['kalman_tstart']).frac_year
        scatter = np.random.uniform(-0.5, 0.5, size=len(years))
        dmags = obsdata['aoacmag_mean'] - np.median(obsdata['aoacmag_mean'])
        plt.plot(years + scatter, dmags, '.', label='ID {}'.format(agasc_id))
    aca_db.conn.close()

    plt.xlabel('Year')
    plt.ylabel('Delta Mag')
    plt.grid()
    plt.ylim(-0.1, 0.1)
    plt.legend(loc='upper left', fontsize=10)
    plt.title('ACA Responsivity')
    plt.tight_layout()
    plt.savefig('responsivity.png')
Example #2
def get_repeats(n_repeats=300):  # default gives us 4 stars
    aca_db = DBI(server='sybase', dbi='sybase', user='******')
    repeats = aca_db.fetchall("select id, count(id) as num_obs from trak_stats_data "
                              "group by id having (count(id) > {})".format(n_repeats))
    repeats = repeats[repeats['id'] >= 20]
    aca_db.conn.close()

    return repeats
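A minimal driver sketch tying the two functions above together (assumes `cache` is the module-level dict consulted by plot_repeat_star_mags, and that Sybase credentials are available to DBI):

cache = {}  # module-level cache assumed by plot_repeat_star_mags
repeats = get_repeats(n_repeats=300)  # stars with more than 300 tracked obs
plot_repeat_star_mags(repeats)        # writes responsivity.png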
Example #3
    def cmd_states_fetch(self, tbegin, tend):
        """
        Search the TA database and retrieve all the command
        state data between the given start/stop times.

        Returned - numpy array. Data types are:

             Data item and type
             ------------------
             ('datestart', '|S21'),
             ('datestop', '|S21'),
             ('tstart', '<f8'),
             ('tstop', '<f8'),
             ('obsid', '<i8'),
             ('power_cmd', '|S10'),
             ('si_mode', '|S8'),
             ('pcad_mode', '|S4'),
             ('vid_board', '<i8'),
             ('clocking', '<i8'),
             ('fep_count', '<i8'),
             ('ccd_count', '<i8'),
             ('simpos', '<i8'),
             ('simfa_pos', '<i8'),
             ('pitch', '<f8'),
             ('ra', '<f8'),
             ('dec', '<f8'),
             ('roll', '<f8'),
             ('q1', '<f8'),
             ('q2', '<f8'),
             ('q3', '<f8'),
             ('q4', '<f8'),
             ('trans_keys', '|S48'),
             ('hetg', '|S4'),
             ('letg', '|S4'),
             ('dither', '|S4')

        """
        # convert begin and end into sybase query tstart and tstop
        tstart = DateTime(tbegin)
        tstop = DateTime(tend)
        #
        # form the query for everything between tstart and tstop
        #
        query = """select * from cmd_states where datestart >= '%s'
                   and datestop <= '%s' order by datestart asc """ % (
            tstart.date, tstop.date)
        #
        # set up a read to the data base
        #
        aca_read_db = DBI(dbi='sybase',
                          server='sybase',
                          user='******',
                          database='aca')

        #  Fetch all the data
        self.cmd_states = aca_read_db.fetchall(query)

        return self.cmd_states
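A hedged usage sketch for the method above (the owning instance and the date strings are hypothetical; any Chandra.Time-compatible times should work):

# Hypothetical usage: fetch one day of command states, then select the
# normal-point intervals from the returned numpy recarray.
states = fetcher.cmd_states_fetch('2012:001:00:00:00', '2012:002:00:00:00')
npnt = states[states['pcad_mode'] == 'NPNT']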
Example #4
    def cmd_states_fetch(self, tbegin, tend):
        """
        Search the TA database and retrieve all the command
        state data between the given start/stop times.
     
        Returned - numpy array. Data types are:

             Data item and type
             ------------------
             ('datestart', '|S21'),
             ('datestop', '|S21'),
             ('tstart', '<f8'),
             ('tstop', '<f8'),
             ('obsid', '<i8'),
             ('power_cmd', '|S10'),
             ('si_mode', '|S8'),
             ('pcad_mode', '|S4'),
             ('vid_board', '<i8'),
             ('clocking', '<i8'),
             ('fep_count', '<i8'),
             ('ccd_count', '<i8'),
             ('simpos', '<i8'),
             ('simfa_pos', '<i8'),
             ('pitch', '<f8'),
             ('ra', '<f8'),
             ('dec', '<f8'),
             ('roll', '<f8'),
             ('q1', '<f8'),
             ('q2', '<f8'),
             ('q3', '<f8'),
             ('q4', '<f8'),
             ('trans_keys', '|S48'),
             ('hetg', '|S4'),
             ('letg', '|S4'),
             ('dither', '|S4')

        """
        # convert begin and end into sybase query tstart and tstop
        tstart = Chandra.Time.DateTime(tbegin)
        tstop = Chandra.Time.DateTime(tend)
        #
        # form the query for everything between tstart and tstop
        #
        query = """select * from cmd_states where datestart >= '%s' and datestop <= '%s' order by datestart asc """ % (
            tstart.date,
            tstop.date,
        )
        #
        # set up a read to the data base
        #
        aca_read_db = DBI(dbi="sybase", server="sybase", user="******", database="aca")

        #  Fetch all the data
        self.cmd_states = aca_read_db.fetchall(query)

        return self.cmd_states
Example #5
def main(loadseg_rdb_dir, dryrun=False, test=False,
         dbi='sqlite', server='db_base.db3', database=None, user=None, verbose=False):
    """
    Command Load Segment Table Updater
    
    Read RDB table from SKA arc iFOT events area and update load_segments table
    Meant to be run as a cron task with no arguments.

    Details:
    Reads most recent RDB file from arc data iFOT events load_segments directory.
    Checks loads in that file for overlap and prolonged separations
    Removes outdated table entries
    Inserts new and newly modified entries

    Note that dryrun mode does not show timelines which *would* be updated,
    as an update to the load_segments table must happen prior to get_timelines()

    """

    dbh = DBI(dbi=dbi, server=server, database=database, user=user, verbose=verbose)
    ch = logging.StreamHandler()
    ch.setLevel(logging.WARN)
    if verbose:
        ch.setLevel(logging.DEBUG)
    log.addHandler(ch)
    if dryrun:
        log.info("LOAD_SEG INFO: Running in dryrun mode")
    loadseg_dir = loadseg_rdb_dir
    # get the loads from the arc ifot area
    all_rdb_files = glob.glob(os.path.join(loadseg_dir, "*"))
    rdb_file = max(all_rdb_files)
    log.debug("LOAD_SEG DEBUG: Updating from %s" % rdb_file)
    orig_rdb_loads = Ska.Table.read_ascii_table(rdb_file, datastart=3)
    ifot_loads = rdb_to_db_schema(orig_rdb_loads)
    if len(ifot_loads):
        # make any scripted edits to the tables of parsed files to override directory
        # mapping
        import fix_tl_processing
        fix_tl_processing.repair(dbh)
        # make any scripted edits to the load segments table
        import fix_load_segments
        ifot_loads = fix_load_segments.repair(ifot_loads)
        max_timelines_id = dbh.fetchone(
            'SELECT max(id) AS max_id FROM timelines')['max_id'] or 0
        if max_timelines_id == 0 and not test:
            raise ValueError("TIMELINES: no timelines in database.")
        update_loads_db(ifot_loads, dbh=dbh, test=test, dryrun=dryrun)
        db_loads = dbh.fetchall("""select * from load_segments
                                   where datestart >= '%s' order by datestart
                                """ % (ifot_loads[0]['datestart']))
        update_timelines_db(loads=db_loads, dbh=dbh, max_id=max_timelines_id,
                            dryrun=dryrun, test=test)

    log.removeHandler(ch)
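A hypothetical invocation sketch (the iFOT load_segments directory path is an assumption; in production this runs from cron with the defaults):

# Dry-run update against the default sqlite server; the path is hypothetical.
main('/data/arc/iFOT_events/load_segments', dryrun=True, verbose=True)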
Example #6
def hrc_gain_test_obs(new_obs, test=''):
    """
    Find new AR Lac observations from an HRC obsid list.
    Input: new_obs  --- a list of HRC obsids
           test     --- a test indicator; if it is anything other than "", run in test mode
    Output: "./candidate_list"  which lists obsids of new AR Lac observations
            candidate_list      the function also returns the same list
    """

    if test == "":
        f1 = open('./candidate_list', 'w')

        file = house_keeping + 'hrc_obsid_list'
        file2 = house_keeping + 'hrc_obsid_list~'
        cmd = 'cp -f ' + file + ' ' + file2
        os.system(cmd)
        f2 = open(file, 'a')

    candidate_list = []
    for obsid in new_obs:
        #
        #--- open sql database and extract data we need
        #
        db = DBI(dbi='sybase',
                 server=db_server,
                 user=db_user,
                 passwd=db_passwd,
                 database='axafocat')

        cmd = 'select obsid,targid,seq_nbr,targname,grating,instrument from target where obsid=' + obsid
        query_results = db.fetchall(cmd)
        if not len(query_results):
            continue
        query_results = Table(query_results)

        line = query_results['targname'].data
        targname = line[0]
        #
        #--- if the observation is AR Lac, write it down in candidate_list
        #
        m1 = re.search('arlac', targname.lower())
        if m1 is not None:
            line = obsid + '\n'
            candidate_list.append(obsid)

            if test == '':
                f1.write(line)
                f2.write(line)

    if test == '':
        f1.close()
        f2.close()

    return candidate_list
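A hedged usage sketch (the obsids are hypothetical; passing a non-empty test string keeps the function from writing candidate_list or the house_keeping files):

# Hypothetical obsids; test mode avoids touching any files.
candidates = hrc_gain_test_obs(['23182', '23183'], test='yes')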
Example #7
#!/usr/bin/env python

import sys
from Ska.DBI import DBI
from Chandra.Time import DateTime
import numpy as np

dbh = DBI(dbi='sybase', server='sybase', user='******')
mp = '/data/mpcrit1/mplogs'

t = DateTime()
if len(sys.argv) > 1:
    t = DateTime(sys.argv[1])
timelines = dbh.fetchall(
"""select * from timelines
   where datestart <= '%(date)s' and datestop > '%(date)s'"""
% {'date': t.date})

if not len(timelines):
    timelines = dbh.fetchall(
"""select * from timelines
   where ( datestart = (
       select max(datestart) from timelines where datestart < '%(date)s'))
    or ( datestart = (
       select min(datestart) from timelines where datestart > '%(date)s'))"""
% {'date': t.date})

for tdir in np.unique(timelines['dir']):
    print "file://%s%s%s" % (mp, tdir, 'starcheck.html')
Example #8
            bash('echo "False" > {}'.format(stat_file))
            print "updating %s NOT POINT" % src_dir
        if c == 'm':
            bash('echo "None" > {}'.format(stat_file))
            print "updating %s ..Eh " % src_dir
        if c == 'y':
            bash('echo "True" > {}'.format(stat_file))
            print "updating %s POINT" % src_dir
        os.kill(int(proc.group(1)), 1)


    src = Table.read(src_file, format='ascii.tab')
    # append via dictionaries to avoid dealing with fact that ascdsver is
    # read as a float on occasion
    srcdict = dict(zip(src[0].colnames, src[0].data))
    obs = acadb.fetchall("select * from observations where obsid = {}".format(srcdict['obsid']))
    for key in ['kalman_datestart', 'kalman_datestop', 'kalman_tstart', 'kalman_tstop',
                'grating', 'readmode', 'datamode', 'detector', 'sim_z', 'sim_z_offset']:
        srcdict[key] = obs[0][key]
    tilt_db = acadb.fetchall(
        "select max_oobagrd3 - min_oobagrd3 as rd3diff, "
        "max_oobagrd6 - min_oobagrd6 as rd6diff "
        "from obs_periscope_tilt where obsid = {}".format(srcdict['obsid']))
    if not len(tilt_db):
        continue
    srcdict['oobagrd3_diff'] = tilt_db[0]['rd3diff']
    srcdict['oobagrd6_diff'] = tilt_db[0]['rd6diff']

    from Ska.Sun import pitch
    obs_pitch = pitch(obs['ra_pnt'], obs['dec_pnt'], obs['kalman_tstart'])
    srcdict['pitch'] = obs_pitch
    srcdict['point_source'] = stat
    srcs.append(srcdict)
LABEL = "Outstanding Targets"
PLANNING_LIMIT = opt.planning_limit
TASK_DATA = os.path.join(os.environ["SKA"], "data", "aca_lts_eval")

db = DBI(dbi="sybase", server="sqlsao", database="axafocat", user="******")
query = """SELECT t.obsid, t.ra, t.dec,
t.y_det_offset as y_offset, t.z_det_offset as z_offset,
t.approved_exposure_time, t.instrument, t.grating, t.obs_ao_str
FROM target t
WHERE
((t.status='unobserved' OR t.status='partially observed' OR t.status='untriggered' OR t.status='scheduled')
AND NOT(t.ra = 0 AND t.dec = 0)
AND NOT(t.ra IS NULL OR t.dec IS NULL))
ORDER BY t.obsid"""

targets = Table(db.fetchall(query))
targets.write(os.path.join(OUTDIR, "requested_targets.txt"), format="ascii.fixed_width_two_line")


stop = DateTime("{}-03-15".format(2000 + CYCLE))
start = stop - (365 + 210)
if opt.start is not None:
    start = DateTime(opt.start)
if opt.stop is not None:
    stop = DateTime(opt.stop)

targets["report_start"] = start.secs
targets["report_stop"] = stop.secs

last_data_file = os.path.join(OUTDIR, "target_table.dat")
last_data = None
Example #10
def retrieve_perigee_telem(start='2009:100:00:00:00.000',
                           stop=None,
                           pass_data_dir='.',
                           redo=False):
    """
    Retrieve perigee pass and other 8x8 image telemetry.

    Telemetry is stored in directories named by datestart in the PASS_DATA
    directory.
    The file pass_times.txt in each directory contains the time range that
    has been queried for 8x8 image data

    :param start: Chandra.Time compatible time for beginning of range
    :param stop: Chandra.Time compatible time for end of range
    :rtype: list of updated directories
    """

    tstart = DateTime(start)
    # default tstop is now; otherwise use the given stop time
    if stop is None:
        tstop = DateTime(time.time(), format='unix')
    else:
        tstop = DateTime(stop)

    log.info("retrieve_perigee_telem(): Checking for current telemetry from %s"
             % tstart.date)

    pass_time_file = 'pass_times.txt'
    aca_db = DBI(dbi='sybase', server='sybase',
                 user='******', database='aca')
    obsids = aca_db.fetchall("""SELECT obsid,obsid_datestart,obsid_datestop
                                from observations
                                where obsid_datestart > '%s'
                                and obsid_datestart < '%s' order by obsid_datestart"""
                             % (tstart.date, tstop.date))

    # Get contiguous ER chunks, which are largely perigee passes
    chunks = []
    chunk = {'start': None,
             'stop': None}
    for obsid in obsids:
        # If an OR, end a "chunk" of ERs unless undefined
        # (this should only append on the first OR after one or more ERs)
        if obsid['obsid'] < 40000:
            if chunk['start'] is not None and chunk['stop'] is not None:
                chunks.append(chunk.copy())
                chunk = {'start': None,
                         'stop': None}
        else:
            if chunk['start'] is None:
                chunk['start'] = obsid['obsid_datestart']
            chunk['stop'] = obsid['obsid_datestop']

    pass_dirs = []
    # for each ER chunk get telemetry
    for chunk in chunks:
        er_start = chunk['start']
        er_stop = chunk['stop']
        log.debug("checking for %s pass" % er_start)
        er_year = DateTime(er_start).year
        year_dir = os.path.join(pass_data_dir, "%s" % er_year)
        if not os.access(year_dir, os.R_OK):
            os.mkdir(year_dir)
        pass_dir = os.path.join(pass_data_dir, "%s" % er_year, er_start)
        if not os.access(pass_dir, os.R_OK):
            os.mkdir(pass_dir)
        if (DateTime(er_stop).secs - DateTime(er_start).secs > 86400 * 2):
            if not os.path.exists(os.path.join(pass_dir, 'warned.txt')):
                log.warning("Skipping %s pass, more than 48 hours long" % er_start)
                continue
        pass_dirs.append(pass_dir)
        made_timefile = os.path.exists(os.path.join(pass_dir, pass_time_file))
        if made_timefile:
            pass_done = Ska.Table.read_ascii_table(
                os.path.join(pass_dir, pass_time_file))
            if ((pass_done['obsid_datestart'] == er_start)
                    & (pass_done['obsid_datestop'] == er_stop)):
                log.debug("%s times match" % pass_dir)
                continue
            else:
                log.info("pass %s exists but needs updating" % er_start)
                redo = True
        if not made_timefile or redo:
            f = open(os.path.join(pass_dir, pass_time_file), 'w')
            f.write("obsid_datestart,obsid_datestop\n")
            f.write("%s,%s\n" % (er_start, er_stop))
            f.close()
    return pass_dirs
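A hedged usage sketch (the pass data directory is hypothetical):

# Hypothetical usage: sync 8x8 image telemetry for a fixed range of ER passes.
pass_dirs = retrieve_perigee_telem(start='2009:100:00:00:00.000',
                                   stop='2009:130:00:00:00.000',
                                   pass_data_dir='/tmp/PASS_DATA')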
                                  format="%(asctime)s %(message)s")

# Get options
opt = get_opt()
stop = DateTime(opt.stop)
start = stop - 10 if (opt.start is None) else DateTime(opt.start)
logger.info('Processing from {} to {}'.format(start.date, stop.date))

# Define file names
h5_file = os.path.join(opt.data_root, 'aimpoint_asol_values.h5')
obsid_file = os.path.join(opt.data_root, 'aimpoint_obsid_index.shelve')

# Get obsids in date range
db = DBI(dbi='sqlite', server='/data/aca/archive/obspar/archfiles.db3')
obs = db.fetchall('select obsid, tstart from archfiles where tstart > {}'
                  ' and tstart < {}'
                  .format(start.secs, stop.secs))
db.conn.close()

# Get unique obsids and then sort by tstart
idx = np.unique(obs['obsid'], return_index=True)[1]
obs = Table(obs[idx])
obs.sort('tstart')
obs['datestart'] = Time(obs['tstart'], format='cxcsec').yday
obs.pprint(max_lines=-1)

obsid_index = shelve.open(obsid_file)

# Go through obsids and either process or skip
for obsid in obs['obsid']:
    if str(obsid) in obsid_index:
Example #12
        if c == 'n':
            bash('echo "False" > {}'.format(stat_file))
            print("updating %s NOT POINT" % src_dir)
        if c == 'm':
            bash('echo "None" > {}'.format(stat_file))
            print("updating %s ..Eh " % src_dir)
        if c == 'y':
            bash('echo "True" > {}'.format(stat_file))
            print("updating %s POINT" % src_dir)
        os.kill(int(proc.group(1)), 1)

    src = Table.read(src_file, format='ascii.tab')
    # append via dictionaries to avoid dealing with fact that ascdsver is
    # read as a float on occasion
    srcdict = dict(zip(src[0].colnames, src[0].data))
    obs = acadb.fetchall("select * from observations where obsid = {}".format(
        srcdict['obsid']))
    for key in [
            'kalman_datestart', 'kalman_datestop', 'kalman_tstart',
            'kalman_tstop', 'grating', 'readmode', 'datamode', 'detector',
            'sim_z', 'sim_z_offset'
    ]:
        srcdict[key] = obs[0][key]
    tilt_db = acadb.fetchall(
        "select max_oobagrd3 - min_oobagrd3 as rd3diff, max_oobagrd6 - min_oobagrd6 as rd6diff from obs_periscope_tilt where obsid = {}"
        .format(srcdict['obsid']))
    if not len(tilt_db):
        continue
    srcdict['oobagrd3_diff'] = tilt_db[0]['rd3diff']
    srcdict['oobagrd6_diff'] = tilt_db[0]['rd6diff']

    from Ska.Sun import pitch