Example #1
def get_telem(msids, start=None, stop=None, sampling='full', unit_system='eng',
              interpolate_dt=None, remove_events=None, select_events=None,
              time_format=None, outfile=None, quiet=False,
              max_fetch_Mb=None, max_output_Mb=None):
    """
    High-level routine to get telemetry for one or more MSIDs and perform
    common post-processing functions.

    This is a non-public version that really does the work.  The public interface
    is fetch.get_telem(), which is a thin wrapper for this.  (Trying to factor code
    out to separate modules and keep import times down).  See get_telem() for param
    docs.
    """
    # Set up output logging
    from pyyaks.logger import get_logger
    logger = get_logger(name='Ska.engarchive.get_telem', level=(100 if quiet else -100))

    # Set defaults and translate to fetch keywords
    stop = DateTime(stop)
    start = stop - 30 if start is None else DateTime(start)
    stat = None if sampling == 'full' else sampling
    filter_bad = interpolate_dt is None
    if isinstance(msids, six.string_types):
        msids = [msids]

    logger.info('Fetching {}-resolution data for MSIDS={}\n  from {} to {}'
                .format(sampling, msids, start.date, stop.date))

    fetch.set_units(unit_system)

    # Make sure that the dataset being fetched is reasonable (if checking requested)
    if max_fetch_Mb is not None or max_output_Mb is not None:
        fetch_Mb, output_Mb = utils.get_fetch_size(msids, start, stop, stat=stat,
                                                   interpolate_dt=interpolate_dt, fast=True)
        if max_fetch_Mb is not None and fetch_Mb > max_fetch_Mb:
            raise MemoryError('Requested fetch requires {:.2f} Mb vs. limit of {:.2f} Mb'
                              .format(fetch_Mb, max_fetch_Mb))
        # If outputting to a file then check output size
        if outfile and max_output_Mb is not None and output_Mb > max_output_Mb:
            raise MemoryError('Requested fetch (interpolated) requires {:.2f} Mb '
                              'vs. limit of {:.2f} Mb'
                              .format(output_Mb, max_output_Mb))

    dat = fetch.MSIDset(msids, start, stop, stat=stat, filter_bad=filter_bad)

    if interpolate_dt is not None:
        logger.info('Interpolating at {} second intervals'.format(interpolate_dt))
        msidset_resample(dat, interpolate_dt)

    if remove_events is not None:
        logger.info('Removing events: {}'.format(remove_events))
        queryset = get_queryset(remove_events)
        for msid in dat:
            dat[msid].remove_intervals(queryset)

    if select_events is not None:
        logger.info('Selecting events: {}'.format(select_events))
        queryset = get_queryset(select_events)
        for msid in dat:
            dat[msid].select_intervals(queryset)

    if time_format not in (None, 'secs'):
        for dat_msid in dat.values():
            dat_msid.times = getattr(DateTime(dat_msid.times, format='secs'), time_format)

    if outfile:
        logger.info('Writing data to {}'.format(outfile))
        dat.write_zip(outfile)

    return dat
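# Hedged usage sketch (not part of the source above): the docstring says the
# public entry point is fetch.get_telem(), a thin wrapper that forwards the same
# parameters to this worker function.  The MSID names, dates and event expression
# below are illustrative assumptions, not values taken from the examples.
from Ska.engarchive import fetch

dat = fetch.get_telem(['TEPHIN', 'AOPCADMD'],    # one MSID or a list of MSIDs
                      start='2013:001',
                      stop='2013:010',
                      sampling='5min',           # 'full', or a stat such as '5min' / 'daily'
                      interpolate_dt=328.0,      # resample onto a common 328-second grid
                      remove_events='safe_suns', # kadi event expression (assumed syntax)
                      time_format='date',        # convert times from CXC secs to date strings
                      max_fetch_Mb=100.0)        # guard against an accidentally huge fetch
for msid in dat:
    print(msid, len(dat[msid].vals))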
Example #2
import argparse

import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

from astropy import table
from astropy.table import Table

from pyyaks.logger import get_logger
from kadi import events
import Ska.DBI
from Ska.Matplotlib import plot_cxctime
from Ska.engarchive import fetch_eng as fetch
from Chandra.Time import DateTime

logger = get_logger()


def get_opt(args):
    parser = argparse.ArgumentParser(description='Commanded vs. telemetry fid positions')
    parser.add_argument('--start',
                        type=str,
                        help='Start date (default=NOW - 90 days)')
    parser.add_argument('--stop',
                        type=str,
                        help='Stop date (default=NOW)')
    parser.add_argument('--out',
                        type=str,
                        help='Output plot file')

    opt = parser.parse_args(args)
    return opt
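# Hedged usage sketch (not part of the source above): get_opt() wraps argparse,
# so options can be built from an explicit argument list (convenient for testing)
# or from sys.argv[1:] when run as a script.  The dates and output file name are
# illustrative assumptions.
opt = get_opt(['--start', '2023:001', '--stop', '2023:091', '--out', 'fid_drift.png'])
print(opt.start, opt.stop, opt.out)

# Equivalent command-line invocation (assumed script name):
#   python fid_drift.py --start 2023:001 --stop 2023:091 --out fid_drift.png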
Example #3
def get_telem(msids,
              start=None,
              stop=None,
              sampling='full',
              unit_system='eng',
              interpolate_dt=None,
              remove_events=None,
              select_events=None,
              time_format=None,
              outfile=None,
              quiet=False,
              max_fetch_Mb=None,
              max_output_Mb=None):
    """
    High-level routine to get telemetry for one or more MSIDs and perform
    common post-processing functions.

    This is a non-public version that really does the work.  The public interface
    is fetch.get_telem(), which is a thin wrapper for this.  (Trying to factor code
    out to separate modules and keep import times down).  See get_telem() for param
    docs.
    """
    # Set up output logging
    from pyyaks.logger import get_logger
    logger = get_logger(name='Ska.engarchive.get_telem',
                        level=(100 if quiet else -100))

    # Set defaults and translate to fetch keywords
    stop = DateTime(stop)
    start = stop - 30 if start is None else DateTime(start)
    stat = None if sampling == 'full' else sampling
    filter_bad = interpolate_dt is None
    if isinstance(msids, six.string_types):
        msids = [msids]

    logger.info(
        'Fetching {}-resolution data for MSIDS={}\n  from {} to {}'.format(
            sampling, msids, start.date, stop.date))

    fetch.set_units(unit_system)

    # Make sure that the dataset being fetched is reasonable (if checking requested)
    if max_fetch_Mb is not None or max_output_Mb is not None:
        fetch_Mb, output_Mb = utils.get_fetch_size(
            msids,
            start,
            stop,
            stat=stat,
            interpolate_dt=interpolate_dt,
            fast=True)
        if max_fetch_Mb is not None and fetch_Mb > max_fetch_Mb:
            raise MemoryError(
                'Requested fetch requires {:.2f} Mb vs. limit of {:.2f} Mb'.
                format(fetch_Mb, max_fetch_Mb))
        # If outputting to a file then check output size
        if outfile and max_output_Mb is not None and output_Mb > max_output_Mb:
            raise MemoryError(
                'Requested fetch (interpolated) requires {:.2f} Mb '
                'vs. limit of {:.2f} Mb'.format(output_Mb, max_output_Mb))

    dat = fetch.MSIDset(msids, start, stop, stat=stat, filter_bad=filter_bad)

    if interpolate_dt is not None:
        logger.info(
            'Interpolating at {} second intervals'.format(interpolate_dt))
        msidset_resample(dat, interpolate_dt)

    if remove_events is not None:
        logger.info('Removing events: {}'.format(remove_events))
        queryset = get_queryset(remove_events)
        for msid in dat:
            dat[msid].remove_intervals(queryset)

    if select_events is not None:
        logger.info('Selecting events: {}'.format(select_events))
        queryset = get_queryset(select_events)
        for msid in dat:
            dat[msid].select_intervals(queryset)

    if time_format not in (None, 'secs'):
        for dat_msid in dat.values():
            dat_msid.times = getattr(DateTime(dat_msid.times, format='secs'),
                                     time_format)

    if outfile:
        logger.info('Writing data to {}'.format(outfile))
        dat.write_zip(outfile)

    return dat
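# Hedged sketch (not part of the source above): msidset_resample() and
# get_queryset() are referenced in both get_telem() examples but not defined
# there.  Assuming msidset_resample() is a thin wrapper around
# MSIDset.interpolate() and get_queryset() evaluates an expression such as
# 'dsn_comms | rad_zones' against kadi event queries, minimal stand-ins could
# look like this; the real helpers in Ska.engarchive may do more (e.g. validate
# tokens or union bad-value masks).
from kadi import events


def msidset_resample(msidset, dt):
    """Resample ``msidset`` in place onto a common time grid spaced ``dt`` seconds."""
    msidset.interpolate(dt)


def get_queryset(expr):
    """Build a kadi EventQuery from an expression like 'safe_suns | normal_suns'."""
    # EventQuery objects support | and &, so evaluating the expression in the
    # kadi.events namespace composes them directly (simplified; no validation).
    return eval(expr, {'__builtins__': {}}, vars(events))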