Example #1
def load_samnet_data(file_name, archive_data, 
                        project, site, data_type, channels, start_time, 
                        end_time, **kwargs):

    chan_tup = tuple(archive_data['channels'])
    col_idx = []
    for c in channels:
        col_idx.append(chan_tup.index(c))
    nominal_cadence_s = (archive_data['nominal_cadence'] / 
                         np.timedelta64(1000000, 'us'))
    try:
        try:
            # 9999.9 is the missing-data sentinel; map it to NaN
            conv = lambda s: np.nan if s.strip().startswith('9999.9') \
                else float(s.strip())
            comments = ap.get_site_info(project, site, 'samnet_code')[0]
            data = np.loadtxt(file_name, 
                              unpack=True, 
                              converters={0: conv, 1: conv, 2: conv},
                              comments=comments)
            # TODO: check correct settings for sample start/end time
            # for both 1s and 5s data. IIRC 1s is offset and 5s is
            # centred.
            sample_start_time = np.arange(0, 86400, nominal_cadence_s)\
                .astype('m8[s]') + start_time
            sample_end_time = sample_start_time + \
                archive_data['nominal_cadence']
                
            data = data[col_idx] * 1e-9
            integration_interval = np.ones_like(data) \
                * archive_data['nominal_cadence']

            r = MagData(
                project=project,
                site=site,
                channels=channels,
                start_time=start_time,
                end_time=end_time,
                sample_start_time=sample_start_time, 
                sample_end_time=sample_end_time,
                integration_interval=integration_interval,
                nominal_cadence=archive_data['nominal_cadence'],
                data=data,
                units=archive_data['units'],
                sort=True)
            return r

        except Exception as e:
            logger.info('Could not read ' + file_name)
            logger.debug(str(e))
            logger.debug(traceback.format_exc())

        finally:
            # uh.close()
            pass
    except Exception as e:
        logger.info('Could not open ' + file_name)
        logger.debug(str(e))
        logger.debug(traceback.format_exc())

    return None
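
The subtle part of this loader is the converter that maps the 9999.9 missing-value sentinel to NaN before the columns are scaled. A minimal, self-contained sketch of the same np.loadtxt converter technique (sample data invented; the bytes check covers older numpy versions, which pass bytes to converters):

import io
import numpy as np

def conv(s):
    # np.loadtxt may pass bytes or str depending on the numpy version
    if isinstance(s, bytes):
        s = s.decode('ascii')
    s = s.strip()
    return np.nan if s.startswith('9999.9') else float(s)

text = io.StringIO('12.50  9999.9  -3.25\n'
                   '11.00  10.75   9999.9\n')
data = np.loadtxt(text, unpack=True, converters={0: conv, 1: conv, 2: conv})
print(data)  # unpack=True transposes: each row is one file column; sentinels are nan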
Example #2
def k_index_plot(mag_data, mag_qdc, filename, exif_tags):

    md_filt = mag_data
    if ap.has_site_info(mag_data.project, mag_data.site, 
                        'k_index_filter'):
        kfilt = ap.get_site_info(mag_data.project, mag_data.site, 
                                  'k_index_filter')
        if kfilt is not None:
            md_filt = kfilt(mag_data)

    k_index = ap.auroralactivity.KIndex(magdata=md_filt, magqdc=mag_qdc)

    # Fix the start/end times to the data, not the 3h K index samples
    k_index.start_time = md_filt.start_time
    k_index.end_time = md_filt.end_time

    k_index.plot()
    fig = plt.gcf()
    fig.set_figwidth(6.4)
    fig.set_figheight(4.8)
    fig.subplots_adjust(bottom=0.1, top=0.85, 
                        left=0.15, right=0.925)
    mysavefig(fig, filename, exif_tags)
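
The lookup at the top of k_index_plot treats a missing 'k_index_filter' entry and an explicit None the same way: no filtering. A stripped-down sketch of that idiom, with a plain dict standing in for auroraplot's site-info registry (SITE_INFO and the keys below are invented for illustration):

SITE_INFO = {
    ('PROJ1', 'SITE1'): {'k_index_filter': None},  # key present, explicitly no filter
    ('PROJ2', 'SITE2'): {},                        # key absent entirely
}

def apply_k_index_filter(project, site, data):
    info = SITE_INFO.get((project, site), {})
    kfilt = info.get('k_index_filter')  # None when absent or explicitly None
    return kfilt(data) if kfilt is not None else data

print(apply_k_index_filter('PROJ1', 'SITE1', [1, 2, 3]))  # -> [1, 2, 3]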
Example #3
    # Get copyright and attribution data for all sites. Licenses had
    # better be compatible (or we have express permission) since we
    # are combining them.
    copyright_list = []
    attribution_list = []

    for project_uc, site_uc in zip(project_list, site_list):
        project_lc = project_uc.lower()
        site_lc = site_uc.lower()
        logger.debug('Processing %s/%s', project_uc, site_uc)

        # Ignore this 24 hour period if outside the site's listed
        # operational period
        site_start_time = ap.get_site_info(project_uc, site_uc, 
                                           info='start_time')
        site_end_time = ap.get_site_info(project_uc, site_uc, 
                                         info='end_time')
        if ((site_start_time and t2 <= site_start_time) or
            (site_end_time and t1 >= site_end_time)):
            continue
        
        copyright_ = ap.get_site_info(project_uc, site_uc, 'copyright')
        attribution = ap.get_site_info(project_uc, site_uc, 'attribution')
        
        exif_tags = {'Exif.Image.Copyright':
                         ' '.join(['Copyright: ' + copyright_,
                                   'License: ' +
                                       ap.get_site_info(project_uc,
                                                        site_uc,
                                                        'license'),
                                   'Attribution: ' + attribution])}
Example #4

        logger.warning('realtime_qdc option not in %s archive for %s/%s',
                       an, project, site)
        
    qdc_fit_duration = ai['qdc_fit_duration']
    qdc_fit_offset = ai.get('qdc_fit_offset', -qdc_fit_duration/2 - 1.5*day)
    qdc_tries = ai.get('qdc_tries', 3)

    # Get mag data archive to use for source data
    if project in archive and site in archive[project]:
        md_archive = archive[project][site]
    else:
        md_archive = None

    # Tune start/end times to avoid requesting data outside of
    # operational period
    site_st = ap.get_site_info(project, site, 'start_time')
    if site_st is None or site_st < st:
        site_st = st
    else:
        site_st = dt64.floor(site_st, day)

    site_et = ap.get_site_info(project, site, 'end_time')        
    if site_et is None or site_et > et:
        site_et = et
    else:
        site_et = dt64.ceil(site_et, day)

    logger.info('Processing %s/%s %s', project, site,
                dt64.fmt_dt64_range(site_st, site_et))

    last_data = None
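
dt64.floor and dt64.ceil come from auroraplot's dt64tools module. The sketch below reimplements their assumed behaviour (snap a datetime64 down or up to a whole multiple of an interval) in plain numpy, purely for illustration:

import numpy as np

day = np.timedelta64(24, 'h')

def dt64_floor(t, interval):
    # Snap t down to a whole multiple of interval since the epoch
    return t - (t - np.datetime64(0, 's')) % interval

def dt64_ceil(t, interval):
    f = dt64_floor(t, interval)
    return f if f == t else f + interval

t = np.datetime64('2015-03-17T09:30')
print(dt64_floor(t, day))  # 2015-03-17T00:00:00
print(dt64_ceil(t, day))   # 2015-03-18T00:00:00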
Example #6
    raise Exception('UIT password needed but could not be set')


# Load and plot the data for each site. 
for n_s in n_s_list:
    project, site = n_s.split('/')
    kwargs = {}
    if project in archives:
        kwargs['archive'] = archives[project]
    md = ap.load_data(project, site, 'MagData', st, et, **kwargs)
    # If the result is None then no data is available, so ignore
    # those results.
    qdc = None
    if (md is not None and
        'MagQDC' in ap.get_site_info(project, site, 'data_types')):
        md = md.mark_missing_data(cadence=2*md.nominal_cadence)
        qdc_info = ap.magdata.load_qdc(project, site, dt64.mean(st, et),
                                       tries=args.tries, full_output=True)
        if qdc_info:
            qdc = qdc_info['magqdc']
        if qdc is not None and len(md.channels) != len(qdc.channels):
            qdc = None
    k = ap.auroralactivity.KIndex(magdata=md, magqdc=qdc)
    k.plot()
    fig = plt.gcf()
       
    # Override the labelling format.
    for ax in fig.axes:
        # Set maxticks so that for an entire day the ticks are at
        # 3-hourly intervals (to correspond with K index plots).
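
The snippet breaks off inside the loop. auroraplot formats datetime64 axes with its own tools, but with standard matplotlib date axes the intent in the comment could be realised roughly as follows (a sketch continuing the fig from above, not the original loop body):

import matplotlib.dates as mdates

for ax in fig.axes:
    # Major ticks every 3 hours so a full-day plot lines up with the
    # 3-hourly K index bins
    ax.xaxis.set_major_locator(mdates.HourLocator(interval=3))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))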
Example #7
def activity_plot(mag_data, mag_qdc, filename, exif_tags, 
                  k_index_filename=None):
    global activity
    channel = mag_data.channels[0]
    pos = [0.15, 0.1, 0.775, 0.75]

    if mag_qdc is None:
        activity = None
        mag_data.plot(channels=channel, label=channel, color='black')
        fig = plt.gcf()
        ax2 = plt.gca()
    else:
        # assert np.all(mag_data.channels == mag_qdc.channels) \
        #     and len(mag_data.channels) == 1 \
        #     and len(mag_qdc.channels) == 1, \
        #     'Bad value for channels'
    
        activity = ap.auroralactivity.AuroraWatchActivity(magdata=mag_data, 
                                                          magqdc=mag_qdc,
                                                          channels=channel,
                                                          fit=None)
        # To get another axes the position must be different. It is made
        # the same position later.
        pos2 = copy.copy(pos)
        pos2[0] += 0.1 
        fig = plt.figure(facecolor='w')
        ax = plt.axes(pos)

        activity.plot(axes=ax, units_prefix='n', 
                      label='Activity (' + channel + ')')
        ax2 = plt.axes(pos2)

        # Set Y limit to be 1.5 times highest threshold. Units are
        # nanotesla since that was set when plotting.
        ax.set_ylim(0, activity.thresholds[-1] * 1.5 * 1e9)
    
        mag_data.plot(channels=channel, 
                      label=channel, 
                      color='black',
                      axes=ax2)

        # Align the QDC to regular intervals between start and end times
        qdc_cadence = np.timedelta64(1, 'm')
#        num = ((mag_data.end_time - mag_data.start_time)/ qdc_cadence) + 1
#        qdc_sample_times = np.linspace(mag_data.start_time.astype('M8[m]'),
#                                       mag_data.end_time.astype('M8[m]'),
#                                       num)
        qdc_sample_times = list(dt64.dt64_range(mag_data.start_time,
                                                mag_data.end_time,
                                                qdc_cadence))

        qdc_aligned = mag_qdc.align(qdc_sample_times)
        qdc_aligned.plot(channels=channel, 
                         label=channel + ' QDC', 
                         color='cyan', 
                         axes=ax2)

        ax.set_facecolor('w')  # set_axis_bgcolor() was removed in Matplotlib 2.2
        ax.axison = False
        ax2.set_title(activity.make_title())

    ax2.set_facecolor('none')  # replaces the removed set_axis_bgcolor()
    ax2.set_position(pos)

    min_ylim_range = 400
    ax2_ylim = ax2.get_ylim()
    if np.diff(ax2_ylim) < min_ylim_range:
        ax2.set_ylim(round_to(np.mean(ax2_ylim), 50) 
                     + min_ylim_range * np.array([-0.5, 0.5]))
    fig.set_figwidth(6.4)
    fig.set_figheight(4.8)

    mysavefig(fig, filename, exif_tags)

    r = [activity]
    if k_index_filename is not None:
        md_filt = mag_data
        if ap.has_site_info(mag_data.project, mag_data.site, 
                            'k_index_filter'):
            kfilt = ap.get_site_info(mag_data.project, mag_data.site, 
                                      'k_index_filter')
            if kfilt is not None:
                md_filt = kfilt(mag_data)

        k_index = ap.auroralactivity.KIndex(magdata=md_filt, magqdc=mag_qdc)
        # Fix the start/end times to the data, not the 3h K index samples
        k_index.start_time = md_filt.start_time
        k_index.end_time = md_filt.end_time

        k_index.plot()
        fig = plt.gcf()
        fig.set_figwidth(6.4)
        fig.set_figheight(4.8)
        fig.subplots_adjust(bottom=0.1, top=0.85, 
                            left=0.15, right=0.925)
        mysavefig(fig, k_index_filename, exif_tags)

        r.append(k_index)

    return r
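
round_to is used above but never defined in the snippet. A plausible stand-in (an assumption, not necessarily the original helper), plus a worked example of the minimum y-range logic:

import numpy as np

def round_to(value, multiple):
    # Hypothetical helper: round value to the nearest multiple
    return multiple * np.round(value / multiple)

ylim = (17281.0, 17423.0)             # only 142 nT of range
centre = round_to(np.mean(ylim), 50)  # -> 17350.0
print(centre + 400 * np.array([-0.5, 0.5]))  # [17150. 17550.]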
Example #8
def has_data_of_type(project, site, data_type):
    dti = ap.get_site_info(project, site, 'data_types')
    # dict.has_key() was removed in Python 3; use the "in" operator
    return data_type in dti
Example #9
                                           archive=archive)

    src_an, src_ai = ap.get_archive_info(project,
                                         site,
                                         'MagData',
                                         archive='original_' + dest_an)
    src_path = src_ai['path']
    dest_path = dest_ai['path']
    print('src archive ' + src_an)
    print(repr(src_ai))
    print('dest archive ' + dest_an)
    print(repr(dest_ai))

    # Tune start/end times to avoid requesting data outside of
    # operational period
    site_st = ap.get_site_info(project, site, 'start_time')
    if site_st is None or site_st < st:
        site_st = st
    else:
        site_st = dt64.floor(site_st, day)
    site_st = dt64.floor(site_st, src_ai['duration'])

    site_et = ap.get_site_info(project, site, 'end_time')
    if site_et is None or site_et > et:
        site_et = et
    else:
        site_et = dt64.ceil(site_et, day)
    site_et = dt64.ceil(site_et, src_ai['duration'])

    logger.info('Processing %s/%s %s', project, site,
                dt64.fmt_dt64_range(site_st, site_et))
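
With both ends floored and ceiled to whole multiples of src_ai['duration'], the window divides into complete archive files. A self-contained sketch of walking it (the values and the per-file action are invented for illustration):

import numpy as np

duration = np.timedelta64(1, 'D')  # invented archive file duration
site_st = np.datetime64('2015-03-16')
site_et = np.datetime64('2015-03-19')

t = site_st
while t < site_et:
    print('would process the file covering', t, 'to', t + duration)
    t = t + duration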