Example #1
    final = {}
    for method in griddata_methods:
        print(' - Using the griddata method %s.' % method)
        final[method] = []

        # Which data to use
        for source in analysis_data_sources:
            print('Using the %s data source' % source)
            if source == 'finalmaps':
                # Get the final map out from the wave simulation
                mc = euv_wave_data['finalmaps']

                # Accumulate the data in space and time to increase the signal
                # to noise ratio
                print(' - Performing spatial and temporal summing of HPC data.')
                mc = mapcube_tools.accumulate(mapcube_tools.superpixel(mc, spatial_summing), temporal_summing)
                if develop is not None:
                    aware_utils.write_movie(mc, img_filepath + '_accumulated_data')
                # Swing the position of the start of the longitudinal
                # unwrapping
                for ils, longitude_start in enumerate(longitude_starts):

                    # Which angle to start the longitudinal unwrapping
                    transform_hpc2hg_parameters['longitude_start'] = longitude_start

                    # Which version of AWARE to use
                    if aware_version == 0:
                        #
                        # AWARE version 0 - first do the image processing
                        # to isolate the wave front, then do the transformation into
                        # heliographic co-ordinates to measure the wavefront.
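The step being exercised above is spatial summing followed by temporal accumulation of a mapcube. Below is a minimal sketch of that step in isolation, assuming `mc` is a sunpy mapcube and that `spatial_summing` is a two-element pixel Quantity while `temporal_summing` is an integer frame count; the concrete values are illustrative assumptions, not taken from the original script.

import astropy.units as u

import mapcube_tools

# Illustrative values: sum 4x4 pixel blocks, then add pairs of consecutive maps.
spatial_summing = [4, 4] * u.pix
temporal_summing = 2

# superpixel() lowers the spatial resolution; accumulate() then sums consecutive
# maps in time.  Both steps trade resolution for signal to noise in the faint
# wavefront.
mc_summed = mapcube_tools.accumulate(mapcube_tools.superpixel(mc, spatial_summing),
                                     temporal_summing)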
Example #2
import os
import pickle

from sunpy.map import Map
from sunpy.time import TimeRange

import aware_get_data
import mapcube_tools

# Note: time_from_file_name and eitwave_data_root are assumed to be defined
# elsewhere in the module this function is excerpted from.
def create_input_to_aware_for_test_observational_data(wave_name,
                                                      spatial_summing, temporal_summing,
                                                      instrument='AIA',
                                                      wavelength=211,
                                                      event_type='FL',
                                                      root_directory=eitwave_data_root):
    # Set up the data
    if wave_name == 'longetal2014_figure4':
        hek_record_index = 0
        time_range = ['2011-06-07 06:16:00', '2011-06-07 07:16:00']

    if wave_name == 'longetal2014_figure7':
        hek_record_index = 0
        time_range = ['2011-02-13 17:28:00', '2011-02-13 18:28:00']

    if wave_name == 'longetal2014_figure8a':
        hek_record_index = 0
        #  time_range = ['2011-02-15 01:48:00', '2011-02-15 02:14:34']
        time_range = ['2011-02-15 01:48:00', '2011-02-15 02:48:00']

    if wave_name == 'longetal2014_figure6':
        hek_record_index = 0
        #  time_range = ['2011-02-08 21:05:00', '2011-02-08 21:15:00']
        time_range = ['2011-02-08 21:05:00', '2011-02-08 22:05:00']

    if wave_name == 'longetal2014_figure8e':
        hek_record_index = 0
        #  time_range = ['2011-02-16 14:22:36', '2011-02-16 14:39:48']
        time_range = ['2011-02-16 14:22:36', '2011-02-16 15:22:36']

    if wave_name == 'byrneetal2013_figure12':
        hek_record_index = 0
        #  time_range = ['2010-08-14 09:40:18', '2010-08-14 10:32:00']
        time_range = ['2010-08-14 09:40:18', '2010-08-14 10:40:18']

    # Where the data is stored
    wave_info_location = os.path.join(root_directory, wave_name)

    # Get the FITS files we are interested in
    fits_location = os.path.join(wave_info_location, instrument, str(wavelength), 'fits', '1.0')
    fits_file_list = aware_get_data.get_file_list(fits_location, '.fits')
    if len(fits_file_list) == 0:
        instrument_measurement, qr = aware_get_data.find_fits(time_range, instrument, wavelength)
        print('Downloading {:n} files'.format(len(qr)))
        fits_file_list = aware_get_data.download_fits(qr, instrument_measurement=instrument_measurement)

    # Get the source information
    source_location = os.path.join(wave_info_location, event_type)
    source_path = aware_get_data.get_file_list(source_location, '.pkl')
    if len(source_path) == 0:
        print('Querying HEK for trigger data.')
        hek_record = aware_get_data.download_trigger_events(time_range)
    else:
        with open(source_path[0], 'rb') as f:
            hek_record = pickle.load(f)

    analysis_time_range = TimeRange(hek_record[hek_record_index]['event_starttime'],
                                    time_from_file_name(fits_file_list[-1].split(os.path.sep)[-1]))
    print('Analysis time range')
    print(analysis_time_range)
    for_analysis = []
    for f in fits_file_list:
        g = f.split(os.path.sep)[-1]
        if (time_from_file_name(g) <= analysis_time_range.end) and (time_from_file_name(g) >= analysis_time_range.start):
            for_analysis.append(f)

    return {'finalmaps': mapcube_tools.accumulate(mapcube_tools.superpixel(Map(for_analysis, cube=True), spatial_summing), temporal_summing),
            'epi_lat': hek_record[hek_record_index]['hgs_y'],
            'epi_lon': hek_record[hek_record_index]['hgs_x']}
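A hedged usage sketch for the function above; the chosen wave name and summing factors are illustrative assumptions, and the call presumes the relevant FITS files and HEK trigger data can either be found under eitwave_data_root or downloaded.

import astropy.units as u

# Hypothetical call: build the accumulated mapcube and source location for one
# of the waves handled above.
euv_wave_data = create_input_to_aware_for_test_observational_data(
    'longetal2014_figure7',
    spatial_summing=[4, 4] * u.pix,
    temporal_summing=2)

mc = euv_wave_data['finalmaps']      # spatially and temporally summed mapcube
epi_lon = euv_wave_data['epi_lon']   # Stonyhurst longitude of the source event
epi_lat = euv_wave_data['epi_lat']   # Stonyhurst latitude of the source event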
Example #3
def spatial_summing(self, dimension, **kwargs):
    # Return a new instance wrapping the spatially summed (superpixel) mapcube.
    # The original returned self.__init__(...), which always evaluates to None;
    # constructing the new object via self.__class__ gives the intended result.
    return self.__class__(mapcube_tools.superpixel(self.mc, dimension, **kwargs))
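For context, a minimal sketch of the kind of wrapper class this method could live in; the class name and constructor are assumptions inferred from the use of `self.mc`.

import mapcube_tools

class AccumulatedMapcube:
    # Hypothetical thin wrapper around a sunpy mapcube, implied by self.mc.
    def __init__(self, mc):
        self.mc = mc

    def spatial_summing(self, dimension, **kwargs):
        # Build a new wrapper around the spatially summed (superpixel) mapcube.
        return self.__class__(mapcube_tools.superpixel(self.mc, dimension, **kwargs))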