else:
            indices[wave_name][differencing_type] = 15

# Storage for the maps
maps = {}

# Go through each wave
for i, wave_name in enumerate(wave_names):

    # Storage by wave name
    print("\n----------------")
    print("Loading and accumulating {:s} data".format(wave_name))
    maps[wave_name] = {}

    # Load observational data from file
    euv_wave_data = aware_utils.create_input_to_aware_for_test_observational_data(wave_name, spatial_summing, temporal_summing)

    # Accumulate the AIA data
    mc = euv_wave_data['finalmaps']

    # Go through each of the differencing types
    for differencing_type in differencing_types:

        # Which layer in the mapcube to use
        index = indices[wave_name][differencing_type]

        if differencing_type == 'RD':
            mc_diff = mapcube_tools.running_difference(mc)
        elif differencing_type == 'BD':
            mc_diff = mapcube_tools.base_difference(mc)
        elif differencing_type == 'RDP':
            # 'RDP' is read here as the running difference of the persistence
            # transform; the persistence helper is assumed to live in
            # mapcube_tools alongside running_difference and base_difference.
            mc_diff = mapcube_tools.running_difference(mapcube_tools.persistence(mc))
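
# A minimal sketch, separate from the example above, of what running and base
# differencing of a sequence of maps amount to. The helper names here are
# hypothetical; the real implementations are the mapcube_tools functions used
# above.
import sunpy.map

def running_difference_sketch(maps):
    # Each map minus the map immediately before it in the sequence.
    return [sunpy.map.Map(maps[i].data - maps[i - 1].data, maps[i].meta)
            for i in range(1, len(maps))]

def base_difference_sketch(maps):
    # Each map minus the first map in the sequence.
    base = maps[0].data
    return [sunpy.map.Map(m.data - base, m.meta) for m in maps]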
#
# Example #2
#
                print(" - Saving test waves.")
                file_path = os.path.join(otypes_dir['dat'], otypes_filename['dat'] + '.pkl')
                print('Saving to %s' % file_path)
                with open(file_path, 'wb') as f:
                    pickle.dump(euv_wave_data, f)
        else:
            print(" - Loading test waves.")
            file_path = os.path.join(otypes_dir['dat'], otypes_filename['dat'] + '.pkl')
            print('Loading from %s' % file_path)
            with open(file_path, 'rb') as f:
                out = pickle.load(f)
    else:
        # Load observational data from file
        euv_wave_data = aware_utils.create_input_to_aware_for_test_observational_data(wave_name)

        # Transform parameters used to convert HPC image data to HG data.
        # The HPC data is transformed to HG using the location below as the
        # "pole" around which the data is transformed
        transform_hpc2hg_parameters['epi_lon'] = euv_wave_data['epi_lon'] * u.deg
        transform_hpc2hg_parameters['epi_lat'] = euv_wave_data['epi_lat'] * u.deg

    # Storage for the results from all methods and polynomial fits
    final = {}
    for method in griddata_methods:
        print(' - Using the griddata method %s.' % method)
        final[method] = []

        # Which data to use
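
# A minimal sketch, separate from the example above, comparing the
# scipy.interpolate.griddata methods that the griddata_methods loop iterates
# over; the scattered sample data below is invented purely for illustration.
import numpy as np
from scipy.interpolate import griddata

rng = np.random.default_rng(0)
points = rng.uniform(0.0, 1.0, size=(100, 2))          # scattered sample locations
values = np.sin(np.pi * points[:, 0]) * points[:, 1]   # values at those locations
grid_x, grid_y = np.mgrid[0:1:50j, 0:1:50j]            # regular target grid

for method in ('nearest', 'linear', 'cubic'):
    gridded = griddata(points, values, (grid_x, grid_y), method=method)
    print(method, np.nanmean(gridded))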
#
# Example #3
#
        idir = os.path.join(idir, loc)
        filename = filename + loc + '.'
    filename = filename[:-1]
    if not os.path.exists(idir):
        os.makedirs(idir)
    otypes_dir[ot] = idir
    otypes_filename[ot] = filename

#
# Load in data
#
index = 20
create = False

if create:
    mc = aware_utils.create_input_to_aware_for_test_observational_data(wave_name)['finalmaps']
    develop = {'img': os.path.join(otypes_dir['img'], otypes_filename['img']),
               'dat': os.path.join(otypes_dir['dat'], otypes_filename['dat'])}
    aware_processed, develop_filepaths = aware3.processing(
        mc,
        develop=develop,
        radii=radii,
        func=intensity_scaling_function,
        histogram_clip=histogram_clip)
else:
    print('Loading datasets.')
    develop_filepaths = {}
    root = os.path.join(otypes_dir['dat'], otypes_filename['dat'])
    develop_filepaths['rdpi_mc'] = root + "_rdpi_mc.pkl"
    develop_filepaths['np_median_dc'] = root + "_np_median_dc_0.npy"
    develop_filepaths['np_meta'] = root + "_np_meta.pkl"
    develop_filepaths['np_nans'] = root + "_np_nans.npy"
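
# A minimal sketch, separate from the example above, of how the intermediate
# files referenced by develop_filepaths could be read back; the keys and file
# extensions follow the paths constructed above, everything else is
# illustrative.
import pickle
import numpy as np

def load_develop_files(filepaths):
    # Unpickle .pkl entries and load .npy entries as NumPy arrays.
    loaded = {}
    for key, path in filepaths.items():
        if path.endswith('.pkl'):
            with open(path, 'rb') as f:
                loaded[key] = pickle.load(f)
        else:
            loaded[key] = np.load(path)
    return loaded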