Example #1
# Which version of AWARE to use.  aware3, aware_utils, mapcube_tools and
# mapcube_hpc_to_hg all come from the AWARE package.
if aware_version == 0:
    #
    # AWARE version 0 - first do the image processing to isolate the
    # wavefront, then do the transformation into heliographic
    # coordinates to measure the wavefront.
    #
    print(' - Performing AWARE v0 image processing.')
    aware_processed, develop_filepaths = aware3.processing(mc,
                                                           develop=develop,
                                                           radii=radii,
                                                           func=intensity_scaling_function,
                                                           histogram_clip=histogram_clip)
    print(' - Segmenting the data to get the emission due to the wavefront.')
    segmented_maps = mapcube_tools.multiply(aware_utils.progress_mask(aware_processed),
                                            mapcube_tools.running_difference(mapcube_tools.persistence(mc)))
    print(' - Performing HPC to HG unraveling.')
    # An earlier variant unraveled the processed mapcube directly:
    # umc = mapcube_hpc_to_hg(aware_processed,
    #                         transform_hpc2hg_parameters,
    #                         verbose=False,
    #                         method=method)
    umc = mapcube_hpc_to_hg(segmented_maps,
                            transform_hpc2hg_parameters,
                            verbose=False,
                            method=method)
    # Transformed data
    transformed = mapcube_hpc_to_hg(mc,
                                    transform_hpc2hg_parameters,
                                    verbose=False,
                                    method=method)
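
The segmentation step above multiplies a progress mask into the running difference of the persistence transform. As a rough illustration of why that combination picks out a propagating bright front, here is a minimal single-pixel numpy sketch, assuming (as a simplification, not AWARE's actual mapcube_tools implementation) that the persistence transform keeps the running maximum of each pixel over time; the toy data are hypothetical.

import numpy as np

# One pixel's brightness over five frames (hypothetical toy data).
frames = np.array([1.0, 3.0, 2.0, 5.0, 4.0])

# Persistence: the brightest value seen so far at each time step.
persistence = np.maximum.accumulate(frames)   # [1., 3., 3., 5., 5.]

# Running difference of the persistence: non-zero only when a pixel
# brightens beyond its previous maximum, which is how newly brightened
# pixels in a propagating front are picked out.
running_diff = np.diff(persistence)           # [2., 0., 2., 0.]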
Example #2
# Imports needed by this example.  mapcube_tools, aware_utils and the
# module-private helpers _apply_median_filter and _apply_closing come from
# the AWARE package itself.
import pickle
from copy import deepcopy

import numpy as np
import numpy.ma as ma
import astropy.units as u
from skimage.morphology import disk
from sunpy.map import Map


def processing(mc, radii=[[11, 11]*u.degree],
               clip_limit=None,
               histogram_clip=[0.0, 99.],
               func=np.sqrt,
               three_d=False,
               develop=None):
    """
    Image processing steps used to isolate the EUV wave from the data.  Use
    this part of AWARE to perform the image processing steps that segment
    propagating features that brighten new pixels as they propagate.

    Parameters
    ----------

    mc : sunpy.map.MapCube
    radii : list of lists. Each list contains a pair of numbers that describe the
    radius of the median filter and the closing operation
    histogram_clip
    clip_limit :
    func :
    three_d :
    develop :

    """

    # Define the disks that will be used on all the images.
    # The first disk in each pair is the disk that is used by the median
    # filter.  The second disk is used by the morphological closing
    # operation.
    disks = []
    for r in radii:
        e1 = (r[0]/mc[0].scale.x).to('pixel').value  # median disk radius - across wavefront
        e3 = (r[1]/mc[0].scale.x).to('pixel').value  # closing disk radius - across wavefront
        disks.append([disk(e1), disk(e3)])


    # Calculate the persistence
    new = mapcube_tools.persistence(mc)
    if develop is not None:
        develop_filepaths = {}
        filename = develop['img'] + '_persistence_mc.mp4'
        print('\nWriting persistence movie to {:s}'.format(filename))
        aware_utils.write_movie(new, filename)

        filename = develop['dat'] + '_persistence_mc.pkl'
        develop_filepaths['persistence_mc'] = filename
        print('\nWriting persistence mapcube to {:s}'.format(filename))
        with open(filename, 'wb') as f:
            pickle.dump(new, f)

    # Calculate the running difference
    new = mapcube_tools.running_difference(new)
    if develop is not None:
        filename = develop['img'] + '_rdpi_mc.mp4'
        print('\nWriting RDPI movie to {:s}'.format(filename))
        aware_utils.write_movie(new, filename)

        filename = develop['dat'] + '_rdpi_mc.pkl'
        develop_filepaths['rdpi_mc'] = filename
        print('\nWriting RDPI mapcube to {:s}'.format(filename))
        with open(filename, 'wb') as f:
            pickle.dump(new, f)

    # Storage for the processed mapcube.
    new_mc = []

    # Only want positive differences, so everything lower than zero
    # should be set to zero
    mc_data = func(new.as_array())
    mc_data[mc_data < 0.0] = 0.0

    # Clip the data to be within a range, and then normalize it.
    if clip_limit is None:
        cl = np.nanpercentile(mc_data, histogram_clip)
    else:
        cl = clip_limit
    mc_data[mc_data > cl[1]] = cl[1]
    mc_data = (mc_data - cl[0]) / (cl[1] - cl[0])

    # Get rid of NaNs
    nans_here = np.logical_not(np.isfinite(mc_data))
    nans_replaced = deepcopy(mc_data)
    nans_replaced[nans_here] = 0.0

    # Clean the data to isolate the wave front.  Use three dimensional
    # operations from scipy.ndimage.  This approach should get rid of
    # more noise and have better continuity in the time-direction.
    final = np.zeros_like(mc_data, dtype=np.float32)

    # Do the cleaning and isolation operations on multiple length-scales,
    # and add up the final results.
    nr = deepcopy(nans_replaced)
    # Use three-dimensional filters
    for j, d in enumerate(disks):
        print('\n', nr.shape, '\n', 'started median filter.')
        nr = _apply_median_filter(nr, d[0], three_d)
        if develop is not None:
            filename = develop['dat'] + '_np_median_dc_{:n}.npy'.format(j)
            develop_filepaths['np_median_dc'] = filename
            print('\nWriting results of median filter to {:s}'.format(filename))
            f = open(filename, 'wb')
            np.save(f, nr)
            f.close()

        print(' started grey closing.')
        # Use the second disk of the pair, which defines the closing operation.
        nr = _apply_closing(nr, d[1], three_d)
        if develop is not None:
            filename = develop['dat'] + '_np_closing_dc_{:n}.npy'.format(j)
            develop_filepaths['np_closing_dc'] = filename
            print('\nWriting results of closing to {:s}'.format(filename))
            with open(filename, 'wb') as f:
                np.save(f, nr)

        # Sum the results over all length-scales.
        final += nr*1.0

    # If in development mode, dump out the metadata and the NaN mask.
    if develop:
        filename = develop['dat'] + '_np_meta.pkl'
        develop_filepaths['np_meta'] = filename
        print('\nWriting all meta data information to {:s}'.format(filename))
        with open(filename, 'wb') as f:
            pickle.dump(mc.all_meta(), f)

        filename = develop['dat'] + '_np_nans.npy'
        develop_filepaths['np_nans'] = filename
        print('\nWriting all nans to {:s}'.format(filename))
        with open(filename, 'wb') as f:
            np.save(f, nans_here)

    # Create the list that will be turned in to a mapcube
    for i, m in enumerate(new):
        new_map = Map(ma.masked_array(final[:, :, i],
                                      mask=nans_here[:, :, i]),
                      m.meta)
        new_map.plot_settings = deepcopy(m.plot_settings)
        new_mc.append(new_map)

    # Return the cleaned mapcube
    if develop:
        return Map(new_mc, cube=True), develop_filepaths
    else:
        return Map(new_mc, cube=True)
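
A hedged usage sketch for the function above. The AIA file pattern and the develop output prefixes are hypothetical, and the older sunpy Map factory is assumed to build a MapCube from a list of files when given cube=True.

import glob

import astropy.units as u
from sunpy.map import Map

# Hypothetical AIA 171 files; any time-ordered EUV sequence would do.
mc = Map(sorted(glob.glob('aia_171_*.fits')), cube=True)

# Two length-scales: clean at 11 and 22 degrees across the wavefront.
processed, develop_filepaths = processing(
    mc,
    radii=[[11, 11]*u.degree, [22, 22]*u.degree],
    develop={'img': '/tmp/aware/img', 'dat': '/tmp/aware/dat'})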
Example #3
# Imports needed by this example.  mapcube_tools and aware_utils come from
# the AWARE package; bytescale is from scipy versions earlier than 1.2.
from copy import deepcopy

import numpy as np
import numpy.ma as ma
import astropy.units as u
from scipy.misc import bytescale
from skimage.filters.rank import median
from skimage.morphology import closing, disk
from sunpy.map import Map


def processing(mc, radii=[[11, 11]*u.degree],
               clip_limit=None,
               histogram_clip=[0.0, 99.],
               func=np.sqrt,
               develop=False,
               verbose=True):
    """
    Image processing steps used to isolate the EUV wave from the data.  Use
    this part of AWARE to perform the image processing steps that segment
    propagating features that brighten new pixels as they propagate.

    Parameters
    ----------

    mc : sunpy.map.MapCube
    radii : list of lists. Each list contains a pair of numbers that describe the
    radius of the median filter and the closing operation
    histogram_clip
    func
    """

    # Define the disks that will be used on all the images.
    # The first disk in each pair is the disk that is used by the median
    # filter.  The second disk is used by the morphological closing
    # operation.
    disks = []
    for r in radii:
        e1 = (r[0]/mc[0].scale.x).to('pixel').value  # median disk radius - across wavefront
        e3 = (r[1]/mc[0].scale.x).to('pixel').value  # closing disk radius - across wavefront
        disks.append([disk(e1), disk(e3)])

    # For the dump images
    rstring = ''
    for r in radii:
        z = '%i_%i__' % (r[0].value, r[1].value)
        rstring += z

    # Calculate the persistence
    new = mapcube_tools.persistence(mc)
    if develop:
        aware_utils.dump_images(new, rstring, '%s_1_persistence' % rstring)

    # Calculate the running difference
    new = mapcube_tools.running_difference(new)
    if develop:
        aware_utils.dump_images(new, rstring, '%s_2_rdiff' % rstring)

    # Storage for the processed mapcube.
    new_mc = []

    # Only want positive differences, so everything lower than zero
    # should be set to zero
    mc_data = func(new.as_array())
    mc_data[mc_data < 0.0] = 0.0

    # Clip the data to be within a range, and then normalize it.
    if clip_limit is None:
        cl = np.nanpercentile(mc_data, histogram_clip)
    else:
        cl = clip_limit
    mc_data[mc_data > cl[1]] = cl[1]
    mc_data = (mc_data - cl[0]) / (cl[1] - cl[0])

    # Get each map out of the cube and clean it up to better isolate the
    # wavefront.
    for im, m in enumerate(new):
        if verbose:
            print("  AWARE: processing map %i out of %i" % (im, len(new)))
        # Dump images - identities
        ident = (rstring, im)

        # Get the data for this image; it has already been rescaled and
        # clipped above.
        f_data = mc_data[:, :, im]

        # Replace the nans with zeros - the reason for doing this rather than
        # something more sophisticated is that nans will not contribute
        # greatly to the final answer.  The nans are put back in at the end
        # and get carried through in the maps.
        nans_here = np.logical_not(np.isfinite(f_data))
        nans_replaced = deepcopy(f_data)
        nans_replaced[nans_here] = 0.0

        # Byte scale the data - the rank median filter expects uint8 input.
        new_data = bytescale(nans_replaced)
        if develop:
            aware_utils.dump_image(new_data, rstring, '%s_345_bytscale_%i_%05d.png' % (rstring, im, im))

        # Final image used to measure the location of the wave front
        final_image = np.zeros_like(new_data, dtype=np.float32)

        # Clean the data to isolate the wave front.
        for j, d in enumerate(disks):
            # Get rid of noise by applying the median filter.  Although the
            # input is a byte array make sure that the output is a floating
            # point array for use with the morphological closing operation.
            new_d = 1.0*median(new_data, d[0])
            if develop:
                aware_utils.dump_image(new_d, rstring, '%s_6_median_%i_%05d.png' % (rstring, radii[j][0].value, im))

            # Apply the morphological closing operation to rejoin separated
            # parts of the wave front.
            new_d = closing(new_d, d[1])
            if develop:
                aware_utils.dump_image(new_d, rstring, '%s_7_closing_%i_%05d.png' % (rstring, radii[j][1].value, im))

            # Further insurance that we get floating point arrays which are
            # summed below.
            final_image += new_d*1.0

        if develop:
            aware_utils.dump_image(final_image, rstring, '%s_final_%05d.png' % ident)

        # Put the NaNs back in; they are carried through into the maps.
        final_image[nans_here] = np.nan

        # New mapcube list
        new_mc.append(Map(ma.masked_array(final_image, mask=nans_here), m.meta))

    # Return the cleaned mapcube
    return Map(new_mc, cube=True)
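
To give a feel for the per-image clean-up inside the loop above, here is a minimal standalone sketch on hypothetical toy data; the disk radius is arbitrary, and the rank median filter's preference for uint8 input is the reason the function byte-scales its data first.

import numpy as np
from skimage.filters.rank import median
from skimage.morphology import closing, disk

# Hypothetical byte-scaled frame standing in for a running-difference image.
img = (255*np.random.rand(64, 64)).astype(np.uint8)

selem = disk(11)                    # arbitrary radius in pixels
cleaned = 1.0*median(img, selem)    # remove noise; force a float array
cleaned = closing(cleaned, selem)   # rejoin separated parts of the front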
Example #4
def persistence(self):
    # Calling self.__init__ would return None; build and return a new
    # instance holding the persistence-transformed mapcube instead.
    return type(self)(mapcube_tools.persistence(self.mc))
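
For context, a minimal sketch of the kind of wrapper class this method appears to belong to; the class name and constructor are assumptions inferred from the use of self.mc.

import mapcube_tools  # from the AWARE package

class AwareCube:  # hypothetical wrapper name
    def __init__(self, mc):
        self.mc = mc  # the underlying sunpy mapcube

    def persistence(self):
        # Wrap the persistence-transformed mapcube in a new instance,
        # leaving the original untouched.
        return type(self)(mapcube_tools.persistence(self.mc))

# Usage: chaining returns new wrappers rather than mutating in place.
# result = AwareCube(mc).persistence()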