Example #1
def running_difference(mc, offset=1, use_offset_for_meta='mean',
                       image_normalize=True):
    """
    Calculate the running difference of a mapcube.

    Parameters
    ----------
    mc : sunpy.map.MapCube
       A sunpy mapcube object

    offset : int
       Calculate the running difference between map 'i + offset' and map 'i'.

    use_offset_for_meta : {'ahead', 'behind', 'mean'}
       Which meta header to use in layer 'i' of the returned mapcube: that of
       map 'i + offset' (when set to 'ahead') or that of map 'i' (when set to
       'behind').  When set to 'mean', the 'ahead' meta object is copied, with
       the observation date replaced by the mean of the 'ahead' and 'behind'
       observation dates.

    image_normalize : bool
        If True, apply the same image normalization to all maps in the
        returned mapcube.

    Returns
    -------
    sunpy.map.MapCube
       A mapcube containing the running difference of the input mapcube.
       The value normalization function used in plotting the data is changed,
       prettifying movies of the resultant mapcube.
    """
    # Create a list containing the data for the new map object
    new_mc = []
    for i in range(0, len(mc.maps) - offset):
        new_data = mc[i + offset].data - mc[i].data
        if use_offset_for_meta == 'ahead':
            new_meta = mc[i + offset].meta
            plot_settings = mc[i + offset].plot_settings
        elif use_offset_for_meta == 'behind':
            new_meta = mc[i].meta
            plot_settings = mc[i].plot_settings
        elif use_offset_for_meta == 'mean':
            new_meta = deepcopy(mc[i + offset].meta)
            new_meta['date_obs'] = _mean_time([parse_time(mc[i + offset].date),
                                               parse_time(mc[i].date)])
            plot_settings = mc[i + offset].plot_settings
        else:
            raise ValueError('The value of the keyword "use_offset_for_meta" has not been recognized.')

        # Create the new map and carry over the selected plot settings.
        new_map = Map(new_data, new_meta)
        new_map.plot_settings = plot_settings
        new_mc.append(new_map)

    # Create the new mapcube and return
    if image_normalize:
        return movie_normalization(Map(new_mc, cube=True), stretch=LinearStretch())
    else:
        return Map(new_mc, cube=True)
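
A minimal usage sketch of the function above (the mapcube `mc_in` is hypothetical):

# Difference each map against the map two frames ahead, keep the 'behind'
# map's metadata, and skip the movie normalization step.
diff_mc = running_difference(mc_in, offset=2, use_offset_for_meta='behind',
                             image_normalize=False)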
Example #2
def persistence(mc, func=np.max, image_normalize=True):
    """
    Parameters
    ----------
    mc : sunpy.map.MapCube
       A sunpy mapcube object

    Returns
    -------
    sunpy.map.MapCube
       A mapcube containing the persistence transform of the input mapcube.
       The value normalization function used in plotting the data is changed,
       prettifying movies of resultant mapcube.
    """

    # Get the persistence transform
    new_datacube = persistence_dc(mc.as_array(), func=func)

    # Create a list containing the data for the new map object
    new_mc = []
    for i, m in enumerate(mc):
        new_map = Map(new_datacube[:, :, i], m.meta)
        new_map.plot_settings = deepcopy(m.plot_settings)
        new_mc.append(new_map)

    # Create the new mapcube and return
    if image_normalize:
        return movie_normalization(Map(new_mc, cube=True))
    else:
        return Map(new_mc, cube=True)
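
`persistence_dc` is not shown here; a persistence transform with the default `func=np.max` keeps, at each pixel, the largest value seen so far, i.e. a running maximum along the time axis. A NumPy sketch of that assumed behaviour:

import numpy as np

datacube = np.random.rand(16, 16, 10)                 # (ny, nx, nt)
persisted = np.maximum.accumulate(datacube, axis=2)   # running max over time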
Example #3
def persistence(mc, func=np.max):
    """
    Parameters
    ----------
    mc : sunpy.map.MapCube
       A sunpy mapcube object

    Returns
    -------
    sunpy.map.MapCube
       A mapcube containing the persistence transform of the input mapcube.
       The value normalization function used in plotting the data is changed,
       prettifying movies of resultant mapcube.
    """

    # Get the persistence transform
    new_datacube = persistence_dc(mc.as_array(), func=func)

    # Create a list containing the data for the new map object
    new_mc = []
    for i, m in enumerate(mc):
        new_map = Map(new_datacube[:, :, i], m.meta)
        new_map.plot_settings = deepcopy(m.plot_settings)
        new_mc.append(new_map)

    # Create the new mapcube and return
    return Map(new_mc, cube=True)
Example #4
def accumulate(mc, accum, normalize=True):
    """
    Parameters
    ----------
    mc : sunpy.map.MapCube
       A sunpy mapcube object

    accum :

    normalize :

    Returns
    -------
    sunpy.map.MapCube
       A summed mapcube in the map layer (time) direction.

    """

    # counter for number of maps.
    j = 0

    # storage for the returned maps
    maps = []
    nmaps = len(mc)

    while j + accum <= nmaps:
        i = 0
        these_map_times = []
        while i < accum:
            this_map = mc[i + j]
            these_map_times.append(parse_time(this_map.date))
            if normalize:
                normalization = this_map.exposure_time
            else:
                normalization = 1.0
            if i == 0:
                # Emission rate
                m = this_map.data / normalization
            else:
                # Emission rate
                m += this_map.data / normalization
            i += 1
        j += accum
        # Make a copy of the meta header and set the exposure time to accum,
        # indicating that 'accum' normalized exposures were summed.
        new_meta = deepcopy(this_map.meta)
        new_meta['exptime'] = np.float64(accum)

        # Set the observation time to the latest of the times used to form
        # the map.
        new_meta['date_obs'] = _max_time(these_map_times)

        # Create the map list that will be used to make the mapcube
        new_map = Map(m, new_meta)
        new_map.plot_settings = deepcopy(this_map.plot_settings)
        maps.append(new_map)

    # Create the new mapcube and return
    return Map(maps, cube=True)
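
A usage sketch with a hypothetical mapcube `mc_in`: non-overlapping groups of four exposure-normalized maps are summed, so a 12-map cube yields a 3-map cube.

summed_mc = accumulate(mc_in, 4, normalize=True)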
Example #5
def accumulate(mc, accum, normalize=True):
    """
    Parameters
    ----------
    mc : sunpy.map.MapCube
       A sunpy mapcube object

    accum :

    normalize :

    Returns
    -------
    sunpy.map.MapCube
       A summed mapcube in the map layer (time) direction.

    """

    # counter for number of maps.
    j = 0

    # storage for the returned maps
    maps = []
    nmaps = len(mc)

    while j + accum <= nmaps:
        i = 0
        these_map_times = []
        while i < accum:
            this_map = mc[i + j]
            these_map_times.append(parse_time(this_map.date))
            if normalize:
                normalization = this_map.exposure_time
            else:
                normalization = 1.0
            if i == 0:
                # Emission rate
                m = this_map.data / normalization
            else:
                # Emission rate
                m += this_map.data / normalization
            i += 1
        j += accum
        # Make a copy of the meta header and set the exposure time to accum,
        # indicating that 'accum' normalized exposures were summed.
        new_meta = deepcopy(this_map.meta)
        new_meta['exptime'] = np.float64(accum)

        # Set the observation time to the average of the times used to form
        # the map.
        new_meta['date_obs'] = _mean_time(these_map_times)

        # Create the map list that will be used to make the mapcube
        new_map = Map(m, new_meta)
        new_map.plot_settings = deepcopy(this_map.plot_settings)
        maps.append(new_map)

    # Create the new mapcube and return
    return Map(maps, cube=True)
Example #6
def running_difference(mc, offset=1, use_offset_for_meta='ahead'):
    """
    Calculate the running difference of a mapcube.

    Parameters
    ----------
    mc : sunpy.map.MapCube
       A sunpy mapcube object

    offset : int
       Calculate the running difference between map 'i + offset' and map 'i'.

    use_offset_for_meta : {'ahead', 'behind', 'mean'}
       Which meta header to use in layer 'i' of the returned mapcube: that of
       map 'i + offset' (when set to 'ahead') or that of map 'i' (when set to
       'behind').  When set to 'mean', the 'ahead' meta object is copied, with
       the observation date replaced by the mean of the 'ahead' and 'behind'
       observation dates.

    Returns
    -------
    sunpy.map.MapCube
       A mapcube containing the running difference of the input mapcube.
       The value normalization function used in plotting the data is changed,
       prettifying movies of the resultant mapcube.
    """
    # Create a list containing the data for the new map object
    new_mc = []
    for i in range(0, len(mc.maps) - offset):
        new_data = mc[i + offset].data - mc[i].data
        if use_offset_for_meta == 'ahead':
            new_meta = mc[i + offset].meta
            plot_settings = mc[i + offset].plot_settings
        elif use_offset_for_meta == 'behind':
            new_meta = mc[i].meta
            plot_settings = mc[i].plot_settings
        elif use_offset_for_meta == 'mean':
            new_meta = deepcopy(mc[i + offset].meta)
            new_meta['date_obs'] = _mean_time([parse_time(mc[i + offset].date),
                                               parse_time(mc[i].date)])
            plot_settings = mc[i + offset].plot_settings
        else:
            raise ValueError('The value of the keyword "use_offset_for_meta" has not been recognized.')

        # Create the new map and carry over the selected plot settings.
        new_map = Map(new_data, new_meta)
        new_map.plot_settings = plot_settings
        new_mc.append(new_map)

    # Create the new mapcube and return
    return Map(new_mc, cube=True)
Example #7
def add_noise(params, wave_maps, verbose=False):
    """
    Adds simulated noise to a list of maps
    """
    wave_maps_noise = []
    for current_wave_map in wave_maps:
        if verbose:
            print("  * Adding noise to map at " + str(current_wave_map.date))

        noise = noise_random(params, current_wave_map.data.shape)
        struct = noise_structure(params, current_wave_map.data.shape)

        noisy_wave_map = Map(current_wave_map.data + noise + struct,
                             current_wave_map.meta)
        noisy_wave_map.plot_settings = deepcopy(current_wave_map.plot_settings)
        wave_maps_noise.append(noisy_wave_map)

    return Map(wave_maps_noise, cube=True)
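
The noise model is purely additive: map data, random noise, and structured noise are summed pixel-wise. A self-contained sketch with hypothetical Poisson shot noise standing in for `noise_random` (not shown here):

import numpy as np

data = 100.0 * np.ones((8, 8))
noise = np.random.poisson(data) - data   # hypothetical shot-noise term
noisy = data + noise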
Example #8
def clean(params, wave_maps, verbose=False):
    """
    Cleans a list of maps
    """
    wave_maps_clean = []
    for current_wave_map in wave_maps:
        if verbose:
            print("  * Cleaning map at "+str(current_wave_map.date))

        data = np.asarray(current_wave_map.data)
        if params.get("clean_nans"):
            data[np.isnan(data)] = 0.
                
        cleaned_wave_map = Map(data, current_wave_map.meta)
        # cleaned_wave_map.name = current_wave_map.name
        cleaned_wave_map.meta['date-obs'] = current_wave_map.date
        cleaned_wave_map.plot_settings = deepcopy(current_wave_map.plot_settings)
        wave_maps_clean.append(cleaned_wave_map)

    return Map(wave_maps_clean, cube=True)
Example #9
def synop_reproject(m, shape_out):
    """
    Reproject a helioprojective map into a synoptic map.
    """
    synop_map_path = synoptic_map_path(m.date.to_datetime())
    if not synop_map_path.exists():
        m.meta['rsun_ref'] = sunpy.sun.constants.radius.to_value(u.m)
        header = synop_header(shape_out, m.date)
        array, footprint = reproject_interp(m,
                                            WCS(header),
                                            shape_out=shape_out)
        new_map = Map((array, header))
        new_map.save(str(synop_map_path))

    print(f'Loading {synop_map_path}')
    new_map = Map(synop_map_path)
    new_map.plot_settings = m.plot_settings
    for key in m.meta:
        if key not in new_map.meta:
            new_map.meta[key] = m.meta[key]
    return new_map
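
The function doubles as a disk cache: the `reproject_interp` call runs only when the target FITS file is missing, and the map is always re-read from disk afterwards. A usage sketch with a hypothetical helioprojective map `aia_map`:

synoptic = synop_reproject(aia_map, [720, 1440])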
Example #10
def create_synoptic_map(endtime):
    """
    Create an AIA synoptic map, using 23 daily AIA 193 maps ending on the
    endtime given. Note that the maps are taken from the start of each day.

    Returns
    -------
    sunpy.map.Map : synoptic map
    """
    shape = [720, 1440]
    data = np.zeros(shape)
    weight_sum = np.zeros(shape)
    nmaps = 23
    for i in range(nmaps)[::-1]:
        dtime = endtime - timedelta(days=i)
        try:
            euvi_map = load_start_of_day_map(dtime)
        except RuntimeError:
            print(f'Failed to load map for {dtime}')
            continue

        aia_synop_map = synop_reproject(euvi_map, shape)
        weights = synop_weights(aia_synop_map)

        aia_data = aia_synop_map.data
        aia_data[np.isnan(aia_data)] = 0
        data += (aia_data * weights)
        weight_sum += weights

    weight_sum[weight_sum == 0] = np.nan
    data /= weight_sum
    meta = aia_synop_map.meta
    meta['date-obs'] = dtime.strftime('%Y-%m-%dT%H:%M:%S')

    synop_map = Map((data, meta))
    synop_map.plot_settings = aia_synop_map.plot_settings
    synop_map.meta['crln_new'] = euvi_map.meta['crln_obs']
    return synop_map
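
The accumulation loop computes a per-pixel weighted mean, sum(w_k * m_k) / sum(w_k), over the contributing maps. A two-map NumPy sketch:

import numpy as np

m1, w1 = 1.0 * np.ones((2, 2)), np.ones((2, 2))
m2, w2 = 3.0 * np.ones((2, 2)), np.ones((2, 2))
blend = (m1 * w1 + m2 * w2) / (w1 + w2)   # every pixel equals 2.0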
Example #11
def transform(params, wave_maps, verbose=False):
    """
    Transform raw data in HG' coordinates to HPC coordinates
    
    HG' = HG, except center at wave epicenter
    """
    solar_rotation_rate = params["rotation"]

    hglt_obs = params["hglt_obs"].to('degree').value
    # crln_obs = params["crln_obs"]
    
    epi_lat = params["epi_lat"].to('degree').value
    epi_lon = params["epi_lon"].to('degree').value

    # Parameters for the HPC co-ordinates
    hpcx_min = params["hpcx_min"].to('arcsec').value
    hpcx_max = params["hpcx_max"].to('arcsec').value
    hpcx_bin = params["hpcx_bin"].to('arcsec').value

    hpcy_min = params["hpcy_min"].to('arcsec').value
    hpcy_max = params["hpcy_max"].to('arcsec').value
    hpcy_bin = params["hpcy_bin"].to('arcsec').value

    hpcx_num = int(round((hpcx_max-hpcx_min)/hpcx_bin))
    hpcy_num = int(round((hpcy_max-hpcy_min)/hpcy_bin))

    # Storage for the HPC version of the input maps
    wave_maps_transformed = []

    # The properties of this map are used in the transform
    smap = wave_maps[0]

    # Basic dictionary version of the HPC map header
    dict_header = {
        "CDELT1": hpcx_bin,
        "NAXIS1": hpcx_num,
        "CRVAL1": hpcx_min,
        "CRPIX1": crpix12_value_for_HPC,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": hpcy_bin,
        "NAXIS2": hpcy_num,
        "CRVAL2": hpcy_min,
        "CRPIX2": crpix12_value_for_HPC,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": hglt_obs,
        "CRLN_OBS": smap.carrington_longitude.to('degree').value,
        "DSUN_OBS": sun.sunearth_distance(BASE_DATE.strftime(BASE_DATE_FORMAT)).to('meter').value,
        "DATE_OBS": BASE_DATE.strftime(BASE_DATE_FORMAT),
        "EXPTIME": 1.0
    }
    start_date = smap.date

    # Origin grid, HG'
    lon_grid, lat_grid = wcs.convert_pixel_to_data([smap.data.shape[1], smap.data.shape[0]],
                                                   [smap.scale.x.value, smap.scale.y.value],
                                                   [smap.reference_pixel.x.value, smap.reference_pixel.y.value],
                                                   [smap.reference_coordinate.x.value, smap.reference_coordinate.y.value])

    # Origin grid, HG' to HCC'
    # HCC' = HCC, except centered at wave epicenter
    x, y, z = wcs.convert_hg_hcc(lon_grid, lat_grid,
                                 b0_deg=smap.heliographic_latitude.to('degree').value,
                                 l0_deg=smap.carrington_longitude.to('degree').value,
                                 z=True)

    # Origin grid, HCC' to HCC''
    # Moves the wave epicenter to initial conditions
    # HCC'' = HCC, except assuming that HGLT_OBS = 0
    zxy_p = euler_zyz((z, x, y),
                      (epi_lon, 90.-epi_lat, 0.))

    # Destination HPC grid
    hpcx_grid, hpcy_grid = wcs.convert_pixel_to_data([dict_header['NAXIS1'], dict_header['NAXIS2']],
                                                     [dict_header['CDELT1'], dict_header['CDELT2']],
                                                     [dict_header['CRPIX1'], dict_header['CRPIX2']],
                                                     [dict_header['CRVAL1'], dict_header['CRVAL2']])

    for icwm, current_wave_map in enumerate(wave_maps):
        print(icwm, len(wave_maps))
        # Elapsed time
        td = parse_time(current_wave_map.date) - parse_time(start_date)

        # Update the header
        dict_header['DATE_OBS'] = current_wave_map.date
        dict_header['DSUN_OBS'] = current_wave_map.dsun.to('m').value

        # Origin grid, HCC'' to HCC
        # Moves the observer to HGLT_OBS and adds rigid solar rotation
        total_seconds = td.total_seconds() * u.s
        solar_rotation = (total_seconds * solar_rotation_rate).to('degree').value
        zpp, xpp, ypp = euler_zyz(zxy_p,
                                  (0., hglt_obs, solar_rotation))

        # Origin grid, HCC to HPC (arcsec)
        xx, yy = wcs.convert_hcc_hpc(xpp, ypp,
                                     dsun_meters=current_wave_map.dsun.to('m').value)

        # Coordinate positions (HPC) with corresponding map data
        points = np.vstack((xx.ravel(), yy.ravel())).T
        values = np.asarray(deepcopy(current_wave_map.data)).ravel()

        # Solar rotation can push the points off disk and into areas that
        # have nans; griddata fails if those are included.  Restrict the
        # interpolation to on-disk (zpp >= 0), finite points.

        # 2D interpolation from origin grid to destination grid
        valid_points = np.logical_and.reduce([zpp.ravel() >= 0,
                                              np.isfinite(points[:, 0]),
                                              np.isfinite(points[:, 1])])
        grid = griddata(points[valid_points],
                        values[valid_points],
                        (hpcx_grid, hpcy_grid),
                        method="linear")
        transformed_wave_map = Map(grid, MapMeta(dict_header))
        transformed_wave_map.plot_settings = deepcopy(current_wave_map.plot_settings)
        # transformed_wave_map.name = current_wave_map.name
        # transformed_wave_map.meta['date-obs'] = current_wave_map.date
        wave_maps_transformed.append(transformed_wave_map)

    return Map(wave_maps_transformed, cube=True)
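
The scattered-to-regular resampling at the heart of the loop is `scipy.interpolate.griddata`. A minimal self-contained sketch:

import numpy as np
from scipy.interpolate import griddata

pts = np.random.rand(200, 2)        # scattered (x, y) sample positions
vals = pts[:, 0] + pts[:, 1]        # field values at those positions
gx, gy = np.meshgrid(np.linspace(0, 1, 8), np.linspace(0, 1, 8))
regular = griddata(pts, vals, (gx, gy), method='linear')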
Example #12
def map_hpc_to_hg_rotate(m,
                         epi_lon=0*u.degree, epi_lat=90*u.degree,
                         lon_bin=1*u.degree, lat_bin=1*u.degree,
                         lon_num=None, lat_num=None, **kwargs):
    """
    Transform raw data in HPC coordinates to HG' coordinates

    HG' = HG, except center at wave epicenter
    """
    x, y = wcs.convert_pixel_to_data([m.data.shape[1], m.data.shape[0]],
                                     [m.scale.x.value, m.scale.y.value],
                                     [m.reference_pixel.x.value, m.reference_pixel.y.value],
                                     [m.reference_coordinate.x.value, m.reference_coordinate.y.value])

    hccx, hccy, hccz = wcs.convert_hpc_hcc(x,
                                           y,
                                           angle_units=m.spatial_units.x,
                                           dsun_meters=m.dsun.to('meter').value,
                                           z=True)

    rot_hccz, rot_hccx, rot_hccy = euler_zyz((hccz,
                                              hccx,
                                              hccy),
                                             (0.,
                                              epi_lat.to('degree').value-90.,
                                              -epi_lon.to('degree').value))

    lon_map, lat_map = wcs.convert_hcc_hg(rot_hccx,
                                          rot_hccy,
                                          b0_deg=m.heliographic_latitude.to('degree').value,
                                          l0_deg=m.heliographic_longitude.to('degree').value,
                                          z=rot_hccz)

    lon_range = (np.nanmin(lon_map), np.nanmax(lon_map))
    lat_range = (np.nanmin(lat_map), np.nanmax(lat_map))

    # This method results in a set of lons and lats that in general does not
    # exactly span the range of the data.
    # lon = np.arange(lon_range[0], lon_range[1], lon_bin)
    # lat = np.arange(lat_range[0], lat_range[1], lat_bin)

    # This method gives a set of lons and lats that exactly spans the range of
    # the data at the expense of having to define values of cdelt1 and cdelt2
    if lon_num is None:
        cdelt1 = lon_bin.to('degree').value
        lon = np.arange(lon_range[0], lon_range[1], cdelt1)
    else:
        nlon = int(lon_num.to('pixel').value)
        cdelt1 = (lon_range[1] - lon_range[0]) / (1.0*nlon - 1.0)
        lon = np.linspace(lon_range[0], lon_range[1], num=nlon)

    if lat_num is None:
        cdelt2 = lat_bin.to('degree').value
        lat = np.arange(lat_range[0], lat_range[1], cdelt2)
    else:
        nlat = int(lat_num.to('pixel').value)
        cdelt2 = (lat_range[1] - lat_range[0]) / (1.0*nlat - 1.0)
        lat = np.linspace(lat_range[0], lat_range[1], num=nlat)

    # Create the grid
    x_grid, y_grid = np.meshgrid(lon, lat)

    ng_xyz = wcs.convert_hg_hcc(x_grid,
                                y_grid,
                                b0_deg=m.heliographic_latitude.to('degree').value,
                                l0_deg=m.heliographic_longitude.to('degree').value,
                                z=True)

    ng_zp, ng_xp, ng_yp = euler_zyz((ng_xyz[2],
                                     ng_xyz[0],
                                     ng_xyz[1]),
                                    (epi_lon.to('degree').value,
                                     90.-epi_lat.to('degree').value,
                                     0.))

    # The function ravel flattens the data into a 1D array
    points = np.vstack((lon_map.ravel(), lat_map.ravel())).T
    values = np.array(m.data).ravel()

    # Get rid of all of the bad (nan) indices (i.e. those off of the sun)
    index = np.isfinite(points[:, 0]) * np.isfinite(points[:, 1])
    # points = np.vstack((points[index,0], points[index,1])).T
    points = points[index]
    values = values[index]

    newdata = griddata(points, values, (x_grid, y_grid), **kwargs)
    newdata[ng_zp < 0] = np.nan

    dict_header = {
        'CDELT1': cdelt1,
        'NAXIS1': len(lon),
        'CRVAL1': lon.min(),
        'CRPIX1': crpix12_value_for_HG,
        'CRPIX2': crpix12_value_for_HG,
        'CUNIT1': "deg",
        'CTYPE1': "HG",
        'CDELT2': cdelt2,
        'NAXIS2': len(lat),
        'CRVAL2': lat.min(),
        'CUNIT2': "deg",
        'CTYPE2': "HG",
        'DATE_OBS': m.meta['date-obs'],
        'DSUN_OBS': m.dsun.to('m').value,
        "CRLN_OBS": m.carrington_longitude.to('degree').value,
        "HGLT_OBS": m.heliographic_latitude.to('degree').value,
        "HGLN_OBS": m.heliographic_longitude.to('degree').value,
        'EXPTIME': m.exposure_time.to('s').value
    }

    # Find out where the non-finites are
    mask = np.logical_not(np.isfinite(newdata))

    # Return a masked array if appropriate
    if not mask.any():
        hg = Map(newdata, MapMeta(dict_header))
    else:
        hg = Map(ma.array(newdata, mask=mask), MapMeta(dict_header))

    hg.plot_settings = m.plot_settings
    return hg
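
A sketch of the grid trade-off noted in the comments above: `np.arange` can stop short of the maximum, while `np.linspace` hits both endpoints exactly at the cost of a recomputed step.

import numpy as np

a = np.arange(0.0, 10.0, 3.0)    # [0., 3., 6., 9.]; 10 is never reached
b, step = np.linspace(0.0, 10.0, num=4, retstep=True)
# b = [0., 3.33..., 6.66..., 10.]; step = 10/3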
Example #13
def map_hg_to_hpc_rotate(m,
                         epi_lon=90*u.degree, epi_lat=0*u.degree,
                         xbin=2.4*u.arcsec, ybin=2.4*u.arcsec,
                         xnum=None, ynum=None,
                         solar_information=None, **kwargs):
    """
    Transform raw data in HG' coordinates to HPC coordinates

    HG' = HG, except center at wave epicenter
    """

    # Origin grid, HG'
    lon_grid, lat_grid = wcs.convert_pixel_to_data([m.data.shape[1], m.data.shape[0]],
                                                   [m.scale.x.value, m.scale.y.value],
                                                   [m.reference_pixel.x.value, m.reference_pixel.y.value],
                                                   [m.reference_coordinate.x.value, m.reference_coordinate.y.value])

    # Origin grid, HG' to HCC'
    # HCC' = HCC, except centered at wave epicenter
    x, y, z = wcs.convert_hg_hcc(lon_grid, lat_grid,
                                 b0_deg=m.heliographic_latitude.to('degree').value,
                                 l0_deg=m.carrington_longitude.to('degree').value,
                                 z=True)

    # Origin grid, HCC' to HCC''
    # Moves the wave epicenter to initial conditions
    # HCC'' = HCC, except assuming that HGLT_OBS = 0
    zpp, xpp, ypp = euler_zyz((z,
                               x,
                               y),
                              (epi_lon.to('degree').value,
                               90.-epi_lat.to('degree').value,
                               0.))

    # Add in a solar rotation.  Useful when creating simulated HPC data from
    # HG data.  This code was adapted from the wave simulation code of the
    # AWARE project.
    if solar_information is not None:
        hglt_obs = solar_information['hglt_obs'].to('degree').value
        solar_rotation_value = solar_information['angle_rotated'].to('degree').value
        #print(hglt_obs, solar_rotation_value)
        #print('before', zpp, xpp, ypp)
        zpp, xpp, ypp = euler_zyz((zpp,
                                   xpp,
                                   ypp),
                                  (0.,
                                   hglt_obs,
                                   solar_rotation_value))
        #print('after', zpp, xpp, ypp)
    # Origin grid, HCC to HPC (arcsec)
    # xx, yy = wcs.convert_hcc_hpc(current_wave_map.header, xpp, ypp)
    xx, yy = wcs.convert_hcc_hpc(xpp, ypp,
                                 dsun_meters=m.dsun.to('meter').value)

    # Destination HPC grid
    hpcx_range = (np.nanmin(xx), np.nanmax(xx))
    hpcy_range = (np.nanmin(yy), np.nanmax(yy))

    if xnum is None:
        cdelt1 = xbin.to('arcsec').value
        hpcx = np.arange(hpcx_range[0], hpcx_range[1], cdelt1)
    else:
        nx = int(xnum.to('pixel').value)
        cdelt1 = (hpcx_range[1] - hpcx_range[0]) / (1.0*nx - 1.0)
        hpcx = np.linspace(hpcx_range[1], hpcx_range[0], num=nx)

    if ynum is None:
        cdelt2 = ybin.to('arcsec').value
        hpcy = np.arange(hpcy_range[0], hpcy_range[1], cdelt2)
    else:
        ny = int(ynum.to('pixel').value)
        cdelt2 = (hpcy_range[1] - hpcy_range[0]) / (1.0*ny - 1.0)
        hpcy = np.linspace(hpcy_range[1], hpcy_range[0], num=ny)

    # Calculate the grid mesh
    newgrid_x, newgrid_y = np.meshgrid(hpcx, hpcy)

    #
    # CRVAL1,2 and CRPIX1,2 are calculated so that the co-ordinate system is
    # at the center of the image
    # Note that crpix[] counts pixels starting at 1
    crpix1 = 1 + hpcx.size // 2
    crval1 = hpcx[crpix1 - 1]
    crpix2 = 1 + hpcy.size // 2
    crval2 = hpcy[crpix2 - 1]
    dict_header = {
        "CDELT1": cdelt1,
        "NAXIS1": len(hpcx),
        "CRVAL1": crval1,
        "CRPIX1": crpix1,
        "CUNIT1": "arcsec",
        "CTYPE1": "HPLN-TAN",
        "CDELT2": cdelt2,
        "NAXIS2": len(hpcy),
        "CRVAL2": crval2,
        "CRPIX2": crpix2,
        "CUNIT2": "arcsec",
        "CTYPE2": "HPLT-TAN",
        "HGLT_OBS": m.heliographic_latitude.to('degree').value,  # 0.0
        # "HGLN_OBS": 0.0,
        "CRLN_OBS": m.carrington_longitude.to('degree').value,  # 0.0
        'DATE_OBS': m.meta['date-obs'],
        'DSUN_OBS': m.dsun.to('m').value,
        'EXPTIME': m.exposure_time.to('s').value
    }

    # Coordinate positions (HPC) with corresponding map data
    points = np.vstack((xx.ravel(), yy.ravel())).T
    values = np.asarray(deepcopy(m.data)).ravel()

    # Solar rotation can push the points off disk and into areas that have
    # nans; griddata fails if those are included.  Restrict the
    # interpolation to on-disk (zpp >= 0), finite points.

    # 2D interpolation from origin grid to destination grid
    valid_points = np.logical_and.reduce([zpp.ravel() >= 0,
                                          np.isfinite(points[:, 0]),
                                          np.isfinite(points[:, 1])])
    grid = griddata(points[valid_points],
                    values[valid_points],
                    (newgrid_x, newgrid_y), **kwargs)

    # Find out where the non-finites are
    mask = np.logical_not(np.isfinite(grid))

    # Return a masked array if appropriate
    if not mask.any():
        hpc = Map(grid, MapMeta(dict_header))
    else:
        hpc = Map(ma.array(grid, mask=mask), MapMeta(dict_header))

    hpc.plot_settings = m.plot_settings
    return hpc
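
A sketch of the FITS reference-pixel arithmetic used above: CRPIX counts pixels from 1, so the centre of an N-element axis is pixel 1 + N // 2, and CRVAL is the world coordinate at that pixel.

import numpy as np

hpcx = np.linspace(-100.0, 100.0, num=5)   # [-100., -50., 0., 50., 100.]
crpix1 = 1 + hpcx.size // 2                # 3 (1-indexed)
crval1 = hpcx[crpix1 - 1]                  # 0.0, the axis centre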
Example #14
def processing(mc, radii=[[11, 11]*u.degree],
               clip_limit=None,
               histogram_clip=[0.0, 99.],
               func=np.sqrt,
               three_d=False,
               develop=None):
    """
    Image processing steps used to isolate the EUV wave from the data.  This
    part of AWARE segments propagating features that brighten new pixels as
    they propagate.

    Parameters
    ----------

    mc : sunpy.map.MapCube
        The input mapcube.
    radii : list of lists
        Each element is a pair of angular sizes: the radius of the disk used
        by the median filter and the radius of the disk used by the
        morphological closing operation.
    clip_limit : list or None
        The [lower, upper] values used to clip the data; if None, the limits
        are computed from `histogram_clip`.
    histogram_clip : list
        The [lower, upper] percentiles used to clip the data when
        `clip_limit` is None.
    func : function
        A function applied to the data before clipping (default `np.sqrt`).
    three_d : bool
        If True, use three-dimensional (spatio-temporal) filtering
        operations.
    develop : dict or None
        If given, intermediate data products are written to the locations
        named by its 'img' and 'dat' entries.

    Returns
    -------
    sunpy.map.MapCube
        The processed mapcube.  If `develop` is given, a tuple of the
        mapcube and a dictionary of the file paths written is returned.

    """

    # Define the disks that will be used on all the images.
    # The first disk in each pair is the disk that is used by the median
    # filter.  The second disk is used by the morphological closing
    # operation.
    disks = []
    for r in radii:
        e1 = (r[0]/mc[0].scale.x).to('pixel').value  # median circle radius - across wavefront
        e3 = (r[1]/mc[0].scale.x).to('pixel').value  # closing circle width - across wavefront
        disks.append([disk(e1), disk(e3)])

    # For the dump images
    rstring = ''
    for r in radii:
        z = '%i_%i__' % (r[0].value, r[1].value)
        rstring += z

    # Calculate the persistence
    new = mapcube_tools.persistence(mc)
    if develop is not None:
        develop_filepaths = {}
        filename = develop['img'] + '_persistence_mc.mp4'
        print('\nWriting persistence movie to {:s}'.format(filename))
        aware_utils.write_movie(new, filename)

        filename = develop['dat'] + '_persistence_mc.pkl'
        develop_filepaths['persistence_mc'] = filename
        print('\nWriting persistence mapcube to {:s}'.format(filename))
        f = open(filename, 'wb')
        pickle.dump(new, f)
        f.close()

    # Calculate the running difference
    new = mapcube_tools.running_difference(new)
    if develop is not None:
        filename = develop['img'] + '_rdpi_mc.mp4'
        print('\nWriting RDPI movie to {:s}'.format(filename))
        aware_utils.write_movie(new, filename)

        filename = develop['dat'] + '_rdpi_mc.pkl'
        develop_filepaths['rdpi_mc'] = filename
        print('\nWriting RDPI mapcube to {:s}'.format(filename))
        f = open(filename, 'wb')
        pickle.dump(new, f)
        f.close()

    # Storage for the processed mapcube.
    new_mc = []

    # Only want positive differences, so everything lower than zero
    # should be set to zero
    mc_data = func(new.as_array())
    mc_data[mc_data < 0.0] = 0.0

    # Clip the data to be within a range, and then normalize it.
    if clip_limit is None:
        cl = np.nanpercentile(mc_data, histogram_clip)
    else:
        cl = clip_limit
    mc_data[mc_data > cl[1]] = cl[1]
    mc_data = (mc_data - cl[0]) / (cl[1]-cl[0])

    # Get rid of NaNs
    nans_here = np.logical_not(np.isfinite(mc_data))
    nans_replaced = deepcopy(mc_data)
    nans_replaced[nans_here] = 0.0

    # Clean the data to isolate the wave front.  Use three dimensional
    # operations from scipy.ndimage.  This approach should get rid of
    # more noise and have better continuity in the time-direction.
    final = np.zeros_like(mc_data, dtype=np.float32)

    # Do the cleaning and isolation operations on multiple length-scales,
    # and add up the final results.
    nr = deepcopy(nans_replaced)
    # Use three-dimensional filters
    for j, d in enumerate(disks):
        pancake = np.swapaxes(np.tile(d[0], (3, 1, 1)), 0, -1)

        print('\n', nr.shape, pancake.shape, '\n', 'started median filter.')
        nr = _apply_median_filter(nr, d[0], three_d)
        if develop is not None:
            filename = develop['dat'] + '_np_median_dc_{:n}.npy'.format(j)
            develop_filepaths['np_median_dc'] = filename
            print('\nWriting results of median filter to {:s}'.format(filename))
            f = open(filename, 'wb')
            np.save(f, nr)
            f.close()

        print(' started grey closing.')
        nr = _apply_closing(nr, d[0], three_d)
        if develop is not None:
            filename = develop['dat'] + '_np_closing_dc_{:n}.npy'.format(j)
            develop_filepaths['np_closing_dc'] = filename
            print('\nWriting results of closing to {:s}'.format(filename))
            f = open(filename, 'wb')
            np.save(f, nr)
            f.close()

        # Add this length-scale's contribution to the final result
        final += nr*1.0

    # If in development mode, now dump out the meta's and the nans
    if develop:
        filename = develop['dat'] + '_np_meta.pkl'
        develop_filepaths['np_meta'] = filename
        print('\nWriting all meta data information to {:s}'.format(filename))
        f = open(filename, 'wb')
        pickle.dump(mc.all_meta(), f)
        f.close()
        filename = develop['dat'] + '_np_nans.npy'
        develop_filepaths['np_nans'] = filename
        print('\nWriting all nans to {:s}'.format(filename))
        f = open(filename, 'wb')
        np.save(f, nans_here)
        f.close()

    # Create the list that will be turned in to a mapcube
    for i, m in enumerate(new):
        new_map = Map(ma.masked_array(final[:, :, i],
                                      mask=nans_here[:, :, i]),
                      m.meta)
        new_map.plot_settings = deepcopy(m.plot_settings)
        new_mc.append(new_map)

    # Return the cleaned mapcube
    if develop:
        return Map(new_mc, cube=True), develop_filepaths
    else:
        return Map(new_mc, cube=True)
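
A sketch of the clip-and-rescale step in the middle of the function: values are clipped at the upper percentile limit and then mapped onto [0, 1].

import numpy as np

data = np.random.randn(1000)
lo_v, hi_v = np.nanpercentile(data, [0.0, 99.0])
data[data > hi_v] = hi_v
scaled = (data - lo_v) / (hi_v - lo_v)   # now in [0, 1]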
Example #15
def simulate_raw(params, steps, verbose=False):
    """
    Simulate data in HG' coordinates
    
    HG' = HG, except center at wave epicenter
    """
    cadence = params["cadence"]
    direction = 180. + params["direction"].to('degree').value
    
    width_coeff = prep_coeff(params["width"])
    wave_thickness_coeff = prep_coeff(params["wave_thickness"])
    wave_normalization_coeff = prep_coeff(params["wave_normalization"])
    speed_coeff = prep_speed_coeff(params["speed"], params["acceleration"])

    lat_min = params["lat_min"].to('degree').value
    lat_max = params["lat_max"].to('degree').value
    lat_bin = params["lat_bin"].to('degree').value
    lon_min = params["lon_min"].to('degree').value
    lon_max = params["lon_max"].to('degree').value
    lon_bin = params["lon_bin"].to('degree').value

    # This roundabout approach recalculates lat_bin and lon_bin to produce
    # equally sized bins to exactly span the min/max ranges
    lat_num = int(round((lat_max-lat_min)/lat_bin))
    lat_edges, lat_bin = np.linspace(lat_min, lat_max, lat_num+1, retstep=True)

    lon_num = int(round((lon_max-lon_min)/lon_bin))
    lon_edges, lon_bin = np.linspace(lon_min, lon_max, lon_num+1, retstep=True)

    # Propagates from 90. down to lat_min, irrespective of lat_max
    p = np.poly1d([speed_coeff[2]/3., speed_coeff[1]/2., speed_coeff[0],
                   -(90.-lat_min)])
    # p = np.poly1d([0.0, speed_coeff[1], speed_coeff[2]/2.,
    #               -(90.-lat_min)])
    # Will fail if wave does not propagate all the way to lat_min
    # duration = p.r[np.logical_and(p.r.real > 0, p.r.imag == 0)][0]
    
    # steps = int(duration/cadence)+1
    # if steps > params["max_steps"]:
    #    steps = params["max_steps"]
    
    # Maybe use np.poly1d() instead to do the polynomial calculation?
    time = params["start_time_offset"] + np.arange(steps)*cadence
    time_powers = np.vstack((time**0, time**1, time**2))
    
    width = np.dot(width_coeff, time_powers).ravel()
    wave_thickness = np.dot(wave_thickness_coeff, time_powers).ravel()
    wave_normalization = np.dot(wave_normalization_coeff, time_powers).ravel()

    # Position
    # Propagates from 90., irrespective of lat_max
    wave_peak = 90.-(p(time)+(90.-lat_min))

    out_of_bounds = np.logical_or(wave_peak < lat_min, wave_peak > lat_max)
    if out_of_bounds.any():
        steps = np.where(out_of_bounds)[0][0]

    # Storage for the wave maps
    wave_maps = []

    # Header of the wave maps
    dict_header = {
        "CDELT1": lon_bin,
        "NAXIS1": lon_num,
        "CRVAL1": lon_min,
        "CRPIX1": crpix12_value_for_HG,
        "CUNIT1": "deg",
        "CTYPE1": "HG",
        "CDELT2": lat_bin,
        "NAXIS2": lat_num,
        "CRVAL2": lat_min,
        "CRPIX2": crpix12_value_for_HG,
        "CUNIT2": "deg",
        "CTYPE2": "HG",
        "HGLT_OBS": 0.0,  # (sun.heliographic_solar_center(BASE_DATE))[1],  # the value of HGLT_OBS from Earth at the given date
        "CRLN_OBS": 0.0,  # (sun.heliographic_solar_center(BASE_DATE))[0],  # the value of CRLN_OBS from Earth at the given date
        "DSUN_OBS": sun.sunearth_distance(BASE_DATE.strftime(BASE_DATE_FORMAT)).to('m').value,
        "DATE_OBS": BASE_DATE.strftime(BASE_DATE_FORMAT),
        "EXPTIME": 1.0
    }

    if verbose:
        print("  * Simulating "+str(steps)+" raw maps.")

    for istep in range(0, steps):

        # Current datetime
        current_datetime = BASE_DATE + datetime.timedelta(seconds=time[istep])

        # Update the header to set the correct observation time and earth-sun
        # distance
        dict_header['DATE_OBS'] = current_datetime.strftime(BASE_DATE_FORMAT)

        # Update the Earth-Sun distance
        dict_header['DSUN_OBS'] = sun.sunearth_distance(dict_header['DATE_OBS']).to('m').value

        # Update the heliographic latitude
        dict_header['HGLT_OBS'] = 0.0  # (sun.heliographic_solar_center(dict_header['DATE_OBS']))[1].to('degree').value

        # Update the heliographic longitude
        dict_header['CRLN_OBS'] = 0.0  # (sun.heliographic_solar_center(dict_header['DATE_OBS']))[0].to('degree').value

        # Gaussian profile in longitudinal direction
        # Does not take into account spherical geometry (i.e., change in area
        # element)
        if wave_thickness[istep] <= 0:
            print("  * ERROR: wave thickness is non-physical!")
        z = (lat_edges-wave_peak[istep])/wave_thickness[istep]
        wave_1d = wave_normalization[istep]*(ndtr(np.roll(z, -1))-ndtr(z))[0:lat_num]
        wave_1d /= lat_bin
        
        wave_lon_min = direction-width[istep]/2
        wave_lon_max = direction+width[istep]/2

        if width[istep] < 360.:
            # Do these need to be np.remainder() instead?
            wave_lon_min_mod = ((wave_lon_min+180.) % 360.)-180.
            wave_lon_max_mod = ((wave_lon_max+180.) % 360.)-180.
            
            index1 = np.arange(lon_num+1)[np.roll(lon_edges, -1) > min(wave_lon_min_mod, wave_lon_max_mod)][0]
            index2 = np.roll(np.arange(lon_num+1)[lon_edges < max(wave_lon_min_mod, wave_lon_max_mod)], 1)[0]
    
            wave_lon = np.zeros(lon_num)
            wave_lon[index1+1:index2] = 1.
            # Possible weirdness if index1 == index2
            wave_lon[index1] += (lon_edges[index1+1]-min(wave_lon_min_mod, wave_lon_max_mod))/lon_bin
            wave_lon[index2] += (max(wave_lon_min_mod, wave_lon_max_mod)-lon_edges[index2])/lon_bin
            
            if wave_lon_min_mod > wave_lon_max_mod:
                wave_lon = 1.-wave_lon
        else:
            wave_lon = np.ones(lon_num)
        
        # Outer product of the latitudinal and longitudinal profiles gives
        # the 2D wave
        wave = np.outer(wave_1d, wave_lon)

        # Create the new map
        new_map = Map(wave, MapMeta(dict_header))
        new_map.plot_settings = {'cmap': cm.gray,
                                 'norm': ImageNormalize(stretch=LinearStretch()),
                                 'interpolation': 'nearest',
                                 'origin': 'lower'
                                 }
        # Update the list of maps
        wave_maps += [new_map]

    return Map(wave_maps, cube=True)
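
A sketch of the binned Gaussian profile built above: differencing the standard normal CDF (`scipy.special.ndtr`) across consecutive bin edges gives the fraction of a Gaussian falling inside each bin.

import numpy as np
from scipy.special import ndtr

edges = np.linspace(-3.0, 3.0, 13)          # bin edges in units of sigma
frac = ndtr(edges[1:]) - ndtr(edges[:-1])   # per-bin Gaussian mass
# frac.sum() is ~0.997, the mass within +/- 3 sigma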
Example #16
def create_synoptic_map(endtime, aia_maps={}):
    """
    Create an AIA synoptic map, using 25 daily AIA 193 maps ending on the
    endtime given. Note that the maps are taken from the start of each day.

    Parameters
    ----------
    endtime :
    aia_maps : dict
        A mapping of `datetime.date` to `sunpy.map.GenericMap`.

    Returns
    -------
    sunpy.map.Map : synoptic map
    """
    if endtime > datetime.now():
        endtime = datetime.now()
    shape = [720, 1440]
    data = np.zeros(shape)
    weight_sum = np.zeros(shape)
    nmaps = 23

    dtimes = [endtime - timedelta(days=i) for i in range(nmaps)[::-1]]
    # Fill up aia_maps
    for dtime in dtimes:
        if dtime.date() in aia_maps:
            continue
        try:
            aia_maps[dtime.date()] = synop_reproject(dtime, shape)
        except ValueError:
            warnings.warn(f'Map for {dtime} failed to load')

    # Add up all the reprojected maps
    for dtime in dtimes:
        if dtime.date() in aia_maps:
            aia_synop_map = aia_maps[dtime.date()]
        else:
            warnings.warn(f'Missing map for {dtime}')
            continue
        weights = synop_weights(aia_synop_map)

        aia_data = aia_synop_map.data * weights
        weights[np.isnan(aia_data)] = 0
        aia_data[np.isnan(aia_data)] = 0
        data += aia_data
        weight_sum += weights

    weight_sum[weight_sum == 0] = np.nan
    data /= weight_sum

    meta = aia_synop_map.meta
    meta['date-obs'] = dtime.strftime('%Y-%m-%dT%H:%M:%S')
    data = np.roll(data, data.shape[1] // 2, axis=1)
    meta['crval1'] = 180
    meta['telescop'] = 'sdo'
    meta['instrume'] = 'AIA'
    meta['detector'] = 'AIA'
    meta['waveunit'] = 'angstrom'
    meta['wavelnth'] = 193

    synop_map = Map((data, meta))
    synop_map.plot_settings = aia_synop_map.plot_settings
    synop_map.plot_settings['vmin'] = 0
    synop_map.plot_settings['vmax'] = 2000
    # synop_map.meta['crln_new'] = aia_map.meta['crln_obs']
    return synop_map
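
A sketch of the half-width `np.roll` near the end of the function: shifting the longitude axis by half the map width recentres the data, which is why CRVAL1 is reset to 180.

import numpy as np

row = np.arange(6)
shifted = np.roll(row, row.size // 2)   # array([3, 4, 5, 0, 1, 2])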
Example #17
points = np.vstack((xx.ravel(), yy.ravel())).T
values = np.asarray(deepcopy(hg.data)).ravel()

# Solar rotation can push the points off disk and into areas that have
# nans; griddata fails if those are included.  Restrict the interpolation
# to on-disk (zpp >= 0), finite points.

# 2D interpolation from origin grid to destination grid
valid_points = np.logical_and.reduce([zpp.ravel() >= 0,
                                      np.isfinite(points[:, 0]),
                                      np.isfinite(points[:, 1])])
grid = griddata(points[valid_points],
                values[valid_points],
                (newgrid_x, newgrid_y))

# Find out where the non-finites are
mask = np.logical_not(np.isfinite(grid))

# Return a masked array if appropriate
if not mask.any():
    hpc2 = Map(grid, MapMeta(dict_header))
else:
    hpc2 = Map(ma.array(grid, mask=mask), MapMeta(dict_header))

hpc2.plot_settings = hg.plot_settings
hpc2.peek(aspect='auto')
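
A sketch of the masking idiom used here and in the reprojection functions above: non-finite interpolation output is hidden behind a masked array rather than overwritten.

import numpy as np
import numpy.ma as ma

grid = np.array([[1.0, np.nan], [2.0, 3.0]])
mask = np.logical_not(np.isfinite(grid))
masked = ma.array(grid, mask=mask)   # NaN entry masked, data preserved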