Example #1
def SelectionUpdateWVT(event):
    if event.final:
        calculateCWT.button_type = "warning"
        # Get x and y points of selection region
        lassoRegionX = event.geometry['x']
        lassoRegionY = event.geometry['y']

        # Get current spectrogram data
        wvpower = wvt_ds.data['image'][0]

        # Find points in wavelet power spectrogram inside selection region
        X, Y = np.meshgrid(np.arange(wvpower.shape[1]),
                           np.arange(wvpower.shape[0]))
        powerPts = np.vstack((Y.flatten(), X.flatten())).T
        polygon = [(int(row), int(col))
                   for row, col in zip(lassoRegionY, lassoRegionX)
                   ]  #flip X and Y for row/col orientation
        path = matplotlib.path.Path(polygon)
        selected = path.contains_points(powerPts)
        selected = selected.reshape((wvpower.shape[0], wvpower.shape[1]))

        # Update image data source
        #wvt_ds.data = {'image': [np.multiply(selected, wvpower)], 'dw': [wvpower.shape[1]], 'dh': [wvpower.shape[0]]}
        wvt_ds.data['image'] = [np.multiply(selected, wvpower)]
        calculateCWT.button_type = "success"
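Almost every example on this page follows the same recipe: build a pixel grid with np.meshgrid, flatten it into an (N, 2) array of points, test those points with Path.contains_points, and reshape the boolean result back to the image shape. A minimal self-contained sketch of that pattern (the function name and shapes here are illustrative, not taken from any example above):

import numpy as np
import matplotlib.path

def polygon_to_mask(polygon_xy, h, w):
    # Pixel-center coordinates for an h-by-w image
    x, y = np.meshgrid(np.arange(w), np.arange(h))
    points = np.vstack((x.ravel(), y.ravel())).T  # (h*w, 2) of (x, y) pairs
    path = matplotlib.path.Path(polygon_xy)
    # contains_points returns a flat boolean array in the same order as points
    return path.contains_points(points).reshape(h, w)

# A triangle rasterized into a 4x6 image
mask = polygon_to_mask([(1, 1), (5, 1), (3, 3)], h=4, w=6)
print(mask.astype(int))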
Example #2
    def segment(self):

        print('Segmenting plane: ', self.plane_ind)

        x = np.arange(0, 1024)
        y = np.arange(0, 1024)
        xv, yv = np.meshgrid(x, y, indexing='xy')
        points = np.hstack((xv.reshape((-1, 1)), yv.reshape((-1, 1))))

        path = matplotlib.path.Path(self.img.pt_lst)
        mask = path.contains_points(points)
        mask = mask.reshape(1024, 1024).astype('float64')

        self.data_masked[self.plane_ind, :, :][mask == 0] = 0

        self.mask_arr[self.plane_ind] = mask

        self.img.setImage(self.data_masked[self.plane_ind, :, :],
                          autoLevels=False,
                          lut=self.lut,
                          levels=[0, 255])

        self.update_text_box()

        self.img.pt_lst = []
Example #3
    def __getitem__(self, idx):
        idx = idx % len(self.img_fns)

        img_fn = self.img_fns[idx]
        ann_fn = self.ann_fns[idx]

        img = Image.open(self.data_root + img_fn)

        with open(self.anns_root + ann_fn) as f:
            data_dict = {}
            annotations = json.load(f)
            for annotation in annotations:
                h, w = 1080, 1920
                # Keep all regions; restore the commented test to keep only
                # smoke regions
                mask_attrs_list = list(
                    filter(
                        lambda x: True,  # x['region_attributes']['name'].strip() == 'smoke'
                        annotation['regions']
                    )
                )
                mask_attrs = mask_attrs_list[0]['shape_attributes']

                pts = np.array(list(zip(mask_attrs['all_points_x'], mask_attrs['all_points_y'])))
                x, y = np.meshgrid(np.arange(w), np.arange(h))
                x, y = x.flatten(), y.flatten()
                meshgrid = np.vstack((x, y)).T
                path = Path(pts)
                mask = path.contains_points(meshgrid)
                mask = mask.reshape((h, w)).astype(np.float32)
                data_dict['mask'] = mask
            assert (data_dict['mask'].sum() > 0), "No points in mask"

        mask = torch.from_numpy(mask)
        mask = tf.to_pil_image(mask)
        mask = tf.resize(mask, (512, 1024))
        mask = tf.to_tensor(mask)
        img = tf.resize(img, (512, 1024))
        img = tf.to_tensor(img).squeeze()
        # Shift each channel so the image mean matches other_data_mean
        cur_img_mean = torch.mean(img, axis=(1, 2))
        for c in range(3):
            img[c] -= (self.other_data_mean[c] - cur_img_mean[c])

        output_dict = {
            "idx": idx,
            "input_img": img,
            "target_mask": mask
        }
        return output_dict
Example #4
    def __getitem__(self, idx):
        
        idx = idx % len(self.img_fns)
        
        img_fn = self.img_fns[idx]
        ann_fn = self.ann_fns[idx]
        
        img = Image.open(self.data_root + img_fn)
        
        with open(self.anns_root + ann_fn) as f:
            # print(img_fn)
            # print(ann_fn)
            data_dict = {}
            annotations = json.load(f)
            # print(len(annotations))
            for annotation in annotations:
                h, w = 1080, 1920
                # h, w = self.image_size
                mask_attrs_list = list(
                    filter(
                        lambda x: True,  # 'smoke' in x['region_attributes']['name'].strip()
                        annotation['regions']
                    )
                )
                # print(mask_attrs_list)
                # print(annotation)
                mask_attrs = mask_attrs_list[0]['shape_attributes']
                
                pts = np.array(list(zip(mask_attrs['all_points_x'], mask_attrs['all_points_y'])))
                # print(len(mask_attrs['all_points_x']), len(mask_attrs['all_points_y']), pts.shape)
                x, y = np.meshgrid(np.arange(w), np.arange(h))
                x, y = x.flatten(), y.flatten()
                meshgrid = np.vstack((x,y)).T
                path = Path(pts)
                mask = path.contains_points(meshgrid)
                mask = mask.reshape((h,w)).astype(np.float32)
                # mask = np.zeros((h,w))
                # cv2.fillPoly(mask, [pts], (255))
                data_dict['mask'] = mask
            assert (np.count_nonzero(mask) > 0), "No points in mask"
        
        mask = tf.to_pil_image(mask)
        mask = tf.resize(mask, self.image_size)
        mask = tf.to_tensor(mask)
        img = tf.resize(img, self.image_size)
        img = tf.to_tensor(img)
        # print("img.shape after unsqueezing ",img.shape)
        img = (img - img.min())*255/(img.max()-img.min())
        img = img[[2,1,0]]

        for c in range(3):
            img[c,:,:] -= self.img_mean[c]
        return img, mask, str(idx)
Example #5
def mask_inpolygon(hdu, polygon, axis=('x', 'y')):
    logger.info('(mask_inpolygon) polygon={polygon}, axis={axis}'.format(**locals()))
    logger.info('(mask_inpolygon) start calculation')
    wcs = astropy.wcs.WCS(hdu.header)
    polygon_p = wcs.all_world2pix(polygon, 0)
    path = matplotlib.path.Path(polygon_p)
    ax1 = numpy.arange(hdu.data.shape[1])
    ax2 = numpy.arange(hdu.data.shape[0])
    ax12 = numpy.array([_.ravel() for _ in numpy.meshgrid(ax1, ax2)]).T
    mask = path.contains_points(ax12).reshape(hdu.data.shape).astype(int)
    logger.info('(mask_inpolygon) done')
    
    new_header = hdu.header.copy()
    new_hdu = astropy.io.fits.PrimaryHDU(mask, new_header)
    return new_hdu
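A hedged usage sketch for mask_inpolygon: the header keywords below build a minimal TAN projection so all_world2pix has a valid WCS to invert. The values are invented for illustration and are not from the example's original context.

import numpy
import astropy.io.fits

# Tiny 100x100 image centered on (RA, Dec) = (180, 0) at 0.01 deg/pixel
header = astropy.io.fits.Header()
header['CTYPE1'], header['CTYPE2'] = 'RA---TAN', 'DEC--TAN'
header['CRPIX1'], header['CRPIX2'] = 50.0, 50.0
header['CRVAL1'], header['CRVAL2'] = 180.0, 0.0
header['CDELT1'], header['CDELT2'] = -0.01, 0.01
hdu = astropy.io.fits.PrimaryHDU(numpy.zeros((100, 100)), header)

# Square polygon given in world coordinates (degrees)
polygon = [(180.2, -0.2), (179.8, -0.2), (179.8, 0.2), (180.2, 0.2)]
mask_hdu = mask_inpolygon(hdu, polygon)
print(mask_hdu.data.sum())  # number of pixels inside the polygon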
Example #6
def reduced_bz_E_mask(data, S, e_cut, scale_zone=False):
    symmetry_points, _ = data.S.symmetry_points()
    symmetry = bz_symmetry(data.S.iter_own_symmetry_points)
    point_names = _POINT_NAMES_FOR_SYMMETRY[symmetry]
    # bz_dims = tuple(d for d in data.dims if d in list(symmetry_points.values())[0][0].keys())

    symmetry_points, _ = data.S.symmetry_points()
    points = {k: v[0] for k, v in symmetry_points.items() if k in point_names}
    coords_by_point = {
        k: np.array(
            [v.get(d, 0) for d in data.dims if d in v.keys() or d == 'eV'])
        for k, v in points.items()
    }

    dx_to = reduced_bz_axis_to(data, S, include_E=True)
    if scale_zone:
        dx_to = dx_to * 3
    dE = np.array([0 if d != 'eV' else e_cut for d in data.dims])

    poly_points = np.array([
        coords_by_point['G'],
        coords_by_point['G'] + dx_to,
        coords_by_point['G'] + dx_to + dE,
        coords_by_point['G'] + dE,
    ])

    skip_col = None
    for i in range(poly_points.shape[1]):
        if np.all(poly_points[:, i] == poly_points[0, i]):
            skip_col = i

    assert skip_col is not None
    selector_val = poly_points[0, skip_col]
    poly_points = np.concatenate(
        (poly_points[:, 0:skip_col], poly_points[:, skip_col + 1:]), axis=1)

    selector = dict()
    selector[data.dims[skip_col]] = selector_val
    sdata = data.sel(**selector, method='nearest')

    path = matplotlib.path.Path(poly_points)
    grid = np.array([
        a.ravel() for a in np.meshgrid(*[data.coords[d] for d in sdata.dims],
                                       indexing='ij')
    ]).T
    mask = path.contains_points(grid)
    mask = np.reshape(mask, sdata.data.shape)
    return mask
Example #8
    def query(self, args):

        c, f, v = args

        df = self._main_model.object_data
        roi_class = False

        if (c == "Size in pixels"):
            try:
                v = float(v)
            except ValueError:
                return None

            if f == "<":
                return df[operator.lt(df[c], v)]
            elif f == ">":
                return df[operator.gt(df[c], v)]
            elif f == "<=":
                return df[operator.le(df[c], v)]
            elif f == ">=":
                return df[operator.ge(df[c], v)]
            elif f == "=":
                return df[operator.eq(df[c], v)]
        else:

            if c in self._main_model.rois.keys():

                path = self.roi_to_path(self._main_model.rois[c][1])

                x = df['Center of the object_1'].values
                y = df['Center of the object_0'].values
                centers = np.transpose(np.vstack([x, y]))
                contains_centers = path.contains_points(centers)
                roi_class = True

            if (f == "INCLUDE") or (f == "="):
                if roi_class:
                    return df.loc[contains_centers]
                else:
                    return df[df['Predicted Class'] == c]
            elif f == "NOT INCLUDE":
                if roi_class:
                    return df.loc[[not x for x in contains_centers]]
                return df[df['Predicted Class'] != c]

        return None
Example #9
def pointsInsidePolygon(points, polygon):
    """
    Return for each point if it lies inside the given polygon.
    
    :param points: shape (n,2)
    :param polygon: unclosed, shape (n,2)
    :rtype: boolean ndarray of shape (n,)
    """
    path = matplotlib.path.Path(polygon)
    try:
        # only in mpl >= 1.2.0
        isInside = path.contains_points(points)
    except AttributeError:
        # we are on an older version, try nxutils instead
        # nxutils is deprecated in mpl >= 1.2.0
        isInside = matplotlib.nxutils.points_inside_poly(points, polygon)
    return isInside
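A quick sanity check for pointsInsidePolygon with a unit square (values chosen purely for illustration):

import numpy as np

square = np.array([(0, 0), (1, 0), (1, 1), (0, 1)])
pts = np.array([(0.5, 0.5), (2.0, 2.0)])
print(pointsInsidePolygon(pts, square))  # [ True False]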
Example #11
def reduced_bz_mask(data, **kwargs):
    symmetry_points, _ = data.S.symmetry_points()
    bz_dims = tuple(d for d in data.dims
                    if d in list(symmetry_points.values())[0][0].keys())

    poly_points = reduced_bz_poly(data, **kwargs)
    extra_dims_shape = tuple(
        len(data.coords[d]) for d in data.dims if d in bz_dims)

    path = matplotlib.path.Path(poly_points)
    grid = np.array([
        a.ravel()
        for a in np.meshgrid(*[data.coords[d] for d in bz_dims], indexing='ij')
    ]).T
    mask = path.contains_points(grid)
    mask = np.reshape(mask, extra_dims_shape)

    return mask
Example #12
    def place_on_subdomain(self, domain: np.ndarray):
        """
        Places this shape in the center of the supplied (sub)domain
        :param domain: 2D numpy array of type np.bool
        :return: A copy of the domain, with this shape placed on it, centered horizontally and vertically
        """
        geometry = self.get_shape_geometry(domain)
        xy_stacked = np.array(geometry.boundary.xy).T
        hull = scipy.spatial.ConvexHull(xy_stacked)

        # Map hull onto a boolean array.
        path = matplotlib.path.Path(xy_stacked[hull.vertices])
        x, y = np.meshgrid(np.arange(domain.shape[0]),
                           np.arange(domain.shape[1]))
        x, y = x.flatten(), y.flatten()
        points = np.vstack((y, x)).T
        mask = path.contains_points(points)
        mask = mask.reshape(domain.shape)

        return mask
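The hull-to-mask step above can be exercised in isolation. A sketch under assumed inputs (random points stand in for the shapely boundary): take the convex hull of a scattered point set and rasterize it onto a boolean grid, mirroring the meshgrid/contains_points/reshape sequence.

import numpy as np
import scipy.spatial
import matplotlib.path

rng = np.random.default_rng(0)
pts = rng.uniform(5, 20, size=(30, 2))        # stand-in for the shape boundary
hull = scipy.spatial.ConvexHull(pts)
path = matplotlib.path.Path(pts[hull.vertices])

yy, xx = np.mgrid[0:25, 0:25]                 # 25x25 domain
grid = np.vstack((xx.ravel(), yy.ravel())).T  # (x, y) pairs
mask = path.contains_points(grid).reshape(yy.shape)
print(mask.sum(), "cells inside the hull")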
Example #13
    def __getitem__(self, idx):
        idx = idx % len(self.img_fns)

        img_fn = self.img_fns[idx]
        ann_fn = self.ann_fns[idx]

        img = Image.open(self.data_root + img_fn)

        with open(self.anns_root + ann_fn) as f:
            data_dict = {}
            annotations = json.load(f)
            for annotation in annotations:
                h, w = 1080, 1920
                # Take the first region whose name marks it as smoke
                mask_attrs = list(
                    filter(
                        lambda x: x['region_attributes']['name'].strip() in ['smoke', 'smoke1', ''],
                        annotation['regions']
                    )
                )[0]['shape_attributes']
                pts = np.array(list(zip(mask_attrs['all_points_x'], mask_attrs['all_points_y'])))
                x, y = np.meshgrid(np.arange(w), np.arange(h))
                x, y = x.flatten(), y.flatten()
                meshgrid = np.vstack((x, y)).T
                path = Path(pts)
                mask = path.contains_points(meshgrid)
                mask = mask.reshape((h, w)).astype(np.float32)
                data_dict['mask'] = mask
            assert (data_dict['mask'].sum() > 0), "No points in mask"

        mask = tf.to_pil_image(mask)
        mask = tf.resize(mask, (512, 1024))
        mask = tf.to_tensor(mask)
        img = tf.resize(img, (512, 1024))
        img = tf.to_tensor(img)

        output_dict = {
            "idx": idx,
            "input_img": img,
            "target_mask": mask
        }
        return output_dict
Example #14
def pvtu_timeseries_flowline(x, y, DIR, fileprefix, variables, inputsdir='none', layer='surface', debug=False, t1=1, t2=np.inf):

  from scipy.interpolate import griddata
  import numpy as np
  import matplotlib.path

  # First get number of timesteps
  files = os.listdir(DIR)

  totsteps = 0
  for file in files:
    if file.startswith(fileprefix) and file.endswith('.pvtu'):
      timestep = int(file[len(fileprefix):-5])
      numfilelen = len(file)-len('.pvtu')-len(fileprefix)
      if timestep > totsteps:
        totsteps = timestep
    elif file.startswith(fileprefix) and file.endswith('.pvtu.tar.gz'):
      timestep = int(file[-16:-12])
      numfilelen = len(file)-len('.pvtu.tar.gz')-len(fileprefix)
      if timestep > totsteps:
        totsteps = timestep     
  if totsteps == 0:
    sys.exit("Check that file "+DIR+fileprefix+" actually exists.")
  
  if t2 > totsteps:
    t2 = totsteps
  
  print "Loading "+str(t2-t1+1)+" out of "+str(totsteps)+" timesteps"

  if layer == 'surface':
    freesurfacevar = 'zs top'
  elif layer == 'bed':
    freesurfacevar = 'zs bottom'
  if freesurfacevar not in variables:
    variables.append(freesurfacevar)

  if not(inputsdir == 'none'):
    mesh_extent_x = np.loadtxt(inputsdir+'/mesh_timeseries_x.dat')
    mesh_extent_y = np.loadtxt(inputsdir+'/mesh_timeseries_y.dat')

  for i in range(0,t2-t1+1):
    t = i+t1
    # Get filename
    pvtufile = '{0}{2:0{1}d}{3}'.format(fileprefix,numfilelen,t,'.pvtu')
    if debug:
      print "Loading file "+pvtufile
    # Get data

    if i==0:
      reader = 'none'
    data,reader = pvtu_file(DIR+pvtufile,variables,reader=reader,returnreader=True)
    surf = data[data[freesurfacevar] != 0]
    del data
    # If first timestep, set up output variable names
    if i==0:
      # Note: use surf here, since data has already been deleted
      varnames = list(surf.dtype.names)
      varnames.remove('Node Number')
      types = []
      for var in varnames:
        types.append(np.float64)
      dataflow = np.zeros([len(x),t2-t1+1], dtype=list(zip(varnames,types)))
      for var in varnames:
        dataflow[var][:,i] = np.nan
    
    if not(inputsdir == 'none'):
      ind = np.where(mesh_extent_x[:,t-1] != 0)
      path = matplotlib.path.Path(np.column_stack([mesh_extent_x[:,t-1],mesh_extent_y[:,t-1]]))
      inmesh = path.contains_points(np.column_stack([x,y]))
    else:
      inmesh = np.arange(0,len(x))

    for var in varnames:
      dataflow[var][inmesh,i] = griddata((surf['x'],surf['y']),surf[var],(x[inmesh],y[inmesh]))
    
    del surf
     
  return dataflow
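The mesh-outline cropping used above can be shown in isolation. A sketch with invented values: only flowline points that fall inside the mesh outline for a given timestep get interpolated; the rest stay NaN.

import numpy as np
import matplotlib.path

outline = np.array([[0., 0.], [10., 0.], [10., 5.], [0., 5.]])  # mesh boundary
x = np.array([-1., 1., 3., 5., 7., 9., 11., 13.])  # flowline x, some outside
y = np.full_like(x, 2.5)

path = matplotlib.path.Path(outline)
inmesh = path.contains_points(np.column_stack([x, y]))
print(inmesh)  # False for the points outside the 10x5 outline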
Example #15
    def streakMask(self, streak_file, addWidth=0., addLength=100., maxExtrapolate=0):
        '''
        Produce a list of pixels in the image that should be masked for
        streaks in the input table.  streaktab is the output table of new
        streaks to add; image is a FITS HDU with header and image data.
        addWidth is an additional number of pixels to add to the half-width;
        addLength is the length added to each end of a streak (pixels).

        Returns:
        ypix, xpix: 1d arrays with indices of affected pixels
        nStreaks: number of new streaks masked
        '''

        # Read the streaks table first
        try:
            tab = fitsio.FITS(streak_file)
            streaktab = tab[1].read()
        except Exception:
            logger.error('Could not read streak file {:s}'.format(streak_file))
            sys.exit(1)

        image_header = self.sci.header
        image_data = self.sci.data
        # Pixscale in degrees
        pixscale = astrometry.get_pixelscale(image_header, units='arcsec') / 3600.
        shape = image_data.shape

        # # Due to a bug in fitsio 1.0.0rc1+0, we need to clean up the
        # # header before feeding it to wcsutil and remove the 'None' and other problematic items
        # for k in image_header:
        #     # Try to access the item; if that fails we have to remove it
        #     try:
        #         item = image_header[k]
        #     except:
        #         logger.info("Removing keyword: {:s} from header".format(k))
        #         image_header.delete(k)

        w = wcsutil.WCS(image_header)

        # WE NEED TO UPDATE THIS WHEN THE TABLE IS PER EXPNUM
        use = np.logical_and(streaktab['expnum'] == image_header['EXPNUM'],
                             streaktab['ccdnum'] == image_header['CCDNUM'])
        logger.info('{:d} streaks found to mask'.format(np.count_nonzero(use)))

        nStreaks = 0
        inside = None


        for row in streaktab[use]:
            if maxExtrapolate > 0:
                if row['extrapolated'] and row['nearest'] > maxExtrapolate:
                    logger.info('Skipping extrapolated streak')
                    continue
            width = row['width']
            ra = np.array((row['ra1'], row['ra2']))
            dec = np.array((row['dec1'], row['dec2']))
            x, y = w.sky2image(ra, dec)

            x1, x2, y1, y2 = x[0], x[1], y[0], y[1]

            # Slope of the line, cos/sin form
            mx = (x2 - x1) / np.hypot(x2 - x1, y2 - y1)
            my = (y2 - y1) / np.hypot(x2 - x1, y2 - y1)

            # Displacement for width of streak:
            wx = width / pixscale + addWidth
            wy = wx * mx
            wx = wx * -my

            # grow length
            x1 -= addLength * mx
            x2 += addLength * mx
            y1 -= addLength * my
            y2 += addLength * my

            # From Alex's immask routine: mark interior pixels
            vertices = [(x1 + wx, y1 + wy), (x2 + wx, y2 + wy), (x2 - wx, y2 - wy), (x1 - wx, y1 - wy)]
            vertices.append(vertices[0])  # Close the path

            if inside is None:
                # Set up coordinate arrays
                yy, xx = np.indices(shape)
                points = np.vstack((xx.flatten(), yy.flatten())).T
                path = matplotlib.path.Path(vertices)
                inside = path.contains_points(points)
            else:
                # use logical_or for additional streaks
                path = matplotlib.path.Path(vertices)
                inside = np.logical_or(inside, path.contains_points(points))

            nStreaks = nStreaks + 1

        logger.info('Masked {:d} new streaks'.format(nStreaks))

        # Make the list of masked pixels
        if inside is None:
            ymask, xmask = np.array([], dtype=int), np.array([], dtype=int)
        else:
            ymask, xmask = np.nonzero(inside.reshape(shape))

        logger.info('Setting bits in MSK image for STREAK: {:d}'.format(parse_badpix_mask('STREAK')))
        self.sci.mask[ymask, xmask] |= parse_badpix_mask('STREAK')
Example #16
    def interpolate_from_unstructured(
        self,
        *,
        fill_value=np.nan,
        structured_output=True,
        unstructured_dim_name="unstructured_dim",
        **kwargs,
    ):
        """Interpolate DataArray onto new grids of some existing coordinates

        Parameters
        ----------
        **kwargs : (str, array)
            Each keyword is the name of a coordinate in the DataArray, the argument is a
            1d array giving the values of that coordinate on the output grid
        fill_value : float, default np.nan
            fill_value passed through to scipy.interpolate.griddata
        structured_output : bool, default True
            If True, treat output coordinates values as a structured grid.
            If False, output coordinate values must all have the same length and are not
            broadcast together.
        unstructured_dim_name : str, default "unstructured_dim"
            Name used for the dimension in the output that replaces the dimensions of
            the interpolated coordinates. Only used if structured_output=False.

        Returns
        -------
        DataArray
            Data interpolated onto a new, structured grid
        """

        da = self.data

        if structured_output:
            new_coords = {
                name: xr.DataArray(values, dims=name)
                for name, values in kwargs.items()
            }

            coord_arrays = tuple(
                np.meshgrid(*[values for values in kwargs.values()],
                            indexing="ij"))

            new_output_dims = [d for d in kwargs]
        else:
            new_coords = {
                name: xr.DataArray(values, dims=unstructured_dim_name)
                for name, values in kwargs.items()
            }

            coord_arrays = tuple(kwargs.values())

            lengths = [len(c) for c in coord_arrays]
            if np.any([x != lengths[0] for x in lengths[1:]]):
                raise ValueError(
                    f"When structured_output=False, all the arrays of output "
                    f"coordinate values must have the same length. Got lengths "
                    f"{dict((name, len(coord)) for name, coord in kwargs.items())}"
                )

            new_output_dims = [unstructured_dim_name]

        # Figure out number of dimensions in the coordinates to be interpolated
        dims = set()
        for coord in kwargs:
            dims = dims.union(da[coord].dims)
        dims = tuple(dims)
        ndim = len(dims)

        # dimensions that are not being interpolated
        remaining_dims = tuple(d for d in da.dims if d not in dims)

        # Select interpolation method
        if ndim <= 2:
            # "cubic" only available for 1d or 2d interpolation
            method = "cubic"
        else:
            method = "linear"

        # extend input coordinates to cover all dims, so we can flatten them
        input_coords = []
        for coord in kwargs:
            data = da[coord]
            missing_dims = tuple(set(dims) - set(data.dims))
            expand = {dim: da.sizes[dim] for dim in missing_dims}
            expand_positions = tuple(dims.index(d) for d in missing_dims)
            da[coord] = data.expand_dims(expand, axis=expand_positions)

        # scipy.interpolate.griddata requires the axis being interpolated to be the first
        # one, so stack together 'dims', and then transpose so the resulting stacked
        # dimension is the first
        dims_name_list = [d for d in da.dims if d in dims]
        stacked_dim_name = "stacked_" + "_".join(dims_name_list)
        stacked = da.stack({stacked_dim_name: dims_name_list})
        stacked = stacked.transpose(*((stacked_dim_name, ) + remaining_dims),
                                    transpose_coords=True)

        result = scipy_griddata(
            tuple(stacked[coord] for coord in kwargs),
            stacked,
            coord_arrays,
            method=method,
            fill_value=fill_value,
        )

        # griddata only sets points outside the 'convex hull' to fill_value
        # Nicer to set all points outside the grid boundaries to fill_value
        ###################################################################
        boundaries = self.get_bounding_surfaces(coords=[c for c in kwargs])
        points = np.stack(coord_arrays, axis=-1)

        # boundaries[0] is the outer boundary
        path = matplotlib.path.Path(boundaries[0], closed=True, readonly=True)
        is_contained = path.contains_points(points.reshape([-1, 2]))
        is_contained = is_contained.reshape(coord_arrays[0].shape +
                                            (1, ) * len(remaining_dims))
        result = np.where(is_contained, result, fill_value)

        # boundaries[1] is the inner boundary if it exists
        if len(boundaries) > 1:
            path = matplotlib.path.Path(boundaries[1],
                                        closed=True,
                                        readonly=True)
            is_contained = path.contains_points(points.reshape([-1, 2]))
            is_contained = is_contained.reshape(coord_arrays[0].shape +
                                                (1, ) * len(remaining_dims))
            result = np.where(is_contained, fill_value, result)

        if len(boundaries) > 2:
            raise ValueError(
                f"Found {len(boundaries)} boundaries, expected at most 2")

        # Create DataArray to return, with as much metadata as possible retained
        ########################################################################
        new_coords.update({
            name: array
            for name, array in stacked.coords.items()
            if stacked_dim_name not in array.dims
        })

        result = xr.DataArray(
            result,
            dims=new_output_dims + list(remaining_dims),
            coords=new_coords,
            name=da.name,
            attrs=da.attrs,
        )

        return result
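A hypothetical usage sketch for interpolate_from_unstructured (the accessor name bout and the coordinate names R and Z are assumptions, not established by the snippet): interpolate a field defined on an unstructured (R, Z) mesh onto regular 1d grids.

import numpy as np

R_new = np.linspace(0.2, 1.4, 100)
Z_new = np.linspace(-1.0, 1.0, 200)
# da is assumed to be an xarray.DataArray carrying R and Z coordinates
result = da.bout.interpolate_from_unstructured(R=R_new, Z=Z_new)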
Example #17
# Load beds
x, y, zbed3 = elmerreadlib.input_file(DIR_bed3 + 'inputs/bedrock.xy')
x, y, zbed4 = elmerreadlib.input_file(DIR_bed4 + 'inputs/bedrock.xy')
x, y, zbed5 = elmerreadlib.input_file(DIR_bed5 + 'inputs/bedrock.xy')
x, y, zbed8 = elmerreadlib.input_file(DIR_bed8 + 'inputs/bedrock.xy')

xgrid, ygrid = np.meshgrid(x, y)

# Load mesh extents for cropping
mesh_extent = np.loadtxt(DIR_bed3 + 'inputs/mesh_extent.dat')
mesh_hole1 = np.loadtxt(DIR_bed3 + 'inputs/mesh_hole1.dat')
mesh_hole2 = np.loadtxt(DIR_bed3 + 'inputs/mesh_hole2.dat')

path = matplotlib.path.Path(
    np.column_stack([mesh_extent[:, 0], mesh_extent[:, 1]]))
inmesh = path.contains_points(
    np.column_stack([xgrid.flatten(), ygrid.flatten()]))
inmesh = inmesh.reshape(len(y), len(x))

path = matplotlib.path.Path(
    np.column_stack([mesh_hole1[:, 0], mesh_hole1[:, 1]]))
inhole1 = path.contains_points(
    np.column_stack([xgrid.flatten(), ygrid.flatten()]))
inhole1 = inhole1.reshape(len(y), len(x))
path = matplotlib.path.Path(
    np.column_stack([mesh_hole2[:, 0], mesh_hole2[:, 1]]))
inhole2 = path.contains_points(
    np.column_stack([xgrid.flatten(), ygrid.flatten()]))
inhole2 = inhole2.reshape(len(y), len(x))

del path
Example #18
    def weights(self):
        """(M,N) ndarray. Fractions of pixel areas within the aperture for
        the MxN grid defined by `ApertureBase.extent`. Areas for partial
        pixels along the aperture border are approximated by dividing each
        border pixel into ``ApertureBase.nsub**2`` subpixels. Partial
        pixels are not computed if `ApertureBase.nsub` is 1. Read only.

        """
        # Pixels with centers within the aperture
        x, y = self.grid
        pix = np.vstack(np.broadcast_arrays(x, y)).reshape(2, -1).T
        weights = (self.path.contains_points(pix)
                   .reshape(y.size, x.size).astype(float))

        # Partial pixels
        if self.nsub > 1:
            # Shift the aperture to determine the border pixels.
            #
            # Ideally, additional contains tests would only have to be
            # performed for a dialated and an eroded version of the
            # aperture. Implementing a buffering algorithm for arbitrary
            # polygons is not easy, however, and I don't want to add
            # Shapely as a dependency for this module. Instead, the method
            # below shifts the aperture in eight directions and records the
            # changes in the pixels' coverage. The code is much simpler,
            # but performing eight contains tests instead of two means that
            # the code is also much slower.
            shift_size = 0.5
            shift_list = np.array(
                [[1, 0], [0, 1], [-1, 0], [-1, 0],
                 [0, -1], [0, -1], [1, 0], [1, 0]]) * shift_size
            w = weights.ravel().astype('bool')
            bordertest = np.zeros(w.shape, dtype='bool')
            xy = self._xy.copy()
            path = matplotlib.path.Path(xy, closed=True)
            for shift in shift_list:
                path.vertices += shift
                # Flag pixels whose containment changed under any shift
                bordertest |= w != path.contains_points(pix)
            bordertest = bordertest.reshape(y.size, x.size).astype(float)

            # Lower-left corners of border pixels
            i, j = np.where(bordertest)
            x, y = self.extent[0] + j, self.extent[2] + i

            # Centers of subpixels with respect to a generic pixel
            subx = (np.arange(self.nsub).reshape(1, -1) + 0.5) / self.nsub
            suby = (np.arange(self.nsub).reshape(-1, 1) + 0.5) / self.nsub

            # Centers of subpixels in all border pixels
            subx = x[:,None,None] + subx  # (len(i), 1, nsub)
            suby = y[:,None,None] + suby  # (len(i), nsub, 1)

            # Subpixels with centers within the aperture
            subpix = np.vstack(np.broadcast_arrays(subx, suby)).reshape(2, -1).T
            subweights = (self.path.contains_points(subpix)
                          .reshape(np.broadcast(subx, suby).shape))

            # Refined pixel weights along the border
            kwargs = dict(axis=(1, 2), dtype=float)
            weights[i, j] = np.sum(subweights, **kwargs) / self.nsub**2

        return weights
Example #19
    def maskPixels(self, path):
        XY = np.dstack((self.grid_x, self.grid_y))
        XY_flat = XY.reshape((-1, 2))
        mask_flat = path.contains_points(XY_flat)
        mask = mask_flat.reshape(self.grid_x.shape)
        return mask
Example #20
def pvtu_timeseries_grid(x, y, DIR, fileprefix, variables, inputsdir, layer='surface', debug=False, crop_mesh=True, t1=1, t2=np.inf):

  from scipy.interpolate import griddata
  import numpy as np
  import matplotlib.path
  import gc

  # First get number of timesteps
  files = os.listdir(DIR)

  xgrid,ygrid = np.meshgrid(x,y)

  totsteps = 0
  for file in files:
    if file.startswith(fileprefix) and file.endswith('.pvtu'):
      timestep = int(file[len(fileprefix):-5])
      numfilelen = len(file)-len('.pvtu')-len(fileprefix)
      if timestep > totsteps:
        totsteps = timestep
    elif file.startswith(fileprefix) and file.endswith('.pvtu.tar.gz'):
      timestep = int(file[-16:-12])
      numfilelen = len(file)-len('.pvtu.tar.gz')-len(fileprefix)
      if timestep > totsteps:
        totsteps = timestep     
  if totsteps == 0:
    sys.exit("Check that file "+DIR+fileprefix+" actually exists.")
 
  if t2 > totsteps:
    t2 = totsteps
  
  print "Loading "+str(t2-t1+1)+" out of "+str(totsteps)+" timesteps"

  if layer == 'surface':
    freesurfacevar = 'zs top'
  elif layer == 'bed':
    freesurfacevar = 'zs bottom'
  if freesurfacevar not in variables:
    variables.append(freesurfacevar)
  
  try:
    mesh_extent_x = np.loadtxt(inputsdir+'/mesh_timeseries_x.dat')
    mesh_extent_y = np.loadtxt(inputsdir+'/mesh_timeseries_y.dat')

    path = matplotlib.path.Path(np.column_stack([mesh_extent_x[:,0],mesh_extent_y[:,0]]))
    inmesh = path.contains_points(np.column_stack([xgrid.flatten(),ygrid.flatten()]))
    inmesh = inmesh.reshape(len(y),len(x))
  except IOError:
    # Fall back to a static mesh outline if no timeseries files exist
    mesh_extent = np.loadtxt(inputsdir+'/mesh_extent.dat')

    path = matplotlib.path.Path(np.column_stack([mesh_extent[:,0],mesh_extent[:,1]]))
    inmesh = path.contains_points(np.column_stack([xgrid.flatten(),ygrid.flatten()]))
    inmesh = inmesh.reshape(len(y),len(x))
  try: 
    mesh_hole1 = np.loadtxt(inputsdir+'/mesh_hole1.dat')
    mesh_hole2 = np.loadtxt(inputsdir+'/mesh_hole2.dat')
    holes = True

    path = matplotlib.path.Path(np.column_stack([mesh_hole1[:,0],mesh_hole1[:,1]]))
    inhole1 = path.contains_points(np.column_stack([xgrid.flatten(),ygrid.flatten()]))
    inhole1 = inhole1.reshape(len(y),len(x))
    path = matplotlib.path.Path(np.column_stack([mesh_hole2[:,0],mesh_hole2[:,1]]))
    inhole2 = path.contains_points(np.column_stack([xgrid.flatten(),ygrid.flatten()]))
    inhole2 = inhole2.reshape(len(y),len(x))
    del path
  except IOError:
    holes = False

  for i in range(0,t2-t1+1):
    t = i+t1
    # Get filename
    pvtufile = '{0}{2:0{1}d}{3}'.format(fileprefix,numfilelen,t,'.pvtu')
    if debug:
      print "Loading file "+pvtufile
      
    # Get data
    if i==0:
      reader='none'
    data,reader = pvtu_file(DIR+pvtufile,variables,reader=reader,returnreader=True)
    surf = data[data[freesurfacevar] != 0]
    del data
    # If first timestep, set up output variable name
    if i==0:
      varnames = list(surf.dtype.names)
      varnames.remove('Node Number')
      varnames.append('dh')
      types = []
      for var in varnames:
        types.append(np.float64)
      datagrid = np.zeros([len(y),len(x),t2-t1+1], dtype=list(zip(varnames,types)))
      del types

    if crop_mesh:
      ind = np.where(mesh_extent_x[:,t-1] != 0)
      path = matplotlib.path.Path(np.column_stack([mesh_extent_x[:,t-1],mesh_extent_y[:,t-1]]))
      inmesh = path.contains_points(np.column_stack([xgrid.flatten(),ygrid.flatten()]))
      inmesh = inmesh.reshape(len(y),len(x))
      del path

    for var in varnames:
      if var == 'dh':
        if i > 0:
          z_old = griddata((surf_last['x'],surf_last['y']),surf_last['z'],(surf['x'],surf['y']))
          dh = surf['z']-z_old
          datagrid[var][:,:,i] = griddata((surf['x'],surf['y']),dh,(xgrid,ygrid))
          del surf_last
        surf_last = np.array(surf)
      else:
        datagrid[var][:,:,i] = griddata((surf['x'],surf['y']),surf[var],(xgrid,ygrid))
      
      datagrid[var][~inmesh,i] = np.nan

      if holes:
        datagrid[var][inhole1,i] = np.nan
        datagrid[var][inhole2,i] = np.nan

    del surf,pvtufile
    if crop_mesh:
      del inmesh,ind
    gc.collect()
    
  return datagrid
Example #21
    def intersection(self, points):
        path = matplotlib.path.Path(self.ROI, closed=False)
        # Map containment to +1 (inside) / -1 (outside) for each point
        return path.contains_points(points, radius=1e-9).astype(int) * 2 - 1
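A note on the radius argument used in this last example: a small positive radius slightly inflates the test region (the sign's effect depends on the path's vertex orientation in matplotlib), so points lying exactly on an edge count as inside; without it, boundary behavior is not guaranteed. An illustrative check:

import numpy as np
import matplotlib.path

square = matplotlib.path.Path([(0, 0), (1, 0), (1, 1), (0, 1)])
edge_point = np.array([[0.5, 0.0]])
print(square.contains_points(edge_point))               # on the edge: may be False
print(square.contains_points(edge_point, radius=1e-9))  # True with the outward nudge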