Example #1
def createPolygonsAndColors(latDeg, lonDeg, rgb, colorMode=None):
    """
    Returns polygons (in lat/lon coords) and a color for each polygon.
    
    :param latDeg: latitude for each pixel corner (h+1,w+1)
    :param lonDeg: longitude for each pixel corner (h+1,w+1)
    :param rgb: RGB array of (h,w,3) shape
    :param colorMode: 'matplotlib' normalizes colors to [0,1]
    :returns: verts of shape (h*w,4,2), colors of shape (h*w,3)
    """
    latLonDeg = ma.dstack((latDeg, lonDeg))

    # adapted from matplotlib.collections.QuadMesh.convert_mesh_to_paths
    verts = ma.concatenate(
        (
            latLonDeg[0:-1, 0:-1],
            latLonDeg[0:-1, 1:],
            latLonDeg[1:, 1:],
            latLonDeg[1:, 0:-1],
            #latLonDeg[0:-1, 0:-1] # matplotlib automatically closes the polygon
        ),
        axis=2)
    verts = verts.reshape(rgb.shape[0] * rgb.shape[1], 4, 2)

    if colorMode == ColorMode.matplotlib:
        rgb = image2mpl(rgb)

    colors = rgb.reshape(-1, 3)

    return verts, colors
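
A minimal, self-contained sketch of the quad-vertex construction above, on a synthetic 2x2-pixel corner grid (all values and names here are illustrative; ColorMode/image2mpl are omitted):

import numpy as np
import numpy.ma as ma

latDeg = ma.masked_invalid(np.array([[0.0, 0.0, 0.0],
                                     [1.0, 1.0, 1.0],
                                     [2.0, 2.0, 2.0]]))
lonDeg = ma.masked_invalid(np.array([[10.0, 11.0, 12.0],
                                     [10.0, 11.0, 12.0],
                                     [10.0, 11.0, 12.0]]))
rgb = np.zeros((2, 2, 3), dtype=np.uint8)          # dummy 2x2 RGB image

latLonDeg = ma.dstack((latDeg, lonDeg))            # (3, 3, 2) corner grid
verts = ma.concatenate((latLonDeg[:-1, :-1],       # the four corners of
                        latLonDeg[:-1, 1:],        # each pixel, in order
                        latLonDeg[1:, 1:],
                        latLonDeg[1:, :-1]),
                       axis=2)
verts = verts.reshape(rgb.shape[0] * rgb.shape[1], 4, 2)
print(verts.shape)  # (4, 4, 2): one quad of 4 (lat, lon) corners per pixel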
Example #2
    def __call__(self, energy, impact, xmax, xb, yb):
        """
        Evaluate interpolated templates for a set of shower parameters and pixel positions

        Parameters
        ----------
        energy: array-like
            Energy of interpolated template
        impact: array-like
            Impact distance of interpolated template
        xmax: array-like
            Depth of maximum of interpolated templates
        xb: array-like
            Pixel X position at which to evaluate template
        yb: array-like
            Pixel Y position at which to evaluate template

        Returns
        -------
        ndarray: Pixel amplitude expectation values
        """
        array = np.stack((energy, impact, xmax), axis=-1)
        points = ma.dstack((xb, yb))

        interpolated_value = self.interpolator(array, points)
        interpolated_value[interpolated_value < 0] = 0

        return interpolated_value
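
A small shape sketch for the inputs assembled above; since self.interpolator is not shown in this snippet, only the np.stack/ma.dstack step is illustrated, with made-up values:

import numpy as np
import numpy.ma as ma

energy = np.array([1.0, 2.0])            # illustrative shower energies
impact = np.array([100.0, 150.0])        # illustrative impact distances
xmax = np.array([300.0, 320.0])          # illustrative depths of maximum
xb = np.array([[0.1, 0.2, 0.3],
               [0.1, 0.2, 0.3]])         # pixel x positions per event
yb = np.zeros_like(xb)                   # pixel y positions per event

array = np.stack((energy, impact, xmax), axis=-1)   # (2, 3) shower parameters
points = ma.dstack((xb, yb))                        # (2, 3, 2) pixel coordinates
print(array.shape, points.shape)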
Example #3
    def __call__(self, energy, impact, xmax, xb, yb):
        """
        Evaluate interpolated templates for a set of shower parameters and pixel positions

        Parameters
        ----------
        energy: array-like
            Energy of interpolated template
        impact: array-like
            Impact distance of interpolated template
        xmax: array-like
            Depth of maximum of interpolated templates
        xb: array-like
            Pixel X position at which to evaluate template
        yb: array-like
            Pixel Y position at which to evaluate template

        Returns
        -------
        ndarray: Pixel amplitude expectation values
        """
        array = np.stack((energy, impact, xmax), axis=-1)
        points = ma.dstack((xb, yb))

        interpolated_value = self.interpolator(array, points)
        interpolated_value[interpolated_value < 0] = 0

        return interpolated_value
Example #4
def ReadAndAgg_AgMERRA(var, year, lon_min, lon_max, lat_min, lat_max):

    f = netCDF4.Dataset("Data/AgMERRA/" + var + "/AgMERRA_" + \
                                                str(year) + "_" + var + ".nc4")
    #    time = f.variables['time'][:]          # days since start of year (366)
    lats = f.variables['latitude'][:]  # degrees north (31)
    lons = f.variables['longitude'][:]  # degrees east (59)
    data = f.variables[var][:]
    f.close()

    # reducing to region of West Africa
    data_lonpos = data[:, :, lons < 180]
    data_lonneg = data[:, :, lons > 180]
    data_changed = np_ma.dstack([data_lonneg, data_lonpos])
    data_changed = np.flip(data_changed, axis=1)
    lons = np.arange(-179.875, 180, 0.25)
    lats = np.flip(lats)
    data_rel = data_changed[:,((lats>=lat_min) & (lats<=lat_max)),:] \
                                    [:,:,((lons>=lon_min) & (lons<=lon_max))]

    # aggregating to 0.5 degree resolution
    [n_t, n_lat, n_lon] = data_rel.shape
    data_rel_agg = np.zeros([n_t, int(n_lat / 2), int(n_lon / 2)])
    for t in range(0, n_t):
        for lat in range(0, int(n_lat / 2)):
            for lon in range(0, int(n_lon / 2)):
                x = np.nanmin(data_rel[t, (2*lat):(2*lat+2), \
                                                           (2*lon):(2*lon+2)])
                if np.isnan(float(x)):
                    data_rel_agg[t, lat, lon] = np.nan
                else:
                    data_rel_agg[t, lat, lon] = np.nanmean(data_rel[t, \
                                        (2*lat):(2*lat+2), (2*lon):(2*lon+2)])

    return (data_rel_agg)
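
A minimal sketch of the longitude re-wrap used above, on a tiny synthetic (time, lat, lon) grid with 0..360-degree longitudes (values are illustrative only):

import numpy as np
import numpy.ma as ma

lons = np.array([45.0, 135.0, 225.0, 315.0])       # degrees east, 0..360
data = np.arange(2 * 3 * 4, dtype=float).reshape(2, 3, 4)

data_lonpos = data[:, :, lons < 180]                # eastern half
data_lonneg = data[:, :, lons > 180]                # western half (lon > 180)
data_wrapped = ma.dstack([data_lonneg, data_lonpos])  # lon axis now -180..180
lons_wrapped = np.concatenate([lons[lons > 180] - 360, lons[lons < 180]])
print(lons_wrapped)        # [-135.  -45.   45.  135.]
print(data_wrapped.shape)  # (2, 3, 4)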
Example #5
def interpusblposition(usbl):

    # ok, let's make sure the GPS is on the second
    usbl = usbl.drop_duplicates()
    timesteps = np.diff(usbl.index) / np.timedelta64(1, 's')
    Transition_Matrix = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 1, 0],
                                  [0, 0, 0, 1]])

    temp = np.zeros((len(timesteps), 4, 4))
    for i in range(len(timesteps)):
        Transition_Matrix[0, 2] = timesteps[i]
        Transition_Matrix[1, 3] = timesteps[i]
        temp[i] = Transition_Matrix
    Transition_Matrix = temp
    mask = usbl['UsblNorthing'][:-1].isna()
    lat = ma.array(usbl['UsblNorthing'][:-1], mask=mask)
    lon = ma.array(usbl['UsblEasting'][:-1], mask=mask)
    #print(timesteps.min())
    lonSpeed = ma.array(np.diff(usbl.ShipEasting) / timesteps)
    lonSpeed[np.isnan(lonSpeed)] = 0
    latSpeed = ma.array(np.diff(usbl.ShipNorthing) / timesteps)
    latSpeed[np.isnan(latSpeed)] = 0
    coord = ma.dstack((lon, lat, lonSpeed, latSpeed))[0]
    Observation_Matrix = np.eye(4)
    xinit = lon[0]
    yinit = lat[0]
    vxinit = lonSpeed[0]
    vyinit = latSpeed[0]
    initstate = [xinit, yinit, vxinit, vyinit]
    #print(initstate)
    initcovariance = 1.0e-3 * np.eye(4)
    transitionCov = 1.0e-3 * np.eye(4)
    observationCov = np.eye(4)
    observationCov[0, 0] = 15
    observationCov[1, 1] = 15
    observationCov[2, 2] = 0.01
    observationCov[3, 3] = 0.01
    #print('kman')
    kf = KalmanFilter(transition_matrices=Transition_Matrix,
                      observation_matrices=Observation_Matrix,
                      initial_state_mean=initstate,
                      initial_state_covariance=initcovariance,
                      transition_covariance=transitionCov,
                      observation_covariance=observationCov)

    output = kf.smooth(coord)[0]
    result = usbl
    result['KfUsblNorthing'] = np.append(output[:, 1], output[-1, 1])
    result['KfUsblEasting'] = np.append(output[:, 0], output[-1, 0])
    myProj = Proj(
        "+proj=utm +zone=55F, +south +ellps=WGS84 +datum=WGS84 +units=m +no_defs"
    )
    result['KfUsblLongitude'], result['KfUsblLatitude'] = myProj(
        result['KfUsblEasting'].values,
        result['KfUsblNorthing'].values,
        inverse=True)

    return result
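
A small sketch of how the masked observation matrix for the Kalman smoother is assembled above, using synthetic fixes with one dropped USBL sample (pykalman is documented to treat masked observation entries as missing):

import numpy as np
import numpy.ma as ma

northing = np.array([0.0, np.nan, 2.0, 3.0])   # synthetic, one missing fix
easting = np.array([10.0, np.nan, 12.0, 13.0])
timesteps = np.array([1.0, 1.0, 1.0])          # seconds between samples

mask = np.isnan(northing[:-1])
lat = ma.array(northing[:-1], mask=mask)
lon = ma.array(easting[:-1], mask=mask)
lonSpeed = np.diff(easting) / timesteps
latSpeed = np.diff(northing) / timesteps
lonSpeed[np.isnan(lonSpeed)] = 0.0
latSpeed[np.isnan(latSpeed)] = 0.0

coord = ma.dstack((lon, lat, lonSpeed, latSpeed))[0]   # (n_samples, 4)
print(coord.shape)   # (3, 4); row 1 has its position entries masked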
Example #6
    def get_vectormap(self, pafs, number=None):
        from numpy import ma
        if number is None:
            for i in range(26):
                u = pafs[2 * i, :, :] * -1
                # u = cv2.resize(u, (1280, 720))
                v = pafs[2 * i + 1, :, :]
                # v = cv2.resize(v, (1280, 720))
                # print(i)
                # print(u.max(), v.max())
                x, y = np.meshgrid(np.arange(u.shape[1]), np.arange(u.shape[0]))
                M = np.zeros(u.shape, dtype='bool')
                M[u**2 + v**2 < 0.2 * 0.2] = True
                u = ma.masked_array(u, mask=M)
                v = ma.masked_array(v, mask=M)
                if i == 0:
                    U = u
                    V = v
                    X = x
                    Y = y
                else:
                    U = ma.dstack((U, u))
                    V = ma.dstack((V, v))
                    X = np.dstack((X, x))
                    Y = np.dstack((Y, y))
                # print(U.shape)
            U = U.transpose(2, 0, 1)
            V = V.transpose(2, 0, 1)
            X = X.transpose(2, 0, 1)
            Y = Y.transpose(2, 0, 1)

        else:
            U = pafs[2 * number, :, :] * -1
            # U = cv2.resize(U, (1280, 720))
            V = pafs[2 * number + 1, :, :]
            # V = cv2.resize(V, (1280, 720))
            # print(U.max(), V.max())
            X, Y = np.meshgrid(np.arange(U.shape[1]), np.arange(U.shape[0]))
            M = np.zeros(U.shape, dtype='bool')
            M[U**2 + V**2 < 0.2 * 0.2] = True
            U = ma.masked_array(U, mask=M)
            V = ma.masked_array(V, mask=M)

        # self.frame = cv2.resize(self.frame, (82, 64))
        return X, Y, U, V
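
A hedged sketch of the single-channel branch above: weak vectors are masked out before a quiver plot. Random data stands in for a PAF channel, and matplotlib is assumed to be available:

import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
U = rng.normal(scale=0.3, size=(32, 32))       # synthetic x-component field
V = rng.normal(scale=0.3, size=(32, 32))       # synthetic y-component field
X, Y = np.meshgrid(np.arange(U.shape[1]), np.arange(U.shape[0]))

M = U ** 2 + V ** 2 < 0.2 * 0.2                # suppress near-zero vectors
U = ma.masked_array(U, mask=M)
V = ma.masked_array(V, mask=M)

plt.quiver(X, Y, U, V, scale=10)               # masked entries are not drawn
plt.savefig("paf_quiver.png")                  # hypothetical output name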
Example #7
    def __init__(self, cdfPath, i=0):
        with pycdf.CDF(cdfPath) as root:
            var = root
            altitude = var[self.var_altitude][...] / 1000
            cameraPosGCRS = var[self.var_cameraPos][i]
            photoTime = var[self.var_photoTime][i]

            # for three channels (RGB), each channel is stored as a
            # separate variable: img_red, img_green, img_blue
            # for grayscale, the single variable is called 'img'
            try:
                fillval = var[self.var_img].attrs['FILLVAL']
                img = np.atleast_3d(var[self.var_img][i])
                img = _convertImgDtype(img, fillval)
            except:
                fillval = var[self.var_img_red].attrs['FILLVAL']
                img_red = _convertImgDtype(var[self.var_img_red][i], fillval)
                img_green = _convertImgDtype(var[self.var_img_green][i],
                                             fillval)
                img_blue = _convertImgDtype(var[self.var_img_blue][i], fillval)
                img = ma.dstack((img_red, img_green, img_blue))

            latsCenter = var[self.var_latsCenter][i]
            lonsCenter = var[self.var_lonsCenter][i]
            lats = var[var[self.var_latsCenter].attrs['bounds']][i]
            lons = var[var[self.var_lonsCenter].attrs['bounds']][i]

            # TODO read in MLat/MLT as well if available

            self._latsCenter = ma.masked_invalid(latsCenter)
            self._lonsCenter = ma.masked_invalid(lonsCenter)
            self._lats = ma.masked_invalid(lats)
            self._lons = ma.masked_invalid(lons)
            self._elevation = ma.masked_invalid(90 -
                                                var[self.var_zenithAngle][i])

            metadata = root.attrs

            assert var[self.var_altitude].attrs['UNITS'] == 'meters'
            assert var[self.var_cameraPos].attrs['UNITS'] == 'kilometers'

        identifier = os.path.splitext(os.path.basename(cdfPath))[0]
        BaseMapping.__init__(self,
                             altitude,
                             cameraPosGCRS,
                             photoTime,
                             identifier,
                             metadata=metadata)
        ArrayImageMixin.__init__(self, img)
Example #8
def retrieve_lst_space_time(data_files, reliability_files, date_regex,
                            sanity_path):
    space_list = []
    for raster, rel in zip(data_files, reliability_files):
        if re.compile(date_regex).search(raster).group() != re.compile(
                date_regex).search(rel).group():
            sys.exit("Data and reliability files do not match.")
        logging.info("Processing data: " + raster)
        logging.info("Applying reliability mask: " + rel)
        masked_array = create_lst_masked_array(raster, rel, sanity_path)
        logging.warn("Data totally masked: " + str(masked_array.mask.all()))
        space_list.append(masked_array)
    # Lon, Lat, Time.
    space_time = ma.dstack(space_list)
    logging.debug("Space-time shape:" + str(space_time.shape))
    logging.warn("Space-time totally masked: " + str(space_time.mask.all()))
    return space_time
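
A minimal sketch of stacking per-date masked rasters into a (lat, lon, time) cube as above, using synthetic 2x2 rasters (names are illustrative):

import numpy as np
import numpy.ma as ma

r1 = ma.masked_invalid(np.array([[1.0, np.nan], [3.0, 4.0]]))
r2 = ma.masked_invalid(np.array([[np.nan, 2.0], [3.0, 4.0]]))

space_time = ma.dstack([r1, r2])                  # (2, 2, 2), last axis = time
print(space_time.shape, space_time.mask.all())    # (2, 2, 2) False
print(ma.mean(space_time, axis=2))                # per-pixel mean over valid dates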
Example #9
 def __init__(self, cdfPath, i=0):
     with pycdf.CDF(cdfPath) as root:
         var = root
         altitude = var[self.var_altitude][...]/1000
         cameraPosGCRS = var[self.var_cameraPos][i]
         photoTime = var[self.var_photoTime][i]
         
         # for three channels (RGB), each channel is stored as a
         # separate variable: img_red, img_green, img_blue
         # for grayscale, the single variable is called 'img'
         try:
             fillval = var[self.var_img].attrs['FILLVAL']
             img = np.atleast_3d(var[self.var_img][i])
             img = _convertImgDtype(img, fillval)
         except:
             fillval = var[self.var_img_red].attrs['FILLVAL']
             img_red = _convertImgDtype(var[self.var_img_red][i], fillval)
             img_green = _convertImgDtype(var[self.var_img_green][i], fillval)
             img_blue = _convertImgDtype(var[self.var_img_blue][i], fillval)
             img = ma.dstack((img_red, img_green, img_blue))
                     
         latsCenter = var[self.var_latsCenter][i]
         lonsCenter = var[self.var_lonsCenter][i]
         lats = var[var[self.var_latsCenter].attrs['bounds']][i]
         lons = var[var[self.var_lonsCenter].attrs['bounds']][i]
         
         # TODO read in MLat/MLT as well if available
                     
         self._latsCenter = ma.masked_invalid(latsCenter)
         self._lonsCenter = ma.masked_invalid(lonsCenter)
         self._lats = ma.masked_invalid(lats)
         self._lons = ma.masked_invalid(lons)
         self._elevation = ma.masked_invalid(90 - var[self.var_zenithAngle][i])
         
         metadata = root.attrs
         
         assert var[self.var_altitude].attrs['UNITS'] == 'meters'
         assert var[self.var_cameraPos].attrs['UNITS'] == 'kilometers'
     
     identifier = os.path.splitext(os.path.basename(cdfPath))[0]
     BaseMapping.__init__(self, altitude, cameraPosGCRS, photoTime, identifier, metadata=metadata)
     ArrayImageMixin.__init__(self, img)
Example #10
def get_masked_arr_stack(window_size, correction_switch, degree):
    pol = get_padded_feature_stack(window_size, correction_switch, degree)
    s_mask = get_slick_wise_mask(window_size)
    ma_cond = s_mask[..., 0]
    shp = ma_cond.shape
    num_features = pol.shape[-1]

    # broadcast each 2D slick-mask channel over all feature planes
    # (repeat/reshape) and mask the feature stack with it
    P_ma = ma.masked_where(
        np.repeat(s_mask[..., 0] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)
    E40_ma = ma.masked_where(
        np.repeat(s_mask[..., 1] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)
    E60_ma = ma.masked_where(
        np.repeat(s_mask[..., 2] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)
    E80_ma = ma.masked_where(
        np.repeat(s_mask[..., 3] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)

    W_near_ma = ma.masked_where(
        np.repeat(s_mask[..., -3] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)
    W_mid_ma = ma.masked_where(
        np.repeat(s_mask[..., -2] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)
    W_far_ma = ma.masked_where(
        np.repeat(s_mask[..., -1] < 1, num_features).reshape(shp[0], shp[1], num_features), pol)

    return ma.dstack((P_ma, E40_ma, E60_ma, E80_ma, W_near_ma, W_mid_ma, W_far_ma))
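
A runnable micro-demo of the mask-broadcasting pattern above (synthetic arrays; the np.repeat(...).reshape(...) call mirrors the one in the example):

import numpy as np
import numpy.ma as ma

pol = np.arange(2 * 3 * 4, dtype=float).reshape(2, 3, 4)    # H=2, W=3, F=4
region = np.array([[1, 0, 1],
                   [0, 1, 0]])                               # 1 = keep pixel

cond = np.repeat(region < 1, pol.shape[-1]).reshape(2, 3, 4)
masked = ma.masked_where(cond, pol)
print(masked.count(axis=-1))   # 4 where region == 1, 0 where it is masked out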
Example #11
print(inv_gt_thp)
offset_thp = gdal.ApplyGeoTransform(inv_gt_thp, 1399618.9749825108, 705060.6257949192)
xoff2, yoff2 = map(int, offset_thp)
array_thp = thp.ReadAsArray(xoff2, yoff2, 599, 1240)
print("Array THP ", array_thp)

comp_dem = ma.masked_where(array_dem >= 3000, array_dem).copy()
comp_slope = ma.masked_where(array_slope < 0, array_slope).copy()
comp_thp = ma.masked_where(array_thp >= 10000, array_thp).copy()
print("DEM", comp_dem)
print("SLOPE", comp_slope)
print(comp_thp)
print("Mean of DEM: ", np.mean(comp_demy), "Min of DEM: ", np.min(comp_dem), "Max of DEM: ", np.max(comp_dem))
print("Mean of slope: ", np.mean(comp_slope), "Min of slope: ", np.min(comp_slope), "Max of slope: ", np.max(comp_slope))

arr_dem_slope = ma.dstack((comp_dem, comp_slope))
print("STACK:",arr_dem_slope)
print(arr_dem_slope.shape)

a = arr_dem_slope[:,:,0].copy()
b = arr_dem_slope[:,:,1].copy()
print("DEM:", a)
print("SLOPE:",b)

req_dem_slope = (a < 1000) & (b < 30)
print("Test:", req_dem_slope)
print("Shape of test", req_dem_slope.shape)

req_dem_slope = req_dem_slope * 1
#print(ma.max(req_dem_slope))
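
A small sketch of combining two masked rasters and deriving a boolean criterion, as above, with synthetic DEM/slope values (names illustrative):

import numpy as np
import numpy.ma as ma

dem_raw = np.array([[500.0, 3500.0], [900.0, 1200.0]])
slope_raw = np.array([[10.0, 5.0], [-1.0, 40.0]])
dem = ma.masked_where(dem_raw >= 3000, dem_raw)
slope = ma.masked_where(slope_raw < 0, slope_raw)

stack = ma.dstack((dem, slope))                       # (2, 2, 2)
suitable = (stack[:, :, 0] < 1000) & (stack[:, :, 1] < 30)
print(suitable * 1)   # 1 where both criteria hold; masked where either raster is masked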
Example #12
def _zone_averaging(
    xprop, yprop, zoneprop, zone_minmax, coarsen, zone_avg, dzprop, mprop, summing=False
):

    # General preprocessing, and...
    # Change the 3D numpy array so they get layers by
    # averaging across zones. This may speed up a lot,
    # but will reduce the resolution.
    # The x y coordinates shall be averaged (ideally
    # with thickness weighting...) while e.g. hcpfzprop
    # must be summed.
    # Somewhat different processing whether this is a hc thickness
    # or an average.

    xpr = xprop
    ypr = yprop
    zpr = zoneprop
    dpr = dzprop

    mpr = mprop

    if coarsen > 1:
        xpr = xprop[::coarsen, ::coarsen, ::].copy(order="C")
        ypr = yprop[::coarsen, ::coarsen, ::].copy(order="C")
        zpr = zoneprop[::coarsen, ::coarsen, ::].copy(order="C")
        dpr = dzprop[::coarsen, ::coarsen, ::].copy(order="C")
        mpr = mprop[::coarsen, ::coarsen, ::].copy(order="C")
        zpr = zpr.astype(np.int32)

    if zone_avg:
        zmin = int(zone_minmax[0])
        zmax = int(zone_minmax[1])
        if zpr.min() > zmin:
            zmin = zpr.min()
        if zpr.max() < zmax:
            zmax = zpr.max()

        newx = []
        newy = []
        newz = []
        newm = []
        newd = []

        for izv in range(zmin, zmax + 1):
            logger.info("Averaging for zone %s ...", izv)
            xpr2 = ma.masked_where(zpr != izv, xpr)
            ypr2 = ma.masked_where(zpr != izv, ypr)
            zpr2 = ma.masked_where(zpr != izv, zpr)
            dpr2 = ma.masked_where(zpr != izv, dpr)
            mpr2 = ma.masked_where(zpr != izv, mpr)

            # get the thickness and normalize along axis 2 (vertical)
            # to get normalized thickness weights
            lay_sums = dpr2.sum(axis=2)
            normed_dz = dpr2 / lay_sums[:, :, np.newaxis]

            # assume that coordinates have equal weights within a zone
            xpr2 = ma.average(xpr2, axis=2)
            ypr2 = ma.average(ypr2, axis=2)
            zpr2 = ma.average(zpr2, axis=2)  # avg zone

            dpr2 = ma.sum(dpr2, axis=2)

            if summing:
                mpr2 = ma.sum(mpr2, axis=2)
            else:
                mpr2 = ma.average(mpr2, weights=normed_dz, axis=2)  # avg zone

            newx.append(xpr2)
            newy.append(ypr2)
            newz.append(zpr2)
            newd.append(dpr2)
            newm.append(mpr2)

        xpr = ma.dstack(newx)
        ypr = ma.dstack(newy)
        zpr = ma.dstack(newz)
        dpr = ma.dstack(newd)
        mpr = ma.dstack(newm)
        zpr = zpr.astype(np.int32)

    xpr = ma.filled(xpr, fill_value=xtgeo.UNDEF)
    ypr = ma.filled(ypr, fill_value=xtgeo.UNDEF)
    zpr = ma.filled(zpr, fill_value=0)
    dpr = ma.filled(dpr, fill_value=0.0)

    mpr = ma.filled(mpr, fill_value=0.0)

    return xpr, ypr, zpr, mpr, dpr
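
A minimal sketch of the thickness-weighted layer averaging used above, on one synthetic 2x2 column of 3 layers:

import numpy as np
import numpy.ma as ma

mpr = ma.array(np.array([[[1.0, 2.0, 3.0]] * 2] * 2))   # property, shape (2, 2, 3)
dpr = ma.array(np.array([[[1.0, 1.0, 2.0]] * 2] * 2))   # layer thickness

lay_sums = dpr.sum(axis=2)
normed_dz = dpr / lay_sums[:, :, np.newaxis]            # weights sum to 1 per cell
avg = ma.average(mpr, weights=normed_dz, axis=2)
print(avg)   # 2.25 everywhere: (1*1 + 2*1 + 3*2) / 4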
Example #13
def _slice_between_surfaces(
    this,
    cube,
    sampling,
    other,
    other_position,
    zrange,
    ndiv,
    mask,
    attrlist,
    mthreshold,
    snapxy,
    showprogress=False,
    deadtraces=True,
    deletecube=False,
):
    """Slice and find values between two surfaces."""

    npcollect = []
    zincr = zrange / float(ndiv)

    zcenter = this.copy()
    zcenter.slice_cube(cube,
                       sampling=sampling,
                       mask=mask,
                       snapxy=snapxy,
                       deadtraces=deadtraces)
    npcollect.append(zcenter.values)

    # collect below or above the original surface
    if other_position == "above":
        mul = -1
    else:
        mul = 1

    # collect slices shifted away from the original surface
    progress = XTGShowProgress(ndiv, show=showprogress, leadtext="progress: ")
    for idv in range(ndiv):
        progress.flush(idv)
        ztmp = this.copy()
        ztmp.values += zincr * (idv + 1) * mul
        zvalues = ztmp.values.copy()

        ztmp.slice_cube(cube,
                        sampling=sampling,
                        mask=mask,
                        snapxy=snapxy,
                        deadtraces=deadtraces)

        diff = mul * (other.values - zvalues)

        values = ztmp.values
        values = ma.masked_where(diff < 0.0, values)

        npcollect.append(values)

    stacked = ma.dstack(npcollect)

    del npcollect

    if deletecube:
        del cube

    # for cases with erosion, the two surfaces are equal
    isovalues = mul * (other.values - this.values)

    attvalues = dict()
    for attr in attrlist:
        attvaluestmp = _attvalues(attr, stacked)
        attvalues[attr] = ma.masked_where(isovalues < mthreshold, attvaluestmp)

    progress.finished()

    return attvalues  # this is dict with numpies, one per attribute
Example #14
def _slice_constant_window(
    this,
    cube,
    sampling,
    zrange,
    ndiv,
    mask,
    attrlist,
    snapxy,
    showprogress=False,
    deadtraces=True,
    deletecube=False,
):
    """Slice a window, (constant in vertical extent)."""
    npcollect = []
    zcenter = this.copy()

    logger.info("Mean W of depth no MIDDLE slice is %s", zcenter.values.mean())
    zcenter.slice_cube(cube,
                       sampling=sampling,
                       mask=mask,
                       snapxy=snapxy,
                       deadtraces=deadtraces)
    logger.info("Mean of cube slice is %s", zcenter.values.mean())

    npcollect.append(zcenter.values)

    zincr = zrange / float(ndiv)

    logger.info("ZINCR is %s", zincr)

    # collect above the original surface
    progress = XTGShowProgress(ndiv * 2,
                               show=showprogress,
                               leadtext="progress: ",
                               skip=1)
    for idv in range(ndiv):
        progress.flush(idv)
        ztmp = this.copy()
        ztmp.values -= zincr * (idv + 1)
        ztmp.slice_cube(cube,
                        sampling=sampling,
                        mask=mask,
                        snapxy=snapxy,
                        deadtraces=deadtraces)
        npcollect.append(ztmp.values)
    # collect below the original surface
    for idv in range(ndiv):
        progress.flush(ndiv + idv)
        ztmp = this.copy()
        ztmp.values += zincr * (idv + 1)
        ztmp.slice_cube(cube,
                        sampling=sampling,
                        mask=mask,
                        snapxy=snapxy,
                        deadtraces=deadtraces)
        npcollect.append(ztmp.values)

    logger.info("Make a stack of the maps...")
    stacked = ma.dstack(npcollect)
    del npcollect
    if deletecube:
        del cube

    attvalues = dict()
    for attr in attrlist:
        logger.info("Running attribute %s", attr)
        attvalues[attr] = _attvalues(attr, stacked)

    progress.finished()
    return attvalues  # this is dict with numpies, one per attribute
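
The helper _attvalues is not shown in these snippets, so the following is only a hedged illustration of typical attribute reductions over the stacked third axis (synthetic slices):

import numpy as np
import numpy.ma as ma

slice_a = ma.masked_invalid(np.array([[1.0, np.nan], [2.0, 3.0]]))
slice_b = ma.masked_invalid(np.array([[2.0, 5.0], [np.nan, 1.0]]))
stacked = ma.dstack([slice_a, slice_b])      # (nrow, ncol, nslices)

print(ma.mean(stacked, axis=2))   # per-node mean, ignoring masked samples
print(ma.max(stacked, axis=2))    # per-node maximum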
Example #15
    def __init__(self, cdfPath):
        with Dataset(cdfPath, 'r') as root:
            var = root.variables
            altitude = var[self.var_altitude][:] / 1000
            cameraPosGCRS = var[self.var_cameraPos][:]
            photoTime = _readDate(var[self.var_photoTime])

            # for three channels (RGB), each channel is stored as a
            # separate variable: img_red, img_green, img_blue
            # for grayscale, the single variable is called 'img'
            try:
                img = np.atleast_3d(var[self.var_img][:])
                img = _convertImgDtype(img)
            except:
                img_red = _convertImgDtype(var[self.var_img_red][:])
                img_green = _convertImgDtype(var[self.var_img_green][:])
                img_blue = _convertImgDtype(var[self.var_img_blue][:])
                img = ma.dstack((img_red, img_green, img_blue))

            latsCenter = var[self.var_latsCenter][:]
            lonsCenter = var[self.var_lonsCenter][:]
            latBounds = var[var[self.var_latsCenter].bounds][:]
            lonBounds = var[var[self.var_lonsCenter].bounds][:]

            # TODO read in MLat/MLT as well if available

            if latsCenter.ndim == 1:
                latsCenter, lonsCenter = np.dstack(
                    np.meshgrid(latsCenter, lonsCenter)).T

                assert np.all(latBounds[:-1, 1] == latBounds[1:, 0])
                assert np.all(lonBounds[:-1, 1] == lonBounds[1:, 0])
                latBounds = np.concatenate((latBounds[:,
                                                      0], [latBounds[-1, 1]]))
                lonBounds = np.concatenate((lonBounds[:,
                                                      0], [lonBounds[-1, 1]]))
                lats, lons = np.dstack(np.meshgrid(latBounds, lonBounds)).T
            else:
                lats = np.empty(
                    (latsCenter.shape[0] + 1, latsCenter.shape[1] + 1),
                    latBounds.dtype)
                lons = np.empty_like(lats)

                for grid, bounds in [(lats, latBounds), (lons, lonBounds)]:
                    # have to use numpy's assert to handle NaN's correctly
                    assert_array_equal(bounds[:-1, :-1, 2], bounds[:-1, 1:, 3])
                    assert_array_equal(bounds[:-1, :-1, 2], bounds[1:, 1:, 0])
                    assert_array_equal(bounds[:-1, :-1, 2], bounds[1:, :-1, 1])
                    grid[:-1, :-1] = bounds[:, :, 0]
                    grid[-1, :-1] = bounds[-1, :, 3]
                    grid[:-1, -1] = bounds[:, -1, 1]
                    grid[-1, -1] = bounds[-1, -1, 2]

            self._latsCenter = ma.masked_invalid(latsCenter)
            self._lonsCenter = ma.masked_invalid(lonsCenter)
            self._lats = ma.masked_invalid(lats)
            self._lons = ma.masked_invalid(lons)
            self._elevation = ma.masked_invalid(90 -
                                                var[self.var_zenithAngle][:])

            metadata = root.__dict__  # ordered dict of all global attributes

            assert var[self.var_altitude].units == 'meters'
            assert var[self.var_cameraPos].units == 'kilometers'

        identifier = os.path.splitext(os.path.basename(cdfPath))[0]
        BaseMapping.__init__(self,
                             altitude,
                             cameraPosGCRS,
                             photoTime,
                             identifier,
                             metadata=metadata)
        ArrayImageMixin.__init__(self, img)
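
A sketch of the 1-D branch above: contiguous cell bounds turned into 2-D corner grids with meshgrid/dstack (synthetic, deliberately tiny bounds arrays):

import numpy as np

latBounds = np.array([[60.0, 61.0], [61.0, 62.0]])            # (n, 2), contiguous
lonBounds = np.array([[10.0, 11.0], [11.0, 12.0], [12.0, 13.0]])

assert np.all(latBounds[:-1, 1] == latBounds[1:, 0])
assert np.all(lonBounds[:-1, 1] == lonBounds[1:, 0])
latEdges = np.concatenate((latBounds[:, 0], [latBounds[-1, 1]]))   # (n+1,)
lonEdges = np.concatenate((lonBounds[:, 0], [lonBounds[-1, 1]]))
lats, lons = np.dstack(np.meshgrid(latEdges, lonEdges)).T
print(lats.shape, lons.shape)   # (3, 4) and (3, 4): corner grids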
Example #16
def generate_plots(fname="/tmp/bitstead.hdf5", figbase='/tmp/figures'):
    with h5py.File("/tmp/bitstead.hdf5") as f:
        for fieldname,grp in f.iteritems():
            years = grp.keys()
            years.sort()
            print fieldname


            ylds = []
            profits = []

            for year in years:
                print '\t%s' % year
                for commodity in grp[year]:
                    print '\t\t%s' % commodity

                    data = grp[year][commodity]['yield']
                    xmin = data.attrs['xmin']
                    ymin = data.attrs['ymin']
                    xmax = data.attrs['xmax']
                    ymax = data.attrs['ymax']
                    stride = data.attrs['stride']
                    map_extents = [xmin,xmax,ymin,ymax]

                    # Map data
                    mask = data['mask'].value == 0
                    yld = ma.masked_less(ma.masked_invalid(ma.masked_array(data['yield'], mask)), 0)
                    moisture = ma.masked_array(data['moisture'], mask)
                    elevation = ma.masked_array(data['elevation'], mask)


                    # Areas
                    grid_cell_area = (stride ** 2) / 4046.86
                    areas = ma.masked_array(np.ones(mask.shape), mask) * grid_cell_area  # Convert square meters to acres


                    # Calculating profit/loss
                    costs = get_inputs(fieldname, year, commodity, areas) * areas

                    income = yld * areas * get_market_price(fieldname, year, commodity)
                    profit = income - costs
                    ppa = profit/areas

                    # Normalized yield
                    ynorm = (yld - ma.mean(yld)) / ma.std(yld)


                    # For summary info
                    ylds.append(ynorm)
                    profits.append(ppa)


                    try:

                        # Mask (for outline of field)
                        # fig = plt.figure()
                        # plt.ticklabel_format(useOffset=False, axis='y')
                        # plt.ticklabel_format(useOffset=False, axis='x')

                        # plt.imshow(mask, extent=map_extents)
                        # fig.savefig('%s/%s-%s-%s-mask.pdf' % (figbase, fieldname, year, commodity) , format='pdf')


                        # Yield Map
                        fig = plt.figure()
                        plt.title('%s %s Yield (Bu/Acre)' % (year, commodity) )
                        cmap = mpl.cm.get_cmap('RdYlGn')

                        norm = mpl.colors.BoundaryNorm(np.linspace(ma.min(yld),ma.max(yld),10), cmap.N)
                        plt.gca().axis('off')
                        plot_grid_map(yld, cmap=cmap, norm=norm, interpolation='none')
                        plt.colorbar()
                        fig.savefig('%s/%s-%s-%s-yield-map.pdf' % (figbase, fieldname, year, commodity) , format='pdf')

                        # Yield Histogram
                        fig = plt.figure()
                        plt.title('%s %s Yield Distribution' % (year, commodity) )
                        a = np.ravel(ma.compressed(yld))
                        Y,X = np.histogram(a, 10, normed=0, weights=(np.ones(a.shape) * grid_cell_area))
                        x_span = X.max()-X.min()
                        C = [cmap(((x-X.min())/x_span)) for x in X]
                        plt.bar(X[:-1],Y,color=C,width=X[1]-X[0])
                        plt.xlabel('Yield (Bushel/Acre)')
                        plt.ylabel('Acres')
                        fig.savefig('%s/%s-%s-%s-yield-histogram.pdf' %(figbase, fieldname, year, commodity), format='pdf')

                        # Normalized Yield Map
                        fig = plt.figure()
                        plt.title('%s %s Yield (Bu/Acre) Normalized' % (year, commodity) )
                        cmap = mpl.cm.get_cmap('RdYlGn')
                        norm = mpl.colors.BoundaryNorm(np.linspace(-5,5,11), cmap.N)
                        plot_grid_map(ynorm, cmap=cmap, norm=norm, interpolation='none')
                        plt.colorbar()
                        fig.savefig('%s/%s-%s-%s-yield-map-normalized.pdf' % (figbase, fieldname, year, commodity) , format='pdf')


                        # Input Costs Map
                        # fig = plt.figure()
                        # plt.title('%s %s Costs ($/Acre)' % (year, commodity) )
                        # cmap = mpl.cm.get_cmap('RdYlGn')
                        # norm = mpl.colors.BoundaryNorm(np.linspace(ma.min(costs/areas),ma.max(costs/areas),10), cmap.N)
                        # plot_grid_map(costs/areas, cmap=cmap, norm=norm, interpolation='none')
                        # plt.colorbar()
                        # fig.savefig('%s/%s-%s-%s-costs-map.pdf' % (figbase, fieldname, year, commodity) , format='pdf')

                        # Profit/Loss Map
                        fig = plt.figure()
                        plt.title('%s %s Profit/Loss ($/Acre)' % (year, commodity) )
                        cmap = mpl.cm.get_cmap('RdYlGn')
                        norm = mpl.colors.BoundaryNorm(np.linspace(ma.min(ppa),ma.max(ppa),10), cmap.N)
                        plot_grid_map(ppa, cmap=cmap, norm=norm, interpolation='none')
                        plt.colorbar()
                        fig.savefig('%s/%s-%s-%s-profit-map.pdf' % (figbase, fieldname, year, commodity) , format='pdf')


                        # Profit/Loss Histogram
                        fig = plt.figure()
                        plt.title('%s %s Profit/Loss Distribution' % (year, commodity) )
                        a = np.ravel(ma.compressed(profit / areas))
                        Y,X = np.histogram(a, 10, normed=0, weights=(np.ones(a.shape) * grid_cell_area))

                        def pmap(x):
                            if x < 0:
                                return cmap(X.min())
                            else:
                                return cmap(X.max())

                        x_span = X.max()-X.min()
                        C = [pmap(x) for x in X]
                        plt.bar(X[:-1],Y,color=C,width=X[1]-X[0])
                        plt.xlabel('Profit/Loss ($/Acre)')
                        plt.ylabel('Acres')
                        fig.savefig('%s/%s-%s-%s-profit-histogram.pdf' %(figbase, fieldname, year, commodity), format='pdf')

                        # A map distinguishing what is (or is not) profitable.
                        fig = plt.figure()
                        plot_grid_map(profit > 0, cmap=cmap)
                        plt.title('%s %s Profit or Loss' % (year,commodity))
                        fig.savefig('%s/%s-%s-%s-profit-or-loss.pdf' %(figbase, fieldname, year, commodity), format='pdf')


                    except Exception as e:
                        print e
                        print yld.shape
                        print ma.min(yld)
                        print ma.max(yld)
                        print '%s-%s' % (np.min(yld), np.max(yld))

                        print 'Could not generate maps for %s/%s/%s' % (fieldname, year, commodity)
                        # raise e


            # Field summary stats

            # First, Profits.
            vals = ma.dstack(profits)
            mvals = ma.mean(vals, axis=2)
            vvars = ma.std(vals, axis=2)


            fig = plt.figure()
            plt.title('Profit, mean across years ($/Acre)')
            norm = mpl.colors.BoundaryNorm(np.linspace(mvals.min(),mvals.max(),10), cmap.N)
            plot_grid_map(mvals, cmap=cmap, norm=norm)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-mean.pdf' %(figbase, fieldname))

            fig = plt.figure()
            plt.title('Profit, std deviation across years')
            norm = mpl.colors.BoundaryNorm(np.linspace(vvars.min(),vvars.max(),10), cmap.N)
            plot_grid_map(vvars, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-std.pdf' %(figbase, fieldname))


            fig = plt.figure()
            plt.title('Profit, max across years ($/Acre)')
            ymax = ma.max(vals,axis=2)
            norm = mpl.colors.BoundaryNorm(np.linspace(ymax.min(),ymax.max(),10), cmap.N)
            plot_grid_map(ymax, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-max.pdf' %(figbase, fieldname))


            fig = plt.figure()
            plt.title('Profit, min across years ($/Acre)')
            ymin = ma.min(vals,axis=2)
            norm = mpl.colors.BoundaryNorm(np.linspace(ymin.min(),ymin.max(),10), cmap.N)
            plot_grid_map(ymin, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-min.pdf' % (figbase, fieldname))


            # Now yields
            vals = ma.dstack(ylds)
            mvals = ma.mean(vals, axis=2)
            vvars = ma.std(vals, axis=2)

            fig = plt.figure()
            plt.title('Normalized Yield, mean across years')
            norm = mpl.colors.BoundaryNorm(np.linspace(mvals.min(),mvals.max(),10), cmap.N)
            plot_grid_map(mvals, cmap=cmap, norm=norm)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-mean.pdf' %(figbase, fieldname))

            fig = plt.figure()
            plt.title('Normalized Yield, std deviation across years')
            norm = mpl.colors.BoundaryNorm(np.linspace(vvars.min(),vvars.max(),10), cmap.N)
            plot_grid_map(vvars, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-std.pdf' %(figbase, fieldname))


            fig = plt.figure()
            plt.title('Normalized Yield, max across years')
            ymax = ma.max(vals,axis=2)
            norm = mpl.colors.BoundaryNorm(np.linspace(ymax.min(),ymax.max(),10), cmap.N)
            plot_grid_map(ymax, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-max.pdf' %(figbase, fieldname))


            fig = plt.figure()
            plt.title('Normalized Yield, min across years')
            ymin = ma.min(vals,axis=2)
            norm = mpl.colors.BoundaryNorm(np.linspace(ymin.min(),ymin.max(),10), cmap.N)
            plot_grid_map(ymin, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-min.pdf' % (figbase, fieldname))



            # Log the number of years of data that we have.
            fig = plt.figure()
            plt.title('Number of years of data')
            cnt = ma.count(vals,axis=2)
            norm = mpl.colors.BoundaryNorm(np.linspace(0,cnt.max(),2*(cnt.max()+1)), cmap.N)
            plot_grid_map(cnt, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-number-years-map.pdf' % (figbase, fieldname))
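
A minimal sketch of the per-field summary stacking above: per-year masked maps combined with ma.dstack, then reduced across the year axis (synthetic 2x2 maps, illustrative names):

import numpy as np
import numpy.ma as ma

year1 = ma.masked_invalid(np.array([[100.0, np.nan], [80.0, 120.0]]))
year2 = ma.masked_invalid(np.array([[110.0, 90.0], [np.nan, 130.0]]))

vals = ma.dstack([year1, year2])      # (rows, cols, years)
print(ma.mean(vals, axis=2))          # mean per cell across years
print(ma.std(vals, axis=2))           # year-to-year variability
print(ma.count(vals, axis=2))         # number of years with data per cell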
Example #17
 def __init__(self, cdfPath):
     with Dataset(cdfPath, 'r') as root:
         var = root.variables
         altitude = var[self.var_altitude][:]/1000
         cameraPosGCRS = var[self.var_cameraPos][:]
         photoTime = _readDate(var[self.var_photoTime])
         
         # for three channels (RGB), each channel is stored as a
         # separate variable: img_red, img_green, img_blue
         # for grayscale, the single variable is called 'img'
         try:
             img = np.atleast_3d(var[self.var_img][:])
             img = _convertImgDtype(img)
         except:
             img_red = _convertImgDtype(var[self.var_img_red][:])
             img_green = _convertImgDtype(var[self.var_img_green][:])
             img_blue = _convertImgDtype(var[self.var_img_blue][:])
             img = ma.dstack((img_red, img_green, img_blue))
                     
         latsCenter = var[self.var_latsCenter][:]
         lonsCenter = var[self.var_lonsCenter][:]
         latBounds = var[var[self.var_latsCenter].bounds][:]
         lonBounds = var[var[self.var_lonsCenter].bounds][:]
         
         # TODO read in MLat/MLT as well if available
         
         if latsCenter.ndim == 1:
             latsCenter, lonsCenter = np.dstack(np.meshgrid(latsCenter, lonsCenter)).T
             
             assert np.all(latBounds[:-1,1] == latBounds[1:,0])
             assert np.all(lonBounds[:-1,1] == lonBounds[1:,0])
             latBounds = np.concatenate((latBounds[:,0], [latBounds[-1,1]]))
             lonBounds = np.concatenate((lonBounds[:,0], [lonBounds[-1,1]]))
             lats, lons = np.dstack(np.meshgrid(latBounds, lonBounds)).T
         else:
             lats = np.empty((latsCenter.shape[0]+1, latsCenter.shape[1]+1), latBounds.dtype)
             lons = np.empty_like(lats)
             
             for grid, bounds in [(lats, latBounds), (lons, lonBounds)]:
                 # have to use numpy's assert to handle NaN's correctly
                 assert_array_equal(bounds[:-1,:-1,2], bounds[:-1,1:,3])
                 assert_array_equal(bounds[:-1,:-1,2], bounds[1:,1:,0])
                 assert_array_equal(bounds[:-1,:-1,2], bounds[1:,:-1,1])
                 grid[:-1,:-1] = bounds[:,:,0]
                 grid[-1,:-1] = bounds[-1,:,3]
                 grid[:-1,-1] = bounds[:,-1,1]
                 grid[-1,-1] = bounds[-1,-1,2]
         
         self._latsCenter = ma.masked_invalid(latsCenter)
         self._lonsCenter = ma.masked_invalid(lonsCenter)
         self._lats = ma.masked_invalid(lats)
         self._lons = ma.masked_invalid(lons)
         self._elevation = ma.masked_invalid(90 - var[self.var_zenithAngle][:])
         
         metadata = root.__dict__ # ordered dict of all global attributes
         
         assert var[self.var_altitude].units == 'meters'
         assert var[self.var_cameraPos].units == 'kilometers'
     
     identifier = os.path.splitext(os.path.basename(cdfPath))[0]
     BaseMapping.__init__(self, altitude, cameraPosGCRS, photoTime, identifier, metadata=metadata)
     ArrayImageMixin.__init__(self, img)
Example #18
def generate_plots(fname="/tmp/bitstead.hdf5", figbase='/tmp/figures'):
    with h5py.File("/tmp/bitstead.hdf5") as f:
        for fieldname, grp in f.iteritems():
            years = grp.keys()
            years.sort()
            print fieldname

            ylds = []
            profits = []

            for year in years:
                print '\t%s' % year
                for commodity in grp[year]:
                    print '\t\t%s' % commodity

                    data = grp[year][commodity]['yield']
                    xmin = data.attrs['xmin']
                    ymin = data.attrs['ymin']
                    xmax = data.attrs['xmax']
                    ymax = data.attrs['ymax']
                    stride = data.attrs['stride']
                    map_extents = [xmin, xmax, ymin, ymax]

                    # Map data
                    mask = data['mask'].value == 0
                    yld = ma.masked_less(
                        ma.masked_invalid(ma.masked_array(data['yield'],
                                                          mask)), 0)
                    moisture = ma.masked_array(data['moisture'], mask)
                    elevation = ma.masked_array(data['elevation'], mask)

                    # Areas
                    grid_cell_area = (stride**2) / 4046.86
                    areas = ma.masked_array(
                        np.ones(mask.shape), mask
                    ) * grid_cell_area  # Convert square meters to acres

                    # Calculating profit/loss
                    costs = get_inputs(fieldname, year, commodity,
                                       areas) * areas

                    income = yld * areas * get_market_price(
                        fieldname, year, commodity)
                    profit = income - costs
                    ppa = profit / areas

                    # Normalized yield
                    ynorm = (yld - ma.mean(yld)) / ma.std(yld)

                    # For summary info
                    ylds.append(ynorm)
                    profits.append(ppa)

                    try:

                        # Mask (for outline of field)
                        # fig = plt.figure()
                        # plt.ticklabel_format(useOffset=False, axis='y')
                        # plt.ticklabel_format(useOffset=False, axis='x')

                        # plt.imshow(mask, extent=map_extents)
                        # fig.savefig('%s/%s-%s-%s-mask.pdf' % (figbase, fieldname, year, commodity) , format='pdf')

                        # Yield Map
                        fig = plt.figure()
                        plt.title('%s %s Yield (Bu/Acre)' % (year, commodity))
                        cmap = mpl.cm.get_cmap('RdYlGn')

                        norm = mpl.colors.BoundaryNorm(
                            np.linspace(ma.min(yld), ma.max(yld), 10), cmap.N)
                        plt.gca().axis('off')
                        plot_grid_map(yld,
                                      cmap=cmap,
                                      norm=norm,
                                      interpolation='none')
                        plt.colorbar()
                        fig.savefig('%s/%s-%s-%s-yield-map.pdf' %
                                    (figbase, fieldname, year, commodity),
                                    format='pdf')

                        # Yield Histogram
                        fig = plt.figure()
                        plt.title('%s %s Yield Distribution' %
                                  (year, commodity))
                        a = np.ravel(ma.compressed(yld))
                        Y, X = np.histogram(a,
                                            10,
                                            normed=0,
                                            weights=(np.ones(a.shape) *
                                                     grid_cell_area))
                        x_span = X.max() - X.min()
                        C = [cmap(((x - X.min()) / x_span)) for x in X]
                        plt.bar(X[:-1], Y, color=C, width=X[1] - X[0])
                        plt.xlabel('Yield (Bushel/Acre)')
                        plt.ylabel('Acres')
                        fig.savefig('%s/%s-%s-%s-yield-histogram.pdf' %
                                    (figbase, fieldname, year, commodity),
                                    format='pdf')

                        # Normalized Yield Map
                        fig = plt.figure()
                        plt.title('%s %s Yield (Bu/Acre) Normalized' %
                                  (year, commodity))
                        cmap = mpl.cm.get_cmap('RdYlGn')
                        norm = mpl.colors.BoundaryNorm(np.linspace(-5, 5, 11),
                                                       cmap.N)
                        plot_grid_map(ynorm,
                                      cmap=cmap,
                                      norm=norm,
                                      interpolation='none')
                        plt.colorbar()
                        fig.savefig('%s/%s-%s-%s-yield-map-normalized.pdf' %
                                    (figbase, fieldname, year, commodity),
                                    format='pdf')

                        # Input Costs Map
                        # fig = plt.figure()
                        # plt.title('%s %s Costs ($/Acre)' % (year, commodity) )
                        # cmap = mpl.cm.get_cmap('RdYlGn')
                        # norm = mpl.colors.BoundaryNorm(np.linspace(ma.min(costs/areas),ma.max(costs/areas),10), cmap.N)
                        # plot_grid_map(costs/areas, cmap=cmap, norm=norm, interpolation='none')
                        # plt.colorbar()
                        # fig.savefig('%s/%s-%s-%s-costs-map.pdf' % (figbase, fieldname, year, commodity) , format='pdf')

                        # Profit/Loss Map
                        fig = plt.figure()
                        plt.title('%s %s Profit/Loss ($/Acre)' %
                                  (year, commodity))
                        cmap = mpl.cm.get_cmap('RdYlGn')
                        norm = mpl.colors.BoundaryNorm(
                            np.linspace(ma.min(ppa), ma.max(ppa), 10), cmap.N)
                        plot_grid_map(ppa,
                                      cmap=cmap,
                                      norm=norm,
                                      interpolation='none')
                        plt.colorbar()
                        fig.savefig('%s/%s-%s-%s-profit-map.pdf' %
                                    (figbase, fieldname, year, commodity),
                                    format='pdf')

                        # Profit/Loss Histogram
                        fig = plt.figure()
                        plt.title('%s %s Profit/Loss Distribution' %
                                  (year, commodity))
                        a = np.ravel(ma.compressed(profit / areas))
                        Y, X = np.histogram(a,
                                            10,
                                            normed=0,
                                            weights=(np.ones(a.shape) *
                                                     grid_cell_area))

                        def pmap(x):
                            if x < 0:
                                return cmap(X.min())
                            else:
                                return cmap(X.max())

                        x_span = X.max() - X.min()
                        C = [pmap(x) for x in X]
                        plt.bar(X[:-1], Y, color=C, width=X[1] - X[0])
                        plt.xlabel('Profit/Loss ($/Acre)')
                        plt.ylabel('Acres')
                        fig.savefig('%s/%s-%s-%s-profit-histogram.pdf' %
                                    (figbase, fieldname, year, commodity),
                                    format='pdf')

                        # A map distinguishing what is (or is not) profitable.
                        fig = plt.figure()
                        plot_grid_map(profit > 0, cmap=cmap)
                        plt.title('%s %s Profit or Loss' % (year, commodity))
                        fig.savefig('%s/%s-%s-%s-profit-or-loss.pdf' %
                                    (figbase, fieldname, year, commodity),
                                    format='pdf')

                    except Exception as e:
                        print e
                        print yld.shape
                        print ma.min(yld)
                        print ma.max(yld)
                        print '%s-%s' % (np.min(yld), np.max(yld))

                        print 'Could not generate maps for %s/%s/%s' % (
                            fieldname, year, commodity)
                        # raise e

            # Field summary stats

            # First, Profits.
            vals = ma.dstack(profits)
            mvals = ma.mean(vals, axis=2)
            vvars = ma.std(vals, axis=2)

            fig = plt.figure()
            plt.title('Profit, mean across years ($/Acre)')
            norm = mpl.colors.BoundaryNorm(
                np.linspace(mvals.min(), mvals.max(), 10), cmap.N)
            plot_grid_map(mvals, cmap=cmap, norm=norm)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-mean.pdf' % (figbase, fieldname))

            fig = plt.figure()
            plt.title('Profit, std deviation across years')
            norm = mpl.colors.BoundaryNorm(
                np.linspace(vvars.min(), vvars.max(), 10), cmap.N)
            plot_grid_map(vvars, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-std.pdf' % (figbase, fieldname))

            fig = plt.figure()
            plt.title('Profit, max across years ($/Acre)')
            ymax = ma.max(vals, axis=2)
            norm = mpl.colors.BoundaryNorm(
                np.linspace(ymax.min(), ymax.max(), 10), cmap.N)
            plot_grid_map(ymax, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-max.pdf' % (figbase, fieldname))

            fig = plt.figure()
            plt.title('Profit, min across years ($/Acre)')
            ymin = ma.min(vals, axis=2)
            norm = mpl.colors.BoundaryNorm(
                np.linspace(ymin.min(), ymin.max(), 10), cmap.N)
            plot_grid_map(ymin, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-profit-min.pdf' % (figbase, fieldname))

            # Now yields
            vals = ma.dstack(ylds)
            mvals = ma.mean(vals, axis=2)
            vvars = ma.std(vals, axis=2)

            fig = plt.figure()
            plt.title('Normalized Yield, mean across years')
            norm = mpl.colors.BoundaryNorm(
                np.linspace(mvals.min(), mvals.max(), 10), cmap.N)
            plot_grid_map(mvals, cmap=cmap, norm=norm)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-mean.pdf' % (figbase, fieldname))

            fig = plt.figure()
            plt.title('Normalized Yield, std deviation across years')
            norm = mpl.colors.BoundaryNorm(
                np.linspace(vvars.min(), vvars.max(), 10), cmap.N)
            plot_grid_map(vvars, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-std.pdf' % (figbase, fieldname))

            fig = plt.figure()
            plt.title('Normalized Yield, max across years')
            ymax = ma.max(vals, axis=2)
            norm = mpl.colors.BoundaryNorm(
                np.linspace(ymax.min(), ymax.max(), 10), cmap.N)
            plot_grid_map(ymax, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-max.pdf' % (figbase, fieldname))

            fig = plt.figure()
            plt.title('Normalized Yield, min across years')
            ymin = ma.min(vals, axis=2)
            norm = mpl.colors.BoundaryNorm(
                np.linspace(ymin.min(), ymin.max(), 10), cmap.N)
            plot_grid_map(ymin, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-yield-min.pdf' % (figbase, fieldname))

            # Log the number of years of data that we have.
            fig = plt.figure()
            plt.title('Number of years of data')
            cnt = ma.count(vals, axis=2)
            norm = mpl.colors.BoundaryNorm(
                np.linspace(0, cnt.max(), 2 * (cnt.max() + 1)), cmap.N)
            plot_grid_map(cnt, norm=norm, cmap=cmap)
            plt.colorbar(shrink=0.5)
            fig.savefig('%s/%s-number-years-map.pdf' % (figbase, fieldname))