Example #1
def calc_mask(series, periods, pctfile, land_dim='land'):
    """Calculates and returns a 1D mask representative of the 2D box found in pctfile.
    
    The returned 1D mask contains True when the pixel is included in the ROI,
    False when the pixel is excluded from the ROI. For consistency, this code 
    uses the same mask calculation function used by the burned_area module."""
    # read the bounding box if recorded in the file.
    pct_ds = nc.Dataset(pctfile)
    if 'max_lat' not in pct_ds.ncattrs():
        pct_ds.close()
        return None
    geog_box = (pct_ds.min_lon, pct_ds.max_lon, pct_ds.min_lat, pct_ds.max_lat)

    pct_ds.close()

    ds = series.get_dataset(periods.first())
    ca = trend.CompressedAxes(ds, land_dim)

    mask_1d = oi.calc_geog_mask(ca, ds, geog_box)

    return (mask_1d, geog_box)
Example #2
def calc_mask(series, periods, pctfile, land_dim='land') :
    """Calculates and returns a 1D mask representative of the 2D box found in pctfile.
    
    The returned 1D mask contains True when the pixel is included in the ROI,
    False when the pixel is excluded from the ROI. For consistency, this code 
    uses the same mask calculation function used by the burned_area module."""
    # read the bounding box if recorded in the file. 
    pct_ds = nc.Dataset(pctfile)
    if 'max_lat' not in pct_ds.ncattrs() :
        pct_ds.close()
        return None
    geog_box =  (pct_ds.min_lon, pct_ds.max_lon, 
                 pct_ds.min_lat, pct_ds.max_lat)
    
    pct_ds.close()
    
    ds = series.get_dataset(periods.first())
    ca = trend.CompressedAxes(ds, land_dim)
    
    mask_1d = oi.calc_geog_mask(ca, ds, geog_box)
        
    return (mask_1d, geog_box)
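
The 1D mask returned above is meant to be applied to vectors on the compressed 'land' axis. The sketch below illustrates, with plain NumPy, the mask semantics described in the docstring: given the longitude and latitude of each compressed land pixel, mark the pixels falling inside the bounding box. This is only an illustration of the idea; geog_mask_sketch, lons, lats and data_1d are hypothetical names, and the project's actual implementation is oi.calc_geog_mask.

import numpy as np

def geog_mask_sketch(lons, lats, geog_box):
    """Return a boolean vector: True where a compressed land pixel lies in the box.

    lons/lats are 1D arrays giving the coordinates of each land pixel;
    geog_box is (lon_min, lon_max, lat_min, lat_max), matching the tuple
    assembled by calc_mask above.
    """
    lon_min, lon_max, lat_min, lat_max = geog_box
    return ((lons >= lon_min) & (lons <= lon_max) &
            (lats >= lat_min) & (lats <= lat_max))

# Hypothetical usage: keep only in-ROI pixels of a compressed 1D land vector.
lons = np.array([-120.25, -119.75, -119.25, -118.75])
lats = np.array([  40.25,   40.75,   41.25,   41.75])
data_1d = np.array([1.0, 2.0, 3.0, 4.0])
mask_1d = geog_mask_sketch(lons, lats, (-120.0, -119.0, 40.0, 41.0))
roi_values = data_1d[mask_1d]          # -> array([2.])
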
Example #3
def ba_multifile_histograms(ba_files, ind_files, indices_names, minmax,
                            day_range=None, geog_box=None) :
    """calculates combined index-oriented and MODIS BA oriented histograms
    
    The user can specify a day of year range and geography box to limit the 
    data. Geography box is specified as a tuple: (lon_min, lon_max, lat_min,
    lat_max). 
    
    Computes and returns nine histograms using the minmax description 
    provided. Five histograms involve only the indices, which are assumed 
    to be computed at a coarse resolution such as 0.5 deg by 0.5 deg. 
    These histograms are computed with a uniform weight of 1 for every 
    occurrence. One represents all observed combinations of indices; the
    remaining four represent combinations of indices observed to contain
    some level of burning (in total, and separately for forest, non-forest,
    and "other" landcover). From these, unburned area for each combination
    of indices can be derived.

    The remaining four histograms represent high resolution burned area data
    aggregated to the coarse resolution grid. It is assumed that landcover 
    information is only available for the high resolution data. Separate
    histograms are calculated for groups of landcover codes representing
    forest, non-forest, and "other". These histograms, as well as a "total"
    histogram, are weighted by the burned area observed to occur at each 
    combination of indices. These four histograms represent only burned
    area, and do not contain information from which unburned area may be 
    derived.
    """
    # number of compressed land pixels in a single day's record
    one_day = len(ind_files[0].dimensions['land'])

    # these count 0.5 x 0.5 degree cells
    occurrence = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int32)
    burned_occurrence = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int32)
    burned_forest_occ = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int32)
    burned_not_forest_occ = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int32)
    burned_other_occ      = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int32)

    # these four count individual modis detections
    burned_forest = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64) 
    burned_not_forest = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64)
    burned_other = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64)
    burned_total = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64)

    ca = trend.CompressedAxes(ind_files[0], 'land')    
    
    # convert the box into a mask where pixels are True if included.
    if geog_box is not None : 
        geog_mask = oi.calc_geog_mask(ca, ba_files[0], geog_box)
    else : 
        geog_mask = np.ones( (one_day,), dtype=bool)

    # initialize the IndexManager for this index file
    manager = oi.IndexManager(indices_names, geog_mask)

    for i_year in range(len(ind_files)) : 
        # fetch the correct file handles for this year
        indfile = ind_files[i_year]
        bafile  = ba_files[i_year]
        
        # get BA handle and initialize an object to aggregate BA by
        # landcover type
        count   = bafile.variables['count']
        lc_edges = landcover_classification(bafile.variables['landcover'][:])
        lc_type = rv.CutpointReduceVar(count.shape[:-1], 2, lc_edges)
        
        
        # by default, process all samples along the time dimension
        # except the first and last day
        timelim = len(indfile.dimensions['days'])-1
        timerange = range(1,timelim)
        if day_range is not None : 
            timerange = range(day_range.start, day_range.stop)
                
        for i_day in timerange : 
            print(i_year, i_day)
                        
            # grab one day's worth of data 
            ba_day = count[...,i_day]
            
            # aggregate the data
            ba_forest = lc_type.sum(0,ba_day)
            ba_nonforest = lc_type.sum(1,ba_day)
            ba_other     = lc_type.sum(2,ba_day)
            
            # compress the aggregated data into the 1D land array
            ba_forest_cmp = ca.compress(ba_forest)
            ba_nonforest_cmp = ca.compress(ba_nonforest)
            ba_other_cmp = ca.compress(ba_other)
            
            # get the index values from the file, 
            # as well as an array which selects out only pixels
            # having valid data.
            land_data, records = manager.get_indices_vector(indfile, i_day)
            records = records[land_data]
                        
            occurrence.put_batch(records)
            burned_weight = np.zeros( (np.count_nonzero(land_data),))
            
            # for each of the histograms which count only burned area, 
            # extract those records with nonzero burned area and 
            # submit them as a batch to the relevant histogram.
            ba = ba_forest_cmp[land_data]
            if np.count_nonzero(ba) > 0 : 
                idx = np.where( ba != 0)
                rec = records[idx,:].squeeze(axis=(0,))
                burned_forest.put_batch(rec, weights=ba[idx])
                burned_forest_occ.put_batch(rec)
                burned_weight += ba
            
            ba = ba_nonforest_cmp[land_data]
            if np.count_nonzero(ba) > 0 : 
                idx = np.where( ba != 0)
                rec = records[idx,:].squeeze(axis=(0,))
                burned_not_forest.put_batch(rec, weights=ba[idx])
                burned_not_forest_occ.put_batch(rec)
                burned_weight += ba
            
            ba = ba_other_cmp[land_data]
            if np.count_nonzero(ba) > 0 : 
                idx = np.where( ba != 0)
                rec = records[idx,:].squeeze(axis=(0,))
                burned_other.put_batch(rec, weights=ba[idx])
                burned_other_occ.put_batch(rec)
                burned_weight += ba
            
            ba = burned_weight
            if np.count_nonzero(ba) > 0 : 
                idx = np.where( ba != 0)
                rec = records[idx,:].squeeze(axis=(0,))
                burned_total.put_batch(rec, weights=ba[idx])
                burned_occurrence.put_batch(rec)
                
    return (occurrence, burned_occurrence, 
             burned_forest, burned_forest_occ, 
             burned_not_forest, burned_not_forest_occ,
             burned_other, burned_other_occ, burned_total)
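
The per-landcover aggregation above is handled by rv.CutpointReduceVar, whose edges come from landcover_classification(). As a rough illustration of the cutpoint idea only (the codes, edges, and variable names below are invented, not the module's real classification), grouping high-resolution pixels into forest / non-forest / other and summing one day's burned counts per group can be sketched with plain NumPy:

import numpy as np

# Hypothetical landcover codes per high-resolution pixel, one day of
# burned-area counts, and illustrative cutpoints separating the groups.
landcover = np.array([1, 2, 7, 9, 12, 14])
ba_day    = np.array([0, 2, 1, 0,  3,  0])
lc_edges  = [0, 6, 11, 15]        # [0,6) forest, [6,11) non-forest, [11,15) other

group = np.digitize(landcover, lc_edges) - 1   # 0 = forest, 1 = non-forest, 2 = other
ba_forest    = ba_day[group == 0].sum()        # 2
ba_nonforest = ba_day[group == 1].sum()        # 1
ba_other     = ba_day[group == 2].sum()        # 3
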
Example #4
def ba_multifile_histograms(ba_files,
                            ind_files,
                            indices_names,
                            minmax,
                            day_range=None,
                            geog_box=None):
    """calculates combined index-oriented and MODIS BA oriented histograms
    
    The user can specify a day of year range and geography box to limit the 
    data. Geography box is specified as a tuple: (lon_min, lon_max, lat_min,
    lat_max). 
    
    Computes and returns nine histograms using the minmax description 
    provided. Five histograms involve only the indices, which are assumed 
    to be computed at a coarse resolution such as 0.5 deg by 0.5 deg. 
    These histograms are computed with a uniform weight of 1 for every 
    occurrence. One represents all observed combinations of indices; the
    remaining four represent combinations of indices observed to contain
    some level of burning (in total, and separately for forest, non-forest,
    and "other" landcover). From these, unburned area for each combination
    of indices can be derived.

    The remaining four histograms represent high resolution burned area data
    aggregated to the coarse resolution grid. It is assumed that landcover 
    information is only available for the high resolution data. Separate
    histograms are calculated for groups of landcover codes representing
    forest, non-forest, and "other". These histograms, as well as a "total"
    histogram, are weighted by the burned area observed to occur at each 
    combination of indices. These four histograms represent only burned
    area, and do not contain information from which unburned area may be 
    derived.
    """
    # number of compressed land pixels in a single day's record
    one_day = len(ind_files[0].dimensions['land'])

    # these count 0.5 x 0.5 degree cells
    occurrence = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int32)
    burned_occurrence = ah.AccumulatingHistogramdd(minmax=minmax,
                                                   dtype=np.int32)
    burned_forest_occ = ah.AccumulatingHistogramdd(minmax=minmax,
                                                   dtype=np.int32)
    burned_not_forest_occ = ah.AccumulatingHistogramdd(minmax=minmax,
                                                       dtype=np.int32)
    burned_other_occ = ah.AccumulatingHistogramdd(minmax=minmax,
                                                  dtype=np.int32)

    # these four count individual modis detections
    burned_forest = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64)
    burned_not_forest = ah.AccumulatingHistogramdd(minmax=minmax,
                                                   dtype=np.int64)
    burned_other = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64)
    burned_total = ah.AccumulatingHistogramdd(minmax=minmax, dtype=np.int64)

    ca = trend.CompressedAxes(ind_files[0], 'land')

    # convert the box into a mask where pixels are True if included.
    if geog_box is not None:
        geog_mask = oi.calc_geog_mask(ca, ba_files[0], geog_box)
    else:
        geog_mask = np.ones((one_day, ), dtype=bool)

    # initialize the IndexManager for this index file
    manager = oi.IndexManager(indices_names, geog_mask)

    for i_year in range(len(ind_files)):
        # fetch the correct file handles for this year
        indfile = ind_files[i_year]
        bafile = ba_files[i_year]

        # get BA handle and initialize an object to aggregate BA by
        # landcover type
        count = bafile.variables['count']
        lc_edges = landcover_classification(bafile.variables['landcover'][:])
        lc_type = rv.CutpointReduceVar(count.shape[:-1], 2, lc_edges)

        # by default, process all samples along the time dimension
        # except the first and last day
        timelim = len(indfile.dimensions['days']) - 1
        timerange = range(1, timelim)
        if day_range is not None:
            timerange = range(day_range.start, day_range.stop)

        for i_day in timerange:
            print(i_year, i_day)

            # grab one day's worth of data
            ba_day = count[..., i_day]

            # aggregate the data
            ba_forest = lc_type.sum(0, ba_day)
            ba_nonforest = lc_type.sum(1, ba_day)
            ba_other = lc_type.sum(2, ba_day)

            # compress the aggregated data into the 1D land array
            ba_forest_cmp = ca.compress(ba_forest)
            ba_nonforest_cmp = ca.compress(ba_nonforest)
            ba_other_cmp = ca.compress(ba_other)

            # get the index values from the file,
            # as well as an array which selects out only pixels
            # having valid data.
            land_data, records = manager.get_indices_vector(indfile, i_day)
            records = records[land_data]

            occurrence.put_batch(records)
            burned_weight = np.zeros((np.count_nonzero(land_data), ))

            # for each of the histograms which count only burned area,
            # extract those records with nonzero burned area and
            # submit them as a batch to the relevant histogram.
            ba = ba_forest_cmp[land_data]
            if np.count_nonzero(ba) > 0:
                idx = np.where(ba != 0)
                rec = records[idx, :].squeeze(axis=(0, ))
                burned_forest.put_batch(rec, weights=ba[idx])
                burned_forest_occ.put_batch(rec)
                burned_weight += ba

            ba = ba_nonforest_cmp[land_data]
            if np.count_nonzero(ba) > 0:
                idx = np.where(ba != 0)
                rec = records[idx, :].squeeze(axis=(0, ))
                burned_not_forest.put_batch(rec, weights=ba[idx])
                burned_not_forest_occ.put_batch(rec)
                burned_weight += ba

            ba = ba_other_cmp[land_data]
            if np.count_nonzero(ba) > 0:
                idx = np.where(ba != 0)
                rec = records[idx, :].squeeze(axis=(0, ))
                burned_other.put_batch(rec, weights=ba[idx])
                burned_other_occ.put_batch(rec)
                burned_weight += ba

            ba = burned_weight
            if np.count_nonzero(ba) > 0:
                idx = np.where(ba != 0)
                rec = records[idx, :].squeeze(axis=(0, ))
                burned_total.put_batch(rec, weights=ba[idx])
                burned_occurrence.put_batch(rec)

    return (occurrence, burned_occurrence, burned_forest, burned_forest_occ,
            burned_not_forest, burned_not_forest_occ, burned_other,
            burned_other_occ, burned_total)
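
To make the docstring's distinction between the occurrence-counting and burned-area-weighted histograms concrete, here is a minimal sketch using numpy.histogramdd in place of the project's ah.AccumulatingHistogramdd. All of the data values and bin edges below are invented for illustration; only the counting scheme mirrors the function above.

import numpy as np

# Two index values per coarse cell, plus the burned area aggregated to each cell.
idx_a  = np.array([0.2, 0.4, 0.4, 0.8])
idx_b  = np.array([0.1, 0.5, 0.5, 0.9])
burned = np.array([0.0, 3.0, 1.0, 0.0])

records = np.column_stack([idx_a, idx_b])
bins = (np.linspace(0, 1, 6), np.linspace(0, 1, 6))

# "occurrence": every cell counts once, weight 1, regardless of burning.
occurrence, _ = np.histogramdd(records, bins=bins)

# "burned_occurrence": weight 1, but only for cells where burning was observed.
burned_occ, _ = np.histogramdd(records[burned > 0], bins=bins)

# "burned_total": each burned cell weighted by its burned area.
burned_total, _ = np.histogramdd(records[burned > 0], bins=bins,
                                 weights=burned[burned > 0])

# Unburned occurrences per index combination can then be derived,
# as the docstring notes:
unburned = occurrence - burned_occ
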