Example 1
def __init__(self, histo=None, ff_name=None, ff_dict=forms, weighted=False,
             min_npts=10, normalize=True):
    """Specify the histogram and functional form to use for initialization, or none."""
    # "forms", "ah", "np" and "seed" are module-level names defined elsewhere
    # in the source file
    self.fits = {}
    self.default_fit = None
    self.ff_name = ff_name

    # look up and initialize the requested functional form, if any
    if ff_name is not None:
        self._init_ff(ff_dict[ff_name])

    if histo is not None:
        # copy the bin structure of the source histogram and build indexers for it
        self.minmax = histo.minmax
        self._index = ah.init_indexers(self.minmax)
        self.default_contrib = histo.default_contrib

        # for each bin combination, record the center of the first bin as x_min
        combos = histo.get_combos(units=False)
        self.x_min = {}
        for c in combos:
            self.x_min[c] = np.mean(histo.get_edges(c, units=False)[:2])
        self.default_x_min = np.mean(histo.default_edges[:2])

        # fit the configured functional form to the histogram data
        self._fit_functional_form(histo, weighted, min_npts, normalize)

        # seed() is a module-level helper applied to the freshly initialized object
        seed(self)
    else:
        self.default_contrib = {}
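
For orientation, a minimal usage sketch of this constructor. The excerpt does not show the enclosing class or the keys of the module-level forms dict, so FitCollection, 'lognorm', and my_histo below are hypothetical stand-ins; my_histo is assumed to expose the attributes the constructor reads (minmax, default_contrib, get_combos, get_edges, default_edges).

# hypothetical example -- "FitCollection", "lognorm" and my_histo are stand-ins
fits = FitCollection(histo=my_histo, ff_name='lognorm', weighted=True)
empty = FitCollection()   # no histogram given: only the empty defaults are initialized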
Example 2
# imports inferred from usage below; "ah", select_data, read_multiyear_minmax,
# FOREST_LC and NONFOREST_LC are defined elsewhere in the source package
import numpy as np
import pandas as pd
import netCDF4 as nc

def sparse_multiyear_histogram(years, csv_template, bahistfile,
                               count_threshold=50, bins=25, out_template=None):
    """Computes and optionally saves sparse histograms of MODIS BA counts."""
    # open the BA histogram file and get the burned-area count variable
    bahist = nc.Dataset(bahistfile)
    counts = bahist.variables['burned_total']
    
    # read all per-year csv files and concatenate them into one table
    file_list = []
    for y in years:
        file_list.append(pd.read_csv(csv_template % y))
    compare = pd.concat(file_list)

    # keep only rows whose first column falls within [10, 364)
    compare = compare[np.logical_and(compare.iloc[:, 0] >= 10,
                                     compare.iloc[:, 0] < 364)]
    
    # get min/max/bin from multiyear histogram file
    mmb, binsizes = read_multiyear_minmax(bahist, counts.dimensions)

    # create an indexer
    index = ah.init_indexers(mmb)

    # strip out geometry
    dim_bins = [m[2] for m in mmb]

    # create sparse histograms (same configuration for forest, non-forest, total)
    histo_args = dict(minmax=mmb, threshold=count_threshold, bins=bins,
                      default_minmax=(1, count_threshold, count_threshold - 1))
    shisto_forest = ah.SparseKeyedHistogram(**histo_args)
    shisto_not_forest = ah.SparseKeyedHistogram(**histo_args)
    shisto_total = ah.SparseKeyedHistogram(**histo_args)

    # loop through all bins with nonzero data
    i_nonzero = np.where(counts[:] > 0)
    for i_bin in zip(*i_nonzero):
        # select the rows falling in this bin, then split by the land-cover
        # class stored in the second column
        total = select_data(compare, counts.dimensions, i_bin, index, dim_bins)
        forest = total[total.iloc[:, 1].isin(FOREST_LC)]
        not_forest = total[total.iloc[:, 1].isin(NONFOREST_LC)]

        shisto_forest.put_combo(i_bin, forest['BA Count'], units=False)
        shisto_not_forest.put_combo(i_bin, not_forest['BA Count'], units=False)
        shisto_total.put_combo(i_bin, total['BA Count'], units=False)
        
    # save files if a filename template was specified
    if out_template is not None:
        ah.save_sparse_histos(shisto_total, out_template % 'total')
        ah.save_sparse_histos(shisto_forest, out_template % 'forest')
        ah.save_sparse_histos(shisto_not_forest, out_template % 'not_forest')
        
    bahist.close()
    
    return (shisto_total, shisto_forest, shisto_not_forest)
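
A minimal invocation sketch with hypothetical file names and year range; the only requirements implied by the code above are that csv_template accepts a year and out_template accepts a histogram name ('total', 'forest', 'not_forest').

# hypothetical paths and years; csv_template must take a year (%d) and
# out_template a histogram name (%s)
total_h, forest_h, not_forest_h = sparse_multiyear_histogram(
    range(2003, 2012), 'ba_compare_%d.csv', 'ba_histogram.nc',
    count_threshold=50, bins=25, out_template='sparse_histo_%s.nc')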