def get_livelihoods(out):
    # s2_03: Main livelihoods
    liv = ee.FeatureCollection("users/geflanddegradation/toolbox_datasets/livelihoodzones")

    livImage = liv.filter(ee.Filter.neq('lztype_num', None)) \
        .reduceToImage(properties=['lztype_num'], reducer=ee.Reducer.first()) \
        .unmask(0)

    liv_fields = ["No Data", "Agro-Forestry", "Agro-Pastoral", "Arid", "Crops - Floodzone", "Crops - Irrigated", "Crops - Rainfed", "Fishery", "Forest-Based", "National Park", "Other", "Pastoral", "Urban"]
    # multiply pixel area by the mask of each livelihood zone class --> output: area in ha
    livelihoodareas = livImage.eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]) \
        .rename(liv_fields) \
        .multiply(ee.Image.pixelArea().divide(10000)) \
        .reduceRegions(aoi, ee.Reducer.sum(), 250)
    out['livelihoods'] = get_fc_properties(livelihoodareas, normalize=True, scaling=100)
    # Handle the case of polygons outside of the area of coverage of the livelihood 
    # zones data
    if out['livelihoods']['No Data'] < 10:
        # If there is less than 10 percent no data, then ignore the no data by 
        # eliminating that category, and normalizing all the remaining categories 
        # to sum to 100
        out['livelihoods'].pop('No Data')
        denominator = sum(out['livelihoods'].values())
        out['livelihoods'] = {key: value / denominator * 100 for key, value in out['livelihoods'].items()}
    else:
        # if more than 10% of the area is no data, then return zero for all 
        # categories
        out['livelihoods'].pop('No Data')
        out['livelihoods'] = {key: 0. for key, value in out['livelihoods'].items()}
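# Note: every example on this page calls a get_fc_properties helper that is defined
# elsewhere in the module. A minimal sketch of what such a helper could look like,
# inferred only from how it is called here (the real implementation may differ), is:
import re

def get_fc_properties(fc, normalize=False, scaling=100, filter_regex=None):
    # Read the properties of the first feature returned by reduceRegions
    # (assumes the AOI collection holds a single feature)
    props = fc.first().getInfo()['properties']
    # Optionally keep only the properties whose names match filter_regex
    if filter_regex is not None:
        props = {key: value for key, value in props.items() if re.match(filter_regex, key)}
    # Drop non-numeric metadata (e.g. feature name strings) before any normalization
    props = {key: value for key, value in props.items() if isinstance(value, (int, float))}
    # Optionally rescale the values so they sum to `scaling` (e.g. 100 for percentages)
    if normalize:
        total = sum(props.values())
        if total > 0:
            props = {key: value / total * scaling for key, value in props.items()}
    return props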
def get_carbon_emissions_tons_co2e(out):
    # Get annual emissions and sum them across all years
    emissions = get_fc_properties(
        areas.reduceRegions(collection=aoi, reducer=ee.Reducer.sum(),
                            scale=30),
        normalize=False,
        filter_regex='carbon_emissions_tons_co2e_[0-9]*')
    out['carbon_emissions_tons_co2e'] = sum(emissions.values())
def get_forest_areas(out):
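    # Sum pre-computed per-year forest cover areas (hectares) over the AOI; `areas`,
    # `aoi` and `area_hectares` are assumed to be defined at module level.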
    forest_areas = get_fc_properties(areas.reduceRegions(
        collection=aoi, reducer=ee.Reducer.sum(), scale=30),
                                     normalize=False,
                                     filter_regex='forest_cover_[0-9]*')
    out['forest_area_hectares_2001'] = forest_areas['forest_cover_2001']
    out['forest_area_hectares_2015'] = forest_areas['forest_cover_2015']
    out['forest_area_percent_2001'] = forest_areas[
        'forest_cover_2001'] / area_hectares * 100
    out['forest_area_percent_2015'] = forest_areas[
        'forest_cover_2015'] / area_hectares * 100
def get_forest_loss(out):
    # Minimum tree cover (percent) to be considered forest
    tree_cover = 30
    year_start = 2001
    year_end = 2015

    # Import Hansen global forest dataset
    hansen = ee.Image('UMD/hansen/global_forest_change_2017_v1_5')

    # select pixels that were forest in 2000 (treecover >= tree_cover) and were lost
    # between year_start and year_end
    fc_loss = hansen.select('treecover2000').gte(tree_cover) \
        .And(hansen.select('lossyear').gt(year_start - 2000)) \
        .And(hansen.select('lossyear').lte(year_end - 2000))

    # compute pixel areas in hectares
    areas = fc_loss.multiply(ee.Image.pixelArea().divide(10000))
    forest_loss = get_fc_properties(areas.reduceRegions(
        collection=aoi, reducer=ee.Reducer.sum(), scale=scale),
                                    normalize=False)
    out['forest_loss'] = forest_loss['sum']
def get_prod_agriculture(out):
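    # Productivity degradation classes restricted to agricultural land (te_land class 33);
    # te_prod, te_land and prod_fields are assumed to be defined at module level.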
    prod_agriculture = te_prod.updateMask(te_land.eq(33)) \
        .eq([-32768, -1, 0, 1]) \
        .rename(prod_fields) \
        .multiply(ee.Image.pixelArea().divide(10000)) \
        .reduceRegions(aoi, ee.Reducer.sum())
    out['prod_agriculture'] = get_fc_properties(prod_agriculture, normalize=True, scaling=100)
def get_area_soc(out):
    # s3_04: soc degradation classes
    te_socc_deg = ee.Image("users/geflanddegradation/global_ld_analysis/r20180821_soc_globe_2001-2015_deg") \
        .select("soc_deg")
    soc_areas = te_socc_deg.eq([-32768, -1, 0, 1]) \
        .rename(["no data", "degraded", "stable", "improved"]) \
        .multiply(ee.Image.pixelArea().divide(10000)) \
        .reduceRegions(aoi, ee.Reducer.sum())
    out['area_soc'] = get_fc_properties(soc_areas, normalize=True, scaling=100)
def get_area_lc(out):
    # s3_03: Land cover degradation classes
    te_land = ee.Image("users/geflanddegradation/global_ld_analysis/r20180821_lc_traj_globe_2001-2001_to_2015")
    lc_areas = te_land.select("lc_dg").eq([-32768, -1, 0, 1]) \
        .rename(["nodata", "degraded", "stable", "improved"]) \
        .multiply(ee.Image.pixelArea().divide(10000)) \
        .reduceRegions(aoi, ee.Reducer.sum())
    out['area_lc'] = get_fc_properties(lc_areas, normalize=True, scaling=100)
def get_area_prod(out):
    # s3_02: Productivity degradation classes
    te_prod = ee.Image("users/geflanddegradation/global_ld_analysis/r20180821_lp7cl_globe_2001_2015_modis") \
        .remap([-32768, 1, 2, 3, 4, 5, 6, 7], [-32768, -1, -1, 0, 0, 0, 1, 1])
    prod_areas = te_prod.eq([-32768, -1, 0, 1]) \
        .rename(["nodata", "degraded", "stable", "improved"]) \
        .multiply(ee.Image.pixelArea().divide(10000)) \
        .reduceRegions(aoi, ee.Reducer.sum())
    out['area_prod'] = get_fc_properties(prod_areas, normalize=True, scaling=100)
def get_lc_transitions(out):
    # multiply pixel area by the area which experienced each of the lc 
    # transition classes --> output: area in ha
    lc_transitions = te_land.select("lc_tr") \
        .eq([11, 12, 13, 14, 15, 16, 17,
             21, 22, 23, 24, 25, 26, 27,
             31, 32, 33, 34, 35, 36, 37,
             41, 42, 43, 44, 45, 46, 47,
             51, 52, 53, 54, 55, 56, 57,
             61, 62, 63, 64, 65, 66, 67,
             71, 72, 73, 74, 75, 76, 77]) \
        .rename(lc_tr_fields) \
        .multiply(ee.Image.pixelArea().divide(10000)) \
        .reduceRegions(aoi, ee.Reducer.sum())
    out['lc_transition_hectares'] = get_fc_properties(lc_transitions, normalize=False)
def get_lc_2015(out):
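    # lc_target is assumed to be a pre-computed FeatureCollection of per-class
    # land cover areas for the target year (2015)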
    out['lc_2015'] = get_fc_properties(lc_target, normalize=True, scaling=100)
def get_lc_2001(out):
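    # lc_baseline is assumed to be the equivalent FeatureCollection for the
    # baseline year (2001)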
    out['lc_2001'] = get_fc_properties(lc_baseline, normalize=True, scaling=100)
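# Hypothetical usage sketch: the get_* functions above fill a shared `out` dict and rely on
# module-level inputs (aoi, scale, area_hectares, plus the get_fc_properties helper sketched
# earlier) that are not shown on this page. The values below are illustrative assumptions only.
import ee

if __name__ == '__main__':
    ee.Initialize()
    # Example area of interest as a single-feature collection (hypothetical coordinates)
    aoi = ee.FeatureCollection([ee.Feature(ee.Geometry.Rectangle([36.0, -1.5, 37.0, -0.5]))])
    scale = 30  # reduction scale in metres
    area_hectares = aoi.geometry().area().divide(10000).getInfo()

    out = {}
    get_forest_loss(out)  # loads its own Hansen inputs internally
    get_area_prod(out)    # productivity degradation class areas
    get_area_soc(out)     # soil organic carbon degradation class areas
    print(out)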