Example #1
0
    def __init__(self):
        """ Virtually private constructor. """
        self.base_dir = dirfuncs.guess_data_dir()
        self.island_data_table = {}
        if Imagery_Cache.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            Imagery_Cache.__instance = self
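
    # ---- Hedged sketch (not in the original source): the accessor that usually
    # accompanies this singleton pattern. The get_instance() name is an assumption,
    # and it presumes the class defines a class-level __instance = None.
    @staticmethod
    def get_instance():
        """ Return the single Imagery_Cache instance, creating it on first use. """
        if Imagery_Cache.__instance is None:
            Imagery_Cache()
        return Imagery_Cache.__instance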
Example #2
0
    # 'app_jambi',  # d
    # 'app_kalbar',
    # 'app_kaltim',

    #'app_oki',  # c 'mukti_prakarsa',
    'app_riau',
    'multipersada_gatramegah',  #'musim_mas',  # 'unggul_lestari',

    # e
    'gar_pgm',
    'PTAgroAndalan',
    'Bumitama_PTGemilangMakmurSubur',
]
my_sampler = sampler.Sampler()
base_dir = dirfuncs.guess_data_dir()
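# Candidate band combinations keyed by set id; presumably the feature subsets
# compared across model runs (their exact role is an assumption, not stated here).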
band_set = {
    0: {
        'blue_max', 'red_max', 'nir_max', 'swir1_max', 'VH_0', 'VH', 'VH_2',
        'VV_0', 'VV', 'VV_2', 'EVI', 'swir2_max', 'brightness', 'wetness',
        'greenness', 'slope'
    },
    1: {
        'blue_max', 'red_max', 'nir_max', 'swir1_max', 'VH_0', 'VH', 'VV_0',
        'VV', 'VV_2', 'EVI', 'swir2_max', 'wetness', 'greenness', 'slope'
    },
    2: {
        'blue_max', 'red_max', 'nir_max', 'swir1_max', 'VH_0', 'VH', 'VV_0',
        'VV', 'VV_2', 'brightness', 'swir2_max', 'wetness', 'greenness',
        'slope'
    },
Example #3
0
    # plt.figure()
    # destination.plot(robust=True)
    #  plt.show()

    image2 = False
    #image3=False
    return image2


if __name__ == "__main__":
    wbt = WhiteboxTools()
    wbt.set_verbose_mode(True)
    name = 'Kalimantan'
    file1 = "KalimantanFINAL_classified_by_ensemble_rf.tif"
    year = 2017
    base_dir = dirfuncs.guess_data_dir()
    wbt.work_dir = os.path.join(base_dir, name, 'sklearn_test', str(year),
                                'test_temp', 'wkdir')
    # wbt.majority_filter(file, "Kalimantan2018_Final_smooth3x3.tif", filterx=3, filtery=3)
    #
    base = os.path.join(base_dir, name, 'sklearn_test', str(year), 'test_temp',
                        'Kalimantan2017_Final_100m.tif')
    out1 = "resample2017.tif"
    out2 = "round2017.tif"
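    # Resample the classified raster to match the 100 m base image, round the
    # interpolated values back to integer class codes, then smooth the result
    # with a 3x3 majority filter.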
    wbt.resample(file1, out1, "cc", base=base)
    wbt.round(out1, out2)

    wbt.majority_filter(out2,
                        "RES_RND_SMTH_Kalimantan2017_Final3x3.tif",
                        filterx=3,
                        filtery=3)
Example #4
0
import sqlite3
import pandas as pd
import dirfuncs
conn = sqlite3.connect('data/hcs_database.db')
#conn = sqlite3.connect('hcs_database.db')

##TODO this should be moved to hcs_database.py
base_dir = dirfuncs.guess_data_dir()
concessions_csv = base_dir + 'concession_inventory.csv'
concession_df = pd.read_csv(concessions_csv)
assessment_year_dict = dict(zip(concession_df.app_key, concession_df.use_year))

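# Per-concession model hyperparameters; presumably max_leaf_nodes and max_features
# for the random forest models (the source does not name the parameters explicitly).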
leaf_nodes_dict = {
    'Bumitama_PTDamaiAgroSejahtera': 10,
    'Bumitama_PTHungarindoPersada': 6,
    'PTMitraNusaSarana': 10,
    'makmur_abadi': 10,
    'sawit_perdana': 6,
    'aneka_sawit': 10,
    'PTMentariPratama': 6,
    'PTSukajadiSawitMekar': 10,
    'PTLabontaraEkaKarsa': 10,
    'adi_perkasa': 8
}
features_dict = {
    'Bumitama_PTDamaiAgroSejahtera': 0.33,
    'Bumitama_PTHungarindoPersada': 0.33,
    'PTMitraNusaSarana': 0.65,
    'makmur_abadi': 0.33,
    'sawit_perdana': 0.8,
    'aneka_sawit': 0.65,
Example #5
0
def ingest_kml_fixed_classes():
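    """
    For each supplemental land-cover class, read the GeoJSON derived from its
    doc.kml, burn the class code into a per-feature GeoTIFF, and clip the
    matching Landsat 8 bands for the island and year encoded in the feature
    properties.
    """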
    #input = 'C:\\Users\\ME\\Dropbox\\HCSproject\\data\\PoC\\supplementary_class\\impervious\\doc.kml'
    imageSumatra = image_cache.get_band_by_context_year('nir_max', 'Sumatra', 2019)
    imageKalimantan = image_cache.get_band_by_context_year('nir_max', 'Kalimantan', 2019)
    imagePapua = image_cache.get_band_by_context_year('nir_max', 'Papua', 2019)

    for landcover in supplemental_class_codes.keys():
        print(landcover)
        input = os.path.join(dirfuncs.guess_data_dir(), 'supplementary_class', landcover, 'doc.kml')
        srcDS = gdal.OpenEx(input)
        output = os.path.join(dirfuncs.guess_data_dir(), 'supplementary_class', landcover, landcover + '.json')
        #ds = gdal.VectorTranslate(output, srcDS, format='GeoJSON')
        #file = glob.glob(output)
        print(output)
        with open(output) as f:
            data = f.read()
            # TODO: on Windows you may get a confusing parsing error; it is caused by EOL characters
            jsonload = js.loads(data)
            features = jsonload["features"]
            print(features)

        # NOTE: buffer(0) is a trick for fixing scenarios where polygons have overlapping coordinates
        #temp = GeometryCollection([shape(feature["geometry"]).buffer(0) for feature in features])

        #TODO get the year from the json or doc.kml

        #my_dict = sat_ops.s1_band_dict.copy()
        my_dict = sat_ops.l8_band_dict.copy()
        #my_dict.update(sat_ops.s2_band_dict)
        #my_dict.update(sat_ops.dem_band_dict)
        bands = my_dict.values()
        print(bands)



        shapes = ((shape(feature["geometry"]).buffer(0), (feature['properties']['Description']),
                   feature['properties']['Name']) for feature in features)
        for geom in shapes:
            print(geom)
            get_reference = True
            xmin, ymin, xmax, ymax = geom[0].bounds
            year_list = geom[1].split(sep=',')
            year_list = map(int, year_list)
            year_list = list(year_list)
            year_list.sort(reverse=True)
            my_year = year_list[0]
            if my_year == 2018: my_year = 2019
            if my_year == 2016: my_year = 2017  # TODO these only apply because we are using Landsat 8 data aggregated in 2-year chunks
            name = geom[2]
            for band in bands:
                if get_reference:
                    bbox = box(xmin, ymin, xmax, ymax)
                    geo = gpd.GeoDataFrame({'geometry': bbox}, index=[0], crs=from_epsg(4326))
                    geo = geo.to_crs(crs=from_epsg(4326))
                    coords = getFeatures(geo)
                    if 'kal' in name:
                        out_img = imageKalimantan.rio.clip(coords, imageKalimantan.rio.crs)
                        island = 'Kalimantan'
                    elif 'sum' in name:
                        out_img = imageSumatra.rio.clip(coords, imageSumatra.rio.crs)
                        island = 'Sumatra'
                    elif 'pap' in name:
                        out_img = imagePapua.rio.clip(coords, imagePapua.rio.crs)
                        island = 'Papua'
                    else:
                        raise RuntimeError('Could not determine island from feature name: ' + name)
                    # meta = out_img.meta.copy()
                    print(island)
                    trans = out_img.transform
                    crs = out_img.rio.crs
                    height = out_img.rio.height
                    width = out_img.rio.width
                    dtype = rio.int16
                    # burned = rioft.rasterize(shapes=geom, fill=0)
                    out_class = os.path.join(dirfuncs.guess_data_dir(), 'supplementary_class', landcover, name + '.tif')
                    with rio.open(out_class, 'w+', driver='GTiff',
                                  height=height, width=width,
                                  crs=crs, dtype=dtype, transform=trans, count=1) as out:
                        out_arr = out.read(1)
                        burned = rioft.rasterize(shapes=[(geom[0], supplemental_class_codes[landcover])], fill=-9999,
                                                 out=out_arr, transform=out.transform)
                        burned = np.where(burned != supplemental_class_codes[landcover], -9999,
                                          burned)  # NoData the other pixels
                        out.write_band(1, burned)
                    get_reference=False

                out_band = os.path.join(dirfuncs.guess_data_dir(), 'supplementary_class', landcover, 'out',
                                        name + '_' + band + '.tif')
                if os.path.isfile(out_band): continue  # hack: long batch jobs have failed partway through, so skip bands that already exist to allow restarting

                image = image_cache.get_band_by_context_year(band, island, my_year)
                out_img = image.rio.clip(coords, image.rio.crs)
                if out_img.dtype == 'float64':
                    out_img.data = np.float32(out_img)
                dtype = rio.float32


                print('Writing:  ', out_band)
                with rio.open(out_band, 'w+', driver='GTiff',
                              height=height, width=width,
                              crs=crs, dtype=dtype, transform=trans, count=1) as out2:
                    out2.write_band(1, out_img[0])
Example #6
0
"""
Run a regression to explore how different parameterizations affect accuracy
across all analyses in the summary table.
"""
# =============================================================================
# Imports
# =============================================================================
import dirfuncs
import pandas as pd
import statsmodels.formula.api as smf
dropbox_dir = dirfuncs.guess_data_dir()

# =============================================================================
# Load data
# =============================================================================
data_dir = dropbox_dir
result_csv = data_dir + 'result.12252019.csv'
result_df = pd.read_csv(result_csv)

# =============================================================================
# Run regression
# =============================================================================
mod = smf.ols(formula="two_class_score_weighted ~ bands + class_scheme",
              data=result_df)
res = mod.fit()
print(res.summary())
Example #7
0
# Parameters
#================================================================================
#A shape file - study area
lon_edge = 2
# lon_end=119
# lat_start = -5
# lat_end = 5
#lat_edge = 2.5 #PAPUA
lat_edge = 2
#site = 'Kalimantan'
start = 1
end = 50
years = [  # 2017, 2015,
    2019]
site = 'None'
out_path = dirfuncs.guess_data_dir()
#Take a set of years
#Take a set of bands
# take a step

# for each grid cell

    # for each band
def get_grid_polygons(lon_start, lon_end, lat_start, lat_end):
    print(lon_start, lon_end, lat_start, lat_end)
    polys = []
    lon = lon_start
    cell_id = 0
    while lon < lon_end:
        x1 = lon
        x2 = lon + lon_edge
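
# ---- Hedged sketch (not in the original source): the fragment above is cut off,
# so this shows one way the nested grid loop could complete, using shapely's box()
# and the lon_edge/lat_edge steps defined above. The function and variable names
# below are illustrative assumptions, not the author's code.
from shapely.geometry import box

def build_grid_cells(lon_start, lon_end, lat_start, lat_end):
    cells = []
    cell_id = 0
    lon = lon_start
    while lon < lon_end:
        lat = lat_start
        while lat < lat_end:
            # One rectangular grid cell spanning lon_edge x lat_edge degrees
            cells.append((cell_id, box(lon, lat, lon + lon_edge, lat + lat_edge)))
            cell_id += 1
            lat += lat_edge
        lon += lon_edge
    return cells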