def get_feature_array(self, outputfile):
    '''
    This method creates an array of features from the bands of this
    raster, writes it to outputfile and returns the resulting raster.

    The feature stack is: the five raw bands, four clamped
    normalized-difference indexes (NDVI, red-edge NDVI, GNDVI, NDRE)
    and a sobel filter band.

    :param outputfile: path of the GTiff file to create.
    :return: raster.Data object wrapping the created file.
    '''
    def clamp_index(index_array):
        # Normalized-difference indexes are only meaningful in [-1, 1];
        # clip outliers (previously this clamp was copy-pasted four times).
        index_array[index_array <= -1] = -1
        index_array[index_array >= 1] = 1
        return index_array
    image_array = self.get_raster().read_data_file_as_array()
    ndvi_array = clamp_index(self.get_NDVI())
    red_edge_ndvi_array = clamp_index(self.get_red_edge_NDVI())
    gndvi_array = clamp_index(self.get_gndvi())
    ndre_array = clamp_index(self.get_ndre())
    sobel_filter_array = self.get_sobel_filter(sigma=2)
    all_features = numpy.array([image_array[0],
                                image_array[1],
                                image_array[2],
                                image_array[3],
                                image_array[4],
                                ndvi_array,
                                red_edge_ndvi_array,
                                gndvi_array,
                                ndre_array,
                                sobel_filter_array])
    create_raster_from_reference(outputfile,
                                 all_features,
                                 self.get_raster_file(),
                                 creating_options=['BIGTIFF=YES'])
    return raster.Data(outputfile)
def get_raster(self):
    '''
    Lazily creates and returns a raster object for this bundle.
    '''
    # Serve the cached instance when one was already built.
    if self.raster is not None:
        return self.raster
    key = _BASE % (self.get_letter(), self.get_mission(), 'B1.TIF')
    self.raster = raster.Data(self.file_dictionary[key], self.FORMAT)
    return self.raster
def get_raster(self):
    '''
    Lazily creates and returns a raster object for this bundle.
    '''
    # Build the raster only on first access.
    if self.raster is not None:
        return self.raster
    image_path = self.file_dictionary[self.get_image_file()]
    self.raster = raster.Data(image_path, self.get_format_file())
    return self.raster
def get_raster(self):
    '''
    Lazily creates and returns a raster object for this bundle.
    '''
    # Reuse the instance created by an earlier call.
    if self.raster is not None:
        return self.raster
    self.raster = raster.Data(self.get_raster_file(), self.FORMAT)
    return self.raster
def raster_to_vector_mask(raster_object, output_path, no_data=(0,)):
    '''
    Builds a binary mask raster from raster_object, writes it as
    mask.tif under output_path and polygonizes it into a shapefile
    data source at output_path.

    :param raster_object: raster data object whose pixels are masked.
    :param output_path: directory for mask.tif and the shapefile output.
    :param no_data: iterable of pixel values treated as no-data.
        Fixed: the default used to be the mutable list [0]; mutable
        default arguments are shared across calls, so it is now the
        immutable tuple (0,) — behavior for callers is unchanged.
    :return: None; gdal.Polygonize writes the layer as a side effect.
    '''
    raster_array = raster_object.read_data_file_as_array()
    # NOTE(review): with the default no_data=(0,) every pixel ends up 0:
    # values != 0 are zeroed, after which nothing non-zero remains to be
    # set to 1. Preserved as-is; confirm intended semantics with callers.
    for value in no_data:
        raster_array[raster_array != value] = 0
    raster_array[raster_array != 0] = 1
    mask_file = create_filename(output_path, 'mask.tif')
    create_raster_from_reference(mask_file, raster_array, raster_object.get_file())
    mask_raster = raster.Data(mask_file)
    ds = mask_raster._open_file()
    rb = ds.GetRasterBand(1)
    dst_layername = 'POLYGONIZED_STUFF'
    drv = ogr.GetDriverByName(str('ESRI Shapefile'))
    dst_ds = drv.CreateDataSource(output_path)
    dst_layer = dst_ds.CreateLayer(dst_layername, srs=None)
    #dst_layer.SetSpatialRef(raster.get_spatial_reference())
    gdal.Polygonize(rb, None, dst_layer, -1, [])
def test_maf_image(self): ''' Perform a maf transformation with the result of imad transformation ''' from madmex.mapper.data import raster from madmex.transformation import maf gdal_format = "GTiff" #image_imad = '/Users/erickpalacios/test_imad_pair_images/result_change_detection.tif' #image_imad = '/Users/erickpalacios/Documents/CONABIO/Tareas/1_DeteccionCambiosSpot/2_AdapterParaDeteccionDeCambios/Tarea2/res12CambiosMadTransfJulian/593_318_031210_SP5_593_318_021114_SP5_mad.tif' image_imad = '/LUSTRE/MADMEX/staging/antares_test/test_imad_pair_images/result_mad_prueba.tif' image_imad_class = raster.Data(image_imad, gdal_format) width, height, bands = image_imad_class.get_attribute( raster.DATA_SHAPE) print 'bands:' print bands geotransform = image_imad_class.get_attribute(raster.GEOTRANSFORM) projection = image_imad_class.get_attribute(raster.PROJECTION) maf_class = maf.Transformation( image_imad_class.read_data_file_as_array()) maf_class.execute() output = get_parent(image_imad) output += '/result_maf.tif' print output image_maf = image_imad_class.create_from_reference( output, width, height, bands - 1, geotransform, projection) print 'write' image_imad_class.write_raster(image_maf, maf_class.output)
def get_raster(self):
    '''
    Lazily creates and returns a raster object for this bundle.
    '''
    # Only build (and post-process) the raster on the first call.
    if self.raster is not None:
        return self.raster
    key = _BASE_SR % (self.get_letter(), self.get_mission(), '.hdf$')
    self.raster = raster.Data(self.file_dictionary[key], self.FORMAT)
    self.raster._extract_hdf_raster_properties(FILES[0])
    return self.raster
def test_create_raster_in_memory(self):
    '''
    Create a raster in memory
    '''
    from madmex.mapper.data import raster
    # An empty path is enough: the data lives in memory, not on disk.
    placeholder = ''
    in_memory = raster.Data(placeholder, '')
    in_memory.create_raster_in_memory()
def test_harmonize_pair_images(self):
    '''
    Harmonize pair images based on three criteria: geographical
    transformation, projection and shape of the data
    '''
    from madmex.mapper.data import harmonized
    from madmex.mapper.data import raster
    image1 = '/LUSTRE/MADMEX/eodata/rapideye/1147524/2012/2012-10-18/l3a/2012-10-18T191005_RE3_3A-NAC_11137283_149747.tif'
    image2 = '/LUSTRE/MADMEX/eodata/rapideye/1147524/2013/2013-09-09/l3a/1147524_2013-09-09_RE5_3A_175826.tif'
    gdal_format = "GTiff"
    first_raster = raster.Data(image1, gdal_format)
    second_raster = raster.Data(image2, gdal_format)
    pair = harmonized.Data(first_raster, second_raster)
    # The harmonized pair must agree on extent and geotransform.
    self.assertEqual(pair.get_attribute(harmonized.XRANGE), 5000)
    self.assertEqual(pair.get_attribute(harmonized.GEOTRANSFORM),
                     (715500.0, 5.0, 0.0, 2040500.0, 0.0, -5.0))
def test_get_raster_properties(self):
    '''
    This method tests the extraction of raster properties.
    '''
    from madmex.mapper.data import raster
    folder = '/LUSTRE/MADMEX/eodata/rapideye/1447720/2013/2013-02-11/l3a/1447720_2013-02-11_RE3_3A_182802.tif'
    gdal_format = 'GTiff'
    data_class = raster.Data(folder, gdal_format)
    # Query each supported attribute in turn, in the same order as before.
    for attribute in (raster.GEOTRANSFORM,
                      raster.FOOTPRINT,
                      raster.DRIVER_METADATA,
                      raster.METADATA_FILE):
        data_class.get_attribute(attribute)
def test_imad_pair_images(self): ''' Perform an imad transformation with two images ''' from madmex.transformation import imad from madmex.mapper.data import harmonized from madmex.mapper.data import raster image1 = '/LUSTRE/MADMEX/eodata/rapideye/1147524/2012/2012-10-18/l3a/2012-10-18T191005_RE3_3A-NAC_11137283_149747.tif' image2 = '/LUSTRE/MADMEX/eodata/rapideye/1147524/2013/2013-09-09/l3a/1147524_2013-09-09_RE5_3A_175826.tif' #image2 = '/LUSTRE/MADMEX/eodata/spot/556297/2010/2010-01-26/1a/556_297_260110_SP5.img' gdal_format = "GTiff" image1_data_class = raster.Data(image1, gdal_format) image2_data_class = raster.Data(image2, gdal_format) harmonized_class = harmonized.Data(image1_data_class, image2_data_class) if harmonized_class: data_shape_harmonized = harmonized_class.get_attribute( harmonized.DATA_SHAPE) width, height, bands = data_shape_harmonized geotransform_harmonized = harmonized_class.get_attribute( harmonized.GEOTRANSFORM) projection_harmonized = harmonized_class.get_attribute( harmonized.PROJECTION) image1_data_array, image2_data_array = harmonized_class.harmonized_arrays( image1_data_class, image2_data_class) imad_class = imad.Transformation( [image1_data_array, image2_data_array]) imad_class.execute() output = os.path.join(os.path.expanduser('~'), 'test_imad_pair_images') create_directory_path(output) output += '/result_mad.tif' mad_image = harmonized_class.create_from_reference( output, width, height, (bands + 1), geotransform_harmonized, projection_harmonized) harmonized_class.write_raster(mad_image, imad_class.output) print 'corrlist' print imad_class.outcorrlist
def test_calculate_ndvi(self):
    '''
    Computes two NDVI feature bands from a RapidEye scene and writes
    them next to the input image.
    '''
    from madmex.util import get_parent
    from madmex.mapper.data import raster
    from madmex.processing.raster import calculate_ndvi
    import numpy
    image = "/Users/erickpalacios/Documents/CONABIO/Tareas/4_RedisenioMadmex/5_Clasificacion/rapideyemapgrid/folder_test/rapideye/1649125/2014/2014-01-23/L3A/1649125_2014-01-23_RE4_3A_301519.tif"
    gdal_format = "GTiff"
    source = raster.Data(image, gdal_format)
    bands_array = source.read_data_file_as_array()
    width, height, bands = source.get_attribute(raster.DATA_SHAPE)
    features = numpy.zeros([2, width, height])
    # NDVI from band 5 vs band 3, and band 4 vs band 3.
    features[0, :, :] = calculate_ndvi(bands_array[4, :, :], bands_array[2, :, :])
    features[1, :, :] = calculate_ndvi(bands_array[3, :, :], bands_array[2, :, :])
    out = get_parent(image) + 'result_ndvi'
    raster.create_raster_tiff_from_reference(source.metadata, out, features)
def vector_to_raster(vector, output_path, x_size, y_size, options, data_type=gdal.GDT_Int32): ''' This method creates a raster object by burning the values of this shape file into a raster with the given resolution. ''' source_layer = vector.get_layer() x_min, x_max, y_min, y_max = source_layer.GetExtent() print source_layer.GetExtent() x_resolution = int((x_max - x_min) / x_size) y_resolution = int((y_max - y_min) / -y_size) print x_resolution, y_resolution LOGGER.info(x_min, x_max, y_min, y_max) target_ds = gdal.GetDriverByName(str('GTiff')).Create(output_path, x_resolution, y_resolution, 1, data_type) spatial_reference = vector.get_spatial_reference() target_ds.SetProjection(spatial_reference.ExportToWkt()) target_ds.SetGeoTransform((x_min, x_size, 0, y_max, 0, -y_size)) gdal.RasterizeLayer(target_ds, [1], source_layer, options=options) target_ds.FlushCache() return raster.Data(output_path)
def test_create_image_from_reference(self):
    '''
    Test functionality on creating a raster using numerical data.
    '''
    from madmex.mapper.data import raster
    image = '/LUSTRE/MADMEX/eodata/rapideye/1447720/2013/2013-02-11/l3a/1447720_2013-02-11_RE3_3A_182802.tif'
    gdal_format = "GTiff"
    source = raster.Data(image, gdal_format)
    geotransform = source.get_attribute(raster.GEOTRANSFORM)
    projection = source.get_attribute(raster.PROJECTION)
    # DATA_SHAPE is a (width, height, bands) triple.
    width, height, number_of_bands = source.get_attribute(raster.DATA_SHAPE)
    outname = image + 'result.TIF'
    pixels = source.read_data_file_as_array()
    target = source.create_from_reference(outname, width, height,
                                          number_of_bands, geotransform,
                                          projection)
    source.write_raster(target, pixels)
def handle(self, **options):
    '''
    Builds a training dataset from RapidEye scenes and trains one model
    per requested name.

    Rasterizes the training shapefile, extracts a feature stack per
    scene, joins features with training labels, and runs train/test
    model fitting for each model in options['model'].
    '''
    target_tag = 'DN'
    start_time_all = time.time()
    shape_name = options['shape'][0]
    raster_paths = options['path']
    destination = options['dest']
    models = options['model']
    dataframe_features = None
    temporary_directory = getattr(SETTINGS, 'TEMPORARY')
    create_directory_path(temporary_directory)
    # I read the training data in shape form
    training_shape = vector.Data(shape_name)
    training_dataframe = training_shape.to_dataframe()
    training_path = create_filename(temporary_directory, 'training_raster.tif')
    categories_file = create_filename(temporary_directory, 'categories.json')
    training_warped_path = create_filename(temporary_directory, 'training_warped_raster.tif')
    # Pixel size in degrees; presumably matched to the scene resolution — TODO confirm.
    pixel_size = 0.000462175996292
    # Rasterize + reproject the training shape only once; reuse on later runs.
    if not is_file(training_warped_path):
        training_raster = vector_to_raster(training_shape, training_path,
                                           pixel_size, -pixel_size,
                                           ['ATTRIBUTE=OBJECTID', 'COMPRESS=LZW'])
        training_raster_warped = training_raster.reproject(training_warped_path, epgs=32617)
    else:
        training_raster_warped = raster.Data(training_warped_path)
    dem_file = getattr(SETTINGS, 'DEM')
    dem_raster = raster.Data(dem_file)
    print dem_raster.get_spatial_reference()
    print 'reproyecting raster'
    #dem_raster_warped = dem_raster.reproject(training_warped_path, epgs=32614)
    #training_raster_warped = raster.Data(training_path)
    # NOTE(review): aspect/slope files are loaded but never used below — confirm.
    aspect_file = getattr(SETTINGS, 'ASPECT')
    slope_file = getattr(SETTINGS, 'SLOPE')
    print dem_file, aspect_file, slope_file
    # Accumulate the feature dataframe over every scene in the input paths.
    for raster_path in raster_paths:
        scene_bundle = rapideye.Bundle(raster_path)
        raster_mask = scene_bundle.get_raster()
        #example_path = create_filename(temporary_directory, 'mask')
        #create_directory_path(example_path)
        #raster_to_vector_mask(raster_mask, example_path)
        print scene_bundle.get_raster_file()
        basename = get_basename(scene_bundle.get_raster_file())
        all_file = create_filename(temporary_directory, '%s_all_features.tif' % basename)
        # Do not recalculate if the file is already there.
        if is_file(all_file):
            features_raster = raster.Data(all_file)
        else:
            features_raster = scene_bundle.get_feature_array(all_file)
        new_df = get_dataframe_from_raster(features_raster, training_raster_warped)
        if new_df is not None:
            if dataframe_features is not None:
                dataframe_features = pandas.concat([dataframe_features,
                                                    get_dataframe_from_raster(features_raster, training_raster_warped)])
            else:
                dataframe_features = get_dataframe_from_raster(features_raster, training_raster_warped)
    features_size = len(list(dataframe_features))
    # Join pixel features with the training labels via the OBJECTID column.
    training_set = dataframe_features.set_index(0).join(training_dataframe.set_index('OBJECTID'))
    print training_set
    # NOTE(review): pandas.Categorical.from_array is deprecated in newer pandas.
    training_set['target'] = pandas.Categorical.from_array(training_set[target_tag]).labels
    categories_array = pandas.Categorical.from_array(training_set[target_tag]).categories
    create_categories_file(categories_file, categories_array)
    # Drop rows whose label did not map to a category.
    training_set = training_set[training_set['target'] != -1]
    #features_size includes 0 that is the index of the feature
    training_set_array = numpy.transpose(numpy.transpose(training_set.as_matrix([range(1, features_size)])))
    target_set_array = training_set.pop('target')
    print training_set_array.shape
    print target_set_array.shape
    X_train, X_test, y_train, y_test = train_test_split(training_set_array,
                                                        target_set_array,
                                                        train_size=0.8,
                                                        test_size=0.2)
    models_directory = create_filename(temporary_directory, 'models')
    create_directory_path(models_directory)
    # Fit and time each requested model on the same split.
    for model_name in models:
        start_time = time.time()
        print numpy.unique(y_train)
        train_model(X_train, X_test, y_train, y_test, models_directory, model_name)
        print "--- %s seconds training %s model---" % ((time.time() - start_time), model_name)
def handle(self, **options):
    '''
    Object-based classification workflow.

    Segments the input image in a docker container, computes zonal
    statistics per segment, builds a C5.0 training set from a clipped
    training raster (with optional PCA-based outlier elimination),
    runs C5.0 training and prediction in docker, and rasterizes the
    predicted classes and confidences.
    '''
    image_to_be_classified = options['image'][0]
    #landmask_path = options['landmask_path'][0]
    outlier = options['outlier'][0]
    folder_results = getattr(SETTINGS, 'BIG_FOLDER')
    shutil.copy(image_to_be_classified, folder_results)
    image_to_be_classified = folder_results + get_basename_of_file(image_to_be_classified)
    landmask_path = getattr(SETTINGS, 'LANDMASK_PATH')
    # Path as seen from inside the segmentation container.
    image_for_segmentation = '/results/' + get_basename_of_file(image_to_be_classified)
    LOGGER.info('Starting segmentation with: %s' % image_for_segmentation)
    # Segmentation parameters (threshold, shape, compactness, tiling).
    val_t = 50
    val_s = 0.7
    val_c = 0.3
    val_xt = 40
    val_rows = 625
    val_tile = True
    val_mp = True
    folder_and_bind_segmentation = getattr(SETTINGS, 'FOLDER_SEGMENTATION')
    folder_and_bind_license = getattr(SETTINGS, 'FOLDER_SEGMENTATION_LICENSE')
    folder_and_bind_image = getattr(SETTINGS, 'BIG_FOLDER_HOST')
    LOGGER.info('starting segmentation')
    command = 'run_container'
    hosts_from_command = get_host_from_command(command)
    LOGGER.info('The command to be executed is %s in the host %s' % (command, hosts_from_command[0].hostname))
    remote = RemoteProcessLauncher(hosts_from_command[0])
    arguments = 'docker run --rm -v ' + folder_and_bind_segmentation + ' -v ' + folder_and_bind_license + ' -v ' + folder_and_bind_image + ' madmex/segmentation python /segmentation/segment.py ' + image_for_segmentation
    arguments += ' -t ' + str(val_t) + ' -s ' + str(val_s) + ' -c ' + str(val_c) + ' --tile ' + str(val_tile) + ' --mp ' + str(val_mp) + ' --xt ' + str(val_xt) + ' --rows ' + str(val_rows)
    remote.execute(arguments)
    LOGGER.info('Finished segmentation')
    # The segmenter names its output from the parameters, with dots stripped.
    image_segmentation_file = image_to_be_classified + '_' + str(val_t) + '_' + ''.join(str(val_s).split('.')) + '_' + ''.join(str(val_c).split('.')) + '.tif'
    LOGGER.info('Starting vectorization of segmentation file: %s' % image_segmentation_file)
    image_segmentation_shp_file = image_segmentation_file + '.shp'
    vectorize_raster(image_segmentation_file, 1, image_segmentation_shp_file, 'objects', 'id')
    LOGGER.info('Finished vectorization: %s' % image_segmentation_shp_file)
    gdal_format = 'GTiff'
    image_to_be_classified_class = raster.Data(image_to_be_classified, gdal_format)
    width, height, bands = image_to_be_classified_class.get_attribute(raster.DATA_SHAPE)
    LOGGER.info('Identifying landmask %s' % landmask_path)
    bundle = _get_bundle_from_path(landmask_path)
    # Rasterize the landmask bundle onto the image grid when one is found.
    if bundle:
        LOGGER.info('Directory %s is a %s bundle', landmask_path, bundle.get_name())
        LOGGER.info('Rasterizing vector shape')
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.DATA_SHAPE, (width, height, 1), {})
        dataset_landmask_rasterized = create_raster_tiff_from_reference(image_to_be_classified_class.metadata, '', None, options_to_create)
        bundle.rasterize(dataset_landmask_rasterized, [1], [1])
        #the rasterized process changes the dataset
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.GDAL_CREATE_OPTIONS, ['COMPRESS=LZW'], {})
        image = folder_results + 'landmask_rasterized.tif'
        create_raster_tiff_from_reference(image_to_be_classified_class.metadata, image, dataset_landmask_rasterized.ReadAsArray(), options_to_create)
        LOGGER.info('Finished rasterizing vector shape')
    # NOTE(review): the steps below read landmask_rasterized.tif, which is
    # only written inside the `if bundle:` branch — confirm a bundle is
    # always found for valid inputs.
    LOGGER.info('Polygonizing the landmask rasterized')
    landmask_folder = folder_results + 'landmask_from_rasterize/'
    layer_landmask = 'landmask'
    landmask_file = landmask_folder + layer_landmask + '.shp'
    layer_landmask = 'landmask'
    vectorize_raster(folder_results + 'landmask_rasterized.tif', 1, landmask_folder, layer_landmask, 'id')
    LOGGER.info('Folder of polygon: %s' % landmask_folder)
    image_segmentation_file_class = raster.Data(image_segmentation_file, gdal_format)
    LOGGER.info('Reading array of %s' % image_for_segmentation)
    array_sg_raster = image_segmentation_file_class.read_data_file_as_array()
    # Segment labels present in the segmentation raster.
    unique_labels_for_objects = numpy.unique(array_sg_raster)
    LOGGER.info('Calculating zonal stats for :%s' % image_to_be_classified)
    LOGGER.info('Reading array of %s' % image_to_be_classified)
    array_image_to_be_classified = image_to_be_classified_class.read_data_file_as_array()
    array_zonal_statistics = calculate_zonal_statistics(array_image_to_be_classified, array_sg_raster, unique_labels_for_objects)
    LOGGER.info('finished zonal statistics')
    array_zonal_statistics_labeled = append_labels_to_array(array_zonal_statistics, unique_labels_for_objects)
    LOGGER.info('Shape of array of zonal statistics labeled %s %s' % (array_zonal_statistics_labeled.shape[0], array_zonal_statistics_labeled.shape[1]))
    LOGGER.info('Building data frame')
    dataframe_zonal_statistics = create_names_of_dataframe_from_filename(build_dataframe_from_array(array_zonal_statistics_labeled.T), array_zonal_statistics_labeled.shape[0], get_basename_of_file(image_to_be_classified))
    LOGGER.info('Filling NaN with zeros')
    dataframe_zonal_statistics = dataframe_zonal_statistics.fillna(0)
    file_name = folder_results + 'dataframe_zonal_statistics'
    dataframe_zonal_statistics.to_csv(file_name, sep='\t', encoding='utf-8', index = False)
    LOGGER.info('Working with the training data')
    training_data_file = getattr(SETTINGS, 'TRAINING_DATA')
    LOGGER.info('Clipping training_data_file: %s with: %s' % (training_data_file, landmask_file))
    training_data_file_clipped = folder_results + get_basename_of_file(training_data_file) + '_cropped_subprocess_call.tif'
    # Clip the training raster to the landmask via the gdalwarp CLI.
    command = [
        'gdalwarp', '-cutline', landmask_file, '-crop_to_cutline', '-of', 'GTiff', '-co', 'compress=lzw', '-co', 'tiled=yes', training_data_file, training_data_file_clipped
    ]
    subprocess.call(command)
    LOGGER.info('Finished clipping of training data file')
    LOGGER.info('Starting warping of file: %s according to %s ' % (training_data_file_clipped, image_segmentation_file))
    dataset_warped_training_data_file = warp_raster_from_reference(training_data_file_clipped, image_segmentation_file_class.data_file, None)
    LOGGER.info('Starting resizing of array of training file: %s' % training_data_file_clipped)
    array_resized_and_warped_training_data_file = get_array_resized_from_reference_dataset(dataset_warped_training_data_file, image_segmentation_file_class.data_file)
    import gdal
    training_data_file_resized_and_warped = folder_results + get_basename_of_file(training_data_file) + '_resized_and_warped.tif'
    options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.GDAL_CREATE_OPTIONS, ['TILED=YES', 'COMPRESS=LZW', 'INTERLEAVE=BAND'], {})
    create_raster_tiff_from_reference(image_to_be_classified_class.metadata, training_data_file_resized_and_warped, array_resized_and_warped_training_data_file, options_to_create, data_type = gdal.GDT_Int32)
    LOGGER.info('Starting resampling')
    array_training_data_resampled = resample_numpy_array(array_resized_and_warped_training_data_file, width, height, interpolation = 'nearest')
    training_data_file_resampled = folder_results + get_basename_of_file(training_data_file) + '_resampled_from_resized_and_warped.tif'
    create_raster_tiff_from_reference(image_to_be_classified_class.metadata, training_data_file_resampled, array_training_data_resampled, options_to_create, data_type = gdal.GDT_Int32)
    LOGGER.info('Calculating zonal histograms for file: %s according to: %s' % (training_data_file_resampled, image_segmentation_file))
    unique_classes = numpy.unique(array_training_data_resampled)
    array_of_distribution_of_classes_per_object_segmentation = calculate_zonal_histograms(array_training_data_resampled, unique_classes, array_sg_raster, unique_labels_for_objects)
    LOGGER.info('Shape of zonal histogram: %s %s' % (array_of_distribution_of_classes_per_object_segmentation.shape[0], array_of_distribution_of_classes_per_object_segmentation.shape[1]))
    # Free the resampled array; it is no longer needed.
    array_training_data_resampled = None
    LOGGER.info('Getting objects that have a class of at least .75 proportion within zonal histogram')
    dataframe_of_objects_for_training_data = get_objects_by_relative_proportion_from_raster_as_dataframe(array_of_distribution_of_classes_per_object_segmentation, unique_labels_for_objects, unique_classes, ["id", "given"], 0.75)
    file_name = folder_results + 'dataframe_of_objects_for_training_data'
    dataframe_of_objects_for_training_data.to_csv(file_name, sep='\t', encoding='utf-8', index = False)
    LOGGER.info('Number of rows and columns of dataframe of pure objects of training data %s %s' % (len(dataframe_of_objects_for_training_data.index), len(dataframe_of_objects_for_training_data.columns)))
    array_of_distribution_of_classes_per_object_segmentation = None
    # Round-trip the dataframe through csv; presumably to normalize dtypes — TODO confirm.
    dataframe_of_objects_for_training_data = None
    dataframe_of_objects_for_training_data = pandas.read_csv(file_name, sep='\t')
    LOGGER.info('Joining dataframe of dataframe zonal statistics and dataframe of objects of training data')
    dataframe_all_joined_classified = join_dataframes_by_column_name([dataframe_zonal_statistics, dataframe_of_objects_for_training_data], 'id')
    LOGGER.info('Number of rows and columns of dataframe joined %s %s' % (len(dataframe_all_joined_classified.index), len(dataframe_all_joined_classified.columns)))
    # Optional outlier elimination: PCA-reduce features, then drop outlier objects.
    if outlier == 'True':
        LOGGER.info('Starting outlier elimination with dataframe of zonal statistics and dataframe of pure objects of training data')
        LOGGER.info('Starting principal component analysis')
        array_reduced_pca = reduce_dimensionality(dataframe_all_joined_classified, .95, ['id', 'given'])
        LOGGER.info('Shape of reduced array of zonal statistics and pure objects of training data by pca: %s %s' % (array_reduced_pca.shape[0], array_reduced_pca.shape[1]))
        labels_of_objects_reduced_dataframe = dataframe_all_joined_classified['id'].values
        LOGGER.info('Appending labels')
        array_reduced_pca_labeled = append_labels_to_array(array_reduced_pca.T, labels_of_objects_reduced_dataframe)
        LOGGER.info('Shape of array reduced by pca and labeled: %s %s' % (array_reduced_pca_labeled.shape[0], array_reduced_pca_labeled.shape[1]))
        LOGGER.info('Building data frame')
        dataframe_reduced_pca_file = folder_results + 'dataframe_joined_for_zonal_statistics_and_pure_objects_of_training_data_reduced_by_pca'
        dataframe_reduced_pca = create_names_of_dataframe_from_filename(build_dataframe_from_array(array_reduced_pca_labeled.T), array_reduced_pca_labeled.shape[0], get_basename_of_file(dataframe_reduced_pca_file))
        dataframe_reduced_pca.to_csv(dataframe_reduced_pca_file, sep=',', encoding='utf-8', index = False)
        LOGGER.info('Starting with elimination of outliers')
        LOGGER.info('Joining reduced dataframe by pca with object ids and dataframe of pure objects of training data')
        dataframe_reduced_pca_with_classes = join_dataframes_by_column_name([dataframe_reduced_pca, dataframe_of_objects_for_training_data], 'id')
        LOGGER.info('Number of rows and columns of dataframe joined: (%s,%s)' % (len(dataframe_reduced_pca_with_classes.index), len(dataframe_reduced_pca_with_classes.columns)))
        dataframe_reduced_pca_with_classes.to_csv(dataframe_reduced_pca_file + 'classes', sep = ',', encoding = 'utf8', index = False)
        unique_classes = numpy.unique(dataframe_of_objects_for_training_data['given'].values)
        object_ids_outlier_elimination = outlier_elimination_for_dataframe(dataframe_reduced_pca_with_classes, 'id', 'given', 'id', 3, unique_classes, 0.15)
        object_ids_outlier_elimination_file = folder_results + 'dataframe_object_ids_outlier_elimination'
        object_ids_outlier_elimination.to_csv(object_ids_outlier_elimination_file, sep = ',', encoding = 'utf-8', index = False)
        LOGGER.info('Joining all dataframes according to ids of outlier elimination ')
        dataframe_all_joined_classified = join_dataframes_by_column_name([object_ids_outlier_elimination, dataframe_all_joined_classified], 'id')
        LOGGER.info('Number of rows and columns of dataframe joined classified: (%s,%s)' % (len(dataframe_all_joined_classified.index), len(dataframe_all_joined_classified.columns)))
    # '?' marks unlabeled rows for the C5 cases file.
    dataframe_zonal_statistics['given'] = '?'
    LOGGER.info('Number of rows and columns of dataframe for classifying: (%s,%s)' % (len(dataframe_zonal_statistics.index), len(dataframe_zonal_statistics.columns)))
    index_of_objects_not_id_zero = dataframe_zonal_statistics['id'] > 0
    dataframe_all_joined_for_classifying = dataframe_zonal_statistics[index_of_objects_not_id_zero]
    LOGGER.info('Number of rows and columns of dataframe for classifying after removing object with id zero: (%s,%s)' % (len(dataframe_all_joined_for_classifying.index), len(dataframe_all_joined_for_classifying.columns)))
    # Write the three C5.0 input files: .data, .cases and .names.
    LOGGER.info('Generating data file')
    dataframe_all_joined_classified_file = folder_results + 'C5.data'
    dataframe_all_joined_classified.to_csv(dataframe_all_joined_classified_file, sep = ',', encoding = 'utf-8', index = False, header = False)
    LOGGER.info('Generating cases file')
    dataframe_all_joined_for_classifying_file = folder_results + 'C5.cases'
    dataframe_all_joined_for_classifying.to_csv(dataframe_all_joined_for_classifying_file, sep = ',', encoding = 'utf-8', index = False, header = False)
    LOGGER.info('Generating names file')
    unique_classes = numpy.unique(dataframe_all_joined_classified['given'].values)
    name_namesfile = folder_results + 'C5.names'
    generate_namesfile(dataframe_all_joined_classified.columns, unique_classes, name_namesfile, 'id', 'given')
    command = 'run_container'
    hosts_from_command = get_host_from_command(command)
    LOGGER.info('The command to be executed is %s in the host %s' % (command, hosts_from_command[0].hostname))
    remote = RemoteProcessLauncher(hosts_from_command[0])
    folder_and_bind_c5 = getattr(SETTINGS, 'BIG_FOLDER_HOST')
    # Train C5.0 in docker, then predict; second execute captures stdout.
    arguments = 'docker run --rm -v ' + folder_and_bind_c5 + ' madmex/c5_execution ' + 'c5.0 -b -f /results/C5'
    LOGGER.info('Beginning C5')
    remote.execute(arguments)
    LOGGER.info('Begining predict')
    arguments = 'docker run --rm -v ' + folder_and_bind_c5 + ' madmex/c5_execution ' + 'predict -f /results/C5'
    remote = RemoteProcessLauncher(hosts_from_command[0])
    output = remote.execute(arguments, True)
    LOGGER.info('Writing C5 result to csv')
    C5_result = write_C5_result_to_csv(output, folder_results)
    LOGGER.info('Using result of C5: %s for generating land cover shapefile and raster image' % C5_result)
    LOGGER.info('Using result of C5 for generating land cover shapefile and raster image')
    C5_result = folder_results + 'C5_result.csv'
    dataframe_c5_result = pandas.read_csv(C5_result)
    FORMAT = 'ESRI Shapefile'
    image_segmentation_shp_class = vector.Data(image_segmentation_shp_file, FORMAT)
    LOGGER.info('Joining dataframe %s to %s' % (C5_result, image_segmentation_shp_file))
    dataframe_joined_shp_segmentation_and_c5_result = join_C5_dataframe_and_shape(image_segmentation_shp_class, 'id', dataframe_c5_result, 'id')
    LOGGER.info('Number of rows and columns of dataframe joined: (%s,%s)' % (len(dataframe_joined_shp_segmentation_and_c5_result.index), len(dataframe_joined_shp_segmentation_and_c5_result.columns)))
    dataframe_joined_shp_segmentation_and_c5_result_file = folder_results + 'dataframe_joined_shp_segmentation_and_c5_result.csv'
    LOGGER.info('Writing csv of join between c5 result and segmentation shape: %s' % dataframe_joined_shp_segmentation_and_c5_result_file)
    dataframe_joined_shp_segmentation_and_c5_result.to_csv(dataframe_joined_shp_segmentation_and_c5_result_file, sep =',', encoding = 'utf8', index = False)
    LOGGER.info('Writing C5 result joined with segmentation shape to shapefile')
    segmentation_and_c5_result_file_vectorized_folder = folder_results + 'segmentation_and_c5_result_vectorized/'
    create_directory_path(segmentation_and_c5_result_file_vectorized_folder)
    # Rebuild geometries from WKT via ogr2ogr's sqlite dialect.
    sql = "SELECT a.id, a.predicted, a.confidence, st_geomfromtext(a.geom," + image_segmentation_shp_class.srid + ") as geometry "
    sql += "from dataframe_joined_shp_segmentation_and_c5_result a"
    shp_result = segmentation_and_c5_result_file_vectorized_folder + '/C5_result_joined_segmentation_shape.shp'
    command = [
        'ogr2ogr', shp_result, dataframe_joined_shp_segmentation_and_c5_result_file, '-dialect', 'sqlite',
        '-sql', sql
    ]
    subprocess.call(command)
    LOGGER.info('Rasterizing segmentation and c5 result shape of folder %s' % segmentation_and_c5_result_file_vectorized_folder)
    LOGGER.info('Identifying segmentation and c5 shape folder %s' % segmentation_and_c5_result_file_vectorized_folder)
    bundle = _get_bundle_from_path(segmentation_and_c5_result_file_vectorized_folder)
    # Burn the predicted class and the confidence into two output rasters.
    if bundle:
        LOGGER.info('Directory %s is a %s bundle', segmentation_and_c5_result_file_vectorized_folder, bundle.get_name())
        LOGGER.info('Rasterizing vector shape to get land cover tif')
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.DATA_SHAPE, (width, height, 1), {})
        dataset_shape_sg_and_c5_rasterized = create_raster_tiff_from_reference(image_to_be_classified_class.metadata, '', None, options_to_create)
        bundle.rasterize(dataset_shape_sg_and_c5_rasterized, [1], None, ["ATTRIBUTE=predicted"])
        #the rasterized process changes the dataset
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.GDAL_CREATE_OPTIONS, ['COMPRESS=LZW'], {})
        image = folder_results + 'madmex_lcc_prueba.tif'
        create_raster_tiff_from_reference(image_to_be_classified_class.metadata, image, dataset_shape_sg_and_c5_rasterized.ReadAsArray(), options_to_create, data_type = gdal.GDT_Int32)
        LOGGER.info('Finished rasterizing vector shape')
        LOGGER.info('Rasterizing vector shape to get confidence tif')
        bundle.rasterize(dataset_shape_sg_and_c5_rasterized, [1], None, ["ATTRIBUTE=confidence"])
        image = folder_results + 'madmex_lcc_confidence_prueba.tif'
        create_raster_tiff_from_reference(image_to_be_classified_class.metadata, image, dataset_shape_sg_and_c5_rasterized.ReadAsArray(), options_to_create)
        LOGGER.info('Finished rasterizing vector shape')
    LOGGER.info('Finished workflow classification :)')
def handle(self, **options):
    '''
    Quality assessment of a change-detection pair.

    Runs a docker-based cross correlation between the image and its
    reference, converts the correlation bands to polar form, computes
    per-class statistics and persists a QualityAssessment record.
    '''
    mapgrid = '1449619'
    acq = get_pair_quality(mapgrid)
    for image in acq:
        print image.pk_id
        print image.pk_id
    # NOTE(review): `id` shadows the builtin and is never used below — confirm.
    id = options["id"][0]
    image_path = options["image"][0]
    reference_path = options["reference"][0]
    output = options["output"][0]
    print image_path
    print reference_path
    image_bundle = _get_bundle_from_path(image_path)
    reference_bundle = _get_bundle_from_path(reference_path)
    #extents = harmonize_images([image_bundle.get_raster(), reference_bundle.get_raster()])
    #print extents
    #print extents['x_offset']
    #print extents['y_offset']
    shape = image_bundle.get_raster().get_attribute(raster.DATA_SHAPE)
    # Constant mask marking every pixel as invariant for the correlation tool.
    # NOTE(review): uses np.int while the rest of the block uses `numpy` —
    # confirm the `np` alias is imported at module level.
    invariant_array = numpy.full((shape[0], shape[1]), INV_MASK_VALUE, dtype=np.int)
    in1 = reference_bundle.get_raster_file()
    in2 = image_bundle.get_raster_file()
    in_invar = create_filename(output, 'invariantPixelMask.tif')
    result = create_filename(output, 'crosscorrelation_next.tif')
    to_polar = create_filename(output, 'crosscorrelation_polar.tif')
    create_raster_from_reference(in_invar, invariant_array, image_bundle.get_raster_file(), gdal.GDT_Byte)
    local = LocalProcessLauncher()
    volume = '%s:%s' % (output, output)
    # Assemble the docker correlation command line.
    shell_array = ['docker', 'run', '--rm', '-v', volume,
                   'madmex/antares', 'correlation',
                   '-in1', in1,
                   '-in2', in2,
                   '-in_invar', in_invar,
                   '-val_invar', '%s' % INV_MASK_VALUE,
                   '-out', result,
                   '-window_size', '%s' % WINDOW_SIZE,
                   '-max_gap', '%s' % MAX_GAP]
    shell_string = ' '.join(shell_array)
    print shell_string
    # Skip the expensive correlation when a previous run left its output.
    if not is_file(result):
        log = local.execute(shell_string)
    crosscorrelation = raster.Data(result, 'GTiff')
    print crosscorrelation.get_attribute(raster.PROJECTION)
    print crosscorrelation.get_attribute(raster.GEOTRANSFORM)
    #tile_map(result, result)
    correlation_array = crosscorrelation.read_data_file_as_array()
    band_0 = correlation_array[0, :]
    band_1 = correlation_array[1, :]
    # Convert the two correlation bands to polar coordinates (angle, magnitude).
    phi_band = phi(band_0, band_1)
    rho_band = rho(band_0, band_1)
    correlation_array[0, :] = phi_band
    correlation_array[1, :] = rho_band
    #create_raster_from_reference(to_polar, correlation_array, result)
    # NOTE(review): the polar raster is read from disk but its write above is
    # commented out — confirm crosscorrelation_polar.tif exists beforehand.
    crosscorrelation_polar = raster.Data(to_polar, 'GTiff')
    extents = harmonize_images([crosscorrelation_polar, reference_bundle.get_raster()])
    x_offset = extents['x_offset'][1]
    y_offset = extents['y_offset'][1]
    x_tile_size = extents['x_range']
    y_tile_size = extents['y_range']
    aux_name = create_filename(output, 'auxiliar.tif')
    tile_map(reference_bundle.get_raster_file(), aux_name, x_tile_size, y_tile_size, x_offset, y_offset)
    aux_array = raster.Data(aux_name, 'GTiff').read_data_file_as_array()
    crosscorrelation_polar_array = crosscorrelation_polar.read_data_file_as_array()
    stats = calculate_statistics_qa(crosscorrelation_polar_array, aux_array, STAT_CLASSES, STAT_MIN, STAT_MAX, THRESHOLD_COD, THRESHOLD_LOG)
    desision = calculate_decision(stats['band_1']['histogram'], stats['band_1']['histogram_bins'])
    print stats
    # Persist the assessment built from the magnitude band statistics.
    quality = QualityAssessment(decision=desision,
                                max=adapt_numpy_float(stats['band_1']['maximum']),
                                min=adapt_numpy_float(stats['band_1']['minimum']),
                                median=adapt_numpy_float(stats['band_1']['median']),
                                mean=adapt_numpy_float(stats['band_1']['mean']),
                                standard_deviation=adapt_numpy_float(stats['band_1']['std']),
                                product_id=1,
                                reference_id=2)
    persist_quality(quality)
    print desision
def open_handle(filename): data = raster.Data(filename, GTIFF) data_array = data.read_data_file_as_array() data.close() return data_array
def handle(self, **options):
    '''
    This process will call the change detection process from a set of two
    individual images. It will perform the harmonization and the multivariate
    alteration detection on the images. It will then perform a maximum
    correlation factor on them and work with the resulting bands.

    Expected CLI options: "ima", "imb", "output" (each a one-element list).
    Outputs mad.tif, maf.tif, maf_pdf.png and class.tif inside the
    configured TEST_FOLDER.
    '''
    image_a = options['ima'][0]
    image_b = options['imb'][0]
    output_image = options['output'][0]
    LOGGER.info('Image %s will be compared against image %s. Output will be available' \
                ' at %s.', image_a, image_b, output_image)
    gdal_format = "GTiff"
    image_a_data_class = raster.Data(image_a, gdal_format)
    image_b_data_class = raster.Data(image_b, gdal_format)
    # TODO : remove references to class harmonized
    harmonized_class = harmonized.Data(image_a_data_class, image_b_data_class)
    if harmonized_class:
        # Bring both images onto a common extent, then run the iMAD
        # transformation on the harmonized arrays.
        image_a_data_array, image_b_data_array = harmonized_class.harmonized_arrays(
            image_a_data_class, image_b_data_class)
        imad_class = imad.Transformation([image_a_data_array, image_b_data_array])
        imad_class.execute()
        mad_result = imad_class.output
        LOGGER.debug('mad_result.shape: %s', mad_result.shape)
        create_directory_path(getattr(SETTINGS, 'TEST_FOLDER'))
        mad_output_file = create_filename(getattr(SETTINGS, 'TEST_FOLDER'), 'mad.tif')
        # The MAD result is georeferenced against the first input image.
        create_raster_from_reference(mad_output_file, mad_result, image_a)
        # Maximum autocorrelation factor on the MAD bands.
        maf_class = maf.Transformation(imad_class.output)
        maf_class.execute()
        maf_result = maf_class.output
        pdf_file = create_filename(getattr(SETTINGS, 'TEST_FOLDER'), 'maf_pdf.png')
        # Threshold grid derived from the MAF band densities; used to recode
        # the continuous MAF output into discrete change classes.
        thresholds = calc_threshold_grid(maf_result, pdf_file)
        class_result = recode_classes_grid(maf_result, thresholds)
        LOGGER.debug('maf_result.shape: %s', maf_result.shape)
        LOGGER.debug('class_result.shape: %s', class_result.shape)
        maf_outputfile = create_filename(getattr(SETTINGS, 'TEST_FOLDER'), 'maf.tif')
        class_outputfile = create_filename(getattr(SETTINGS, 'TEST_FOLDER'), 'class.tif')
        create_raster_from_reference(maf_outputfile, maf_result, image_a)
        create_raster_from_reference(class_outputfile, class_result, image_a)
        # CONSISTENCY FIX: the method already reports through LOGGER; the
        # final Py2 print statements are replaced with equivalent log calls.
        LOGGER.info('Output written in: %s', mad_output_file)
        LOGGER.info('Shape is %s', imad_class.output.shape)
def handle(self, **options):
    '''
    One-off analysis command for land-cover change statistics.

    Despite the original "sum of numbers" docstring, what this command
    actually does is:
      * load a 1985 and a 2009 classification raster plus a change raster
        from hard-coded local paths,
      * print per-class pixel counts and class-transition areas (in
        hectares: pixels * 30m * 30m * 0.0001) restricted to pixels whose
        change value exceeds 0.5,
      * write the change-masked 1985/2009 classifications out as byte
        GeoTIFFs.
    The two large triple-quoted blocks at the end are retired variants of
    this analysis (Hansen tile differencing and per-state corridor
    statistics) kept as dead code.
    '''
    # Leftover from the command template; never used below.
    sum_of_numbers = 0
    #path_a = options['a'][0]
    #path_b = options['b'][0]
    # 1-degree tile identifiers covering Mexico; only used by the
    # commented-out analyses below (and the len() print that follows).
    tiles = ["086W_20N","086W_21N","087W_17N","087W_18N","087W_19N","087W_20N","087W_21N",
             "088W_16N","088W_17N","088W_18N","088W_19N","088W_20N","088W_21N",
             "089W_15N","089W_16N","089W_17N","089W_18N","089W_19N","089W_20N","089W_21N",
             "090W_14N","090W_15N","090W_16N","090W_17N","090W_18N","090W_19N","090W_20N","090W_21N",
             "091W_14N","091W_15N","091W_16N","091W_17N","091W_18N","091W_19N",
             "092W_14N","092W_15N","092W_16N","092W_17N","092W_18N",
             "093W_15N","093W_16N","093W_17N","093W_18N",
             "094W_16N","094W_17N","094W_18N",
             "095W_15N","095W_16N","095W_17N","095W_18N","095W_19N",
             "096W_15N","096W_16N","096W_17N","096W_18N","096W_19N","096W_20N",
             "097W_15N","097W_16N","097W_17N","097W_18N","097W_19N","097W_20N","097W_21N","097W_22N","097W_23N","097W_24N","097W_25N","097W_26N","097W_27N",
             "098W_16N","098W_17N","098W_18N","098W_19N","098W_20N","098W_21N","098W_22N","098W_23N","098W_24N","098W_25N","098W_26N","098W_27N","098W_28N",
             "099W_16N","099W_17N","099W_18N","099W_19N","099W_20N","099W_21N","099W_22N","099W_23N","099W_24N","099W_25N","099W_26N","099W_27N","099W_28N","099W_29N","099W_30N",
             "100W_16N","100W_17N","100W_18N","100W_19N","100W_20N","100W_21N","100W_22N","100W_23N","100W_24N","100W_25N","100W_26N","100W_27N","100W_28N","100W_29N","100W_30N",
             "101W_17N","101W_18N","101W_19N","101W_20N","101W_21N","101W_22N","101W_23N","101W_24N","101W_25N","101W_26N","101W_27N","101W_28N","101W_29N","101W_30N",
             "102W_17N","102W_18N","102W_19N","102W_20N","102W_21N","102W_22N","102W_23N","102W_24N","102W_25N","102W_26N","102W_27N","102W_28N","102W_29N","102W_30N",
             "103W_18N","103W_19N","103W_20N","103W_21N","103W_22N","103W_23N","103W_24N","103W_25N","103W_26N","103W_27N","103W_28N","103W_29N","103W_30N","103W_31N",
             "104W_18N","104W_19N","104W_20N","104W_21N","104W_22N","104W_23N","104W_24N","104W_25N","104W_26N","104W_27N","104W_28N","104W_29N","104W_30N","104W_31N",
             "105W_19N","105W_20N","105W_21N","105W_22N","105W_23N","105W_24N","105W_25N","105W_26N","105W_27N","105W_28N","105W_29N","105W_30N","105W_31N","105W_32N",
             "106W_21N","106W_22N","106W_23N","106W_24N","106W_25N","106W_26N","106W_27N","106W_28N","106W_29N","106W_30N","106W_31N","106W_32N",
             "107W_23N","107W_24N","107W_25N","107W_26N","107W_27N","107W_28N","107W_29N","107W_30N","107W_31N","107W_32N",
             "108W_24N","108W_25N","108W_26N","108W_27N","108W_28N","108W_29N","108W_30N","108W_31N","108W_32N",
             "109W_22N","109W_23N","109W_24N","109W_25N","109W_26N","109W_27N","109W_28N","109W_29N","109W_30N","109W_31N","109W_32N",
             "110W_22N","110W_23N","110W_24N","110W_25N","110W_27N","110W_28N","110W_29N","110W_30N","110W_31N","110W_32N",
             "111W_24N","111W_25N","111W_26N","111W_27N","111W_28N","111W_29N","111W_30N","111W_31N","111W_32N",
             "112W_24N","112W_25N","112W_26N","112W_27N","112W_28N","112W_29N","112W_30N","112W_31N","112W_32N",
             "113W_26N","113W_27N","113W_28N","113W_29N","113W_30N","113W_31N","113W_32N","113W_33N",
             "114W_26N","114W_27N","114W_28N","114W_29N","114W_30N","114W_31N","114W_32N","114W_33N",
             "115W_27N","115W_28N","115W_29N","115W_30N","115W_31N","115W_32N","115W_33N",
             "116W_30N","116W_31N","116W_32N","116W_33N",
             "117W_32N","117W_33N"]
    print len(tiles)
    # Hard-coded inputs/outputs on the author's machine; this command is a
    # one-off analysis script, not a reusable pipeline step.
    path_1985 = '/Users/agutierrez/Dropbox/Multivariado/classification/classification_1985.tif'
    path_2009 = '/Users/agutierrez/Dropbox/Multivariado/classification/classification_2009.tif'
    path_change = '/Users/agutierrez/Dropbox/Multivariado/change.tif'
    path_change_1985 = '/Users/agutierrez/Dropbox/Multivariado/classification/change_1985.tif'
    path_change_2009 = '/Users/agutierrez/Dropbox/Multivariado/classification/change_2009.tif'
    gdal_format = 'GTiff'
    image_1985 = raster.Data(path_1985, gdal_format)
    image_2009 = raster.Data(path_2009, gdal_format)
    image_change = raster.Data(path_change, gdal_format)
    array_1985 = image_1985.read_data_file_as_array()
    array_2009 = image_2009.read_data_file_as_array()
    array_change = image_change.read_data_file_as_array()
    print array_1985.shape
    print array_2009.shape
    # Class labels and their areas in hectares (30m x 30m pixels).
    print numpy.unique(array_1985, return_counts=True)[0]
    print numpy.unique(array_1985, return_counts=True)[1] * 30 * 30 * 0.0001
    # Same counts restricted to pixels flagged as changed (> 0.5).
    print numpy.unique(array_1985[array_change > .5], return_counts=True)
    print numpy.unique(array_2009[array_change > .5], return_counts=True)
    array_1985_masked = array_1985[array_change > .5]
    array_2009_masked = array_2009[array_change > .5]
    # Transition area accumulator keyed as 'from->to' for the five classes;
    # values are accumulated in hectares. NOTE(review): any class pair not
    # pre-listed here (e.g. a nodata value) would raise KeyError below.
    counts = {'1.0->2.0':0, '1.0->3.0':0, '1.0->4.0':0, '1.0->5.0':0,
              '2.0->1.0':0, '2.0->3.0':0, '2.0->4.0':0, '2.0->5.0':0,
              '3.0->1.0':0, '3.0->2.0':0, '3.0->4.0':0, '3.0->5.0':0,
              '4.0->1.0':0, '4.0->2.0':0, '4.0->3.0':0, '4.0->5.0':0,
              '5.0->1.0':0, '5.0->2.0':0, '5.0->3.0':0, '5.0->4.0':0}
    for i in range(len(array_1985_masked)):
        if not array_1985_masked[i] == array_2009_masked[i]:
            # Each differing pixel adds 0.09 ha to its transition bucket.
            counts['%s->%s' % (array_1985_masked[i], array_2009_masked[i])] = counts['%s->%s' % (array_1985_masked[i], array_2009_masked[i])] + 30 * 30 * 0.0001
    import json
    print json.dumps(counts, indent=1)
    # Zero out unchanged pixels and persist the masked classifications.
    # Note both outputs use path_1985 as the georeference template.
    array_1985[array_change < .5] = 0
    array_2009[array_change < .5] = 0
    create_raster_from_reference(path_change_1985, array_1985, path_1985, data_type=gdal.GDT_Byte)
    create_raster_from_reference(path_change_2009, array_2009, path_1985, data_type=gdal.GDT_Byte)
    # --- Dead code below: earlier per-tile forest-cover differencing. ---
    '''
    init = 1985
    gdal_format = 'GTiff'
    for tile in tiles:
        for i in range(30):
            path_a = '/LUSTRE/MADMEX/tasks/2016_tasks/matt_hansen_forests/tiles/%s/%s_%s.tif' % (tile, tile, init+i)
            path_b = '/LUSTRE/MADMEX/tasks/2016_tasks/matt_hansen_forests/tiles/%s/%s_%s.tif' % (tile, tile, init+i+1)
            path_c = '/LUSTRE/MADMEX/tasks/2016_tasks/matt_hansen_forests/tiles_example/%s/%s_%s_%s.tif' % (tile, tile, init+i,init+i+1)
            directory = '/LUSTRE/MADMEX/tasks/2016_tasks/matt_hansen_forests/tiles_example/%s/' % tile
            print path_a
            print path_b
            print path_c
            #remove_file(path_c)
            create_directory_path(directory)
            if not check_if_file_exists(path_c):
                image_a = raster.Data(path_a, gdal_format)
                image_b = raster.Data(path_b, gdal_format)
                array_a = image_a.read_data_file_as_array()
                array_b = image_b.read_data_file_as_array()
                #mask = ((array_a < 30) | (70 < array_a)) & ((array_b < 30) | (70 < array_b))
                #array_a[array_a <= 30] = 0
                #array_a[30 < array_a] = 1
                #array_b[array_b <= 30] = 0
                #array_b[30 < array_b] = 1
                #diff = array_b - array_a
                diff = numpy.zeros(numpy.shape(array_a))
                upper = 30
                lower = 10
                diff[(array_b < lower) & (array_a > upper)] = 1
                diff[(array_b > upper) & (array_a < lower)] = 2
                #diff[mask] = -9999
                create_raster_from_reference(path_c, diff, path_a, data_type=gdal.GDT_Byte)
            else:
                print 'File exists.'
            print 'Done %s-%s' % (init+i,init+i)
    '''
    # --- Dead code below: per-state protected-area/corridor statistics. ---
    '''
    total = {}
    for state in ['Campeche','Chiapas','Oaxaca','Quintana_Roo','Yucatan']:
        total_state = {}
        for where in ['anps', 'corr-anp', 'est-corr-anp']:
            raster_path = '/LUSTRE/MADMEX/staging/2017_tasks/corredor_biologico/analisis_por_estado-vegetacion/shapes_area_interes_por_estado/%s/%s_%s.tif' % (state, state, where)
            print raster_path
            image = raster.Data(raster_path, gdal_format)
            array = image.read_data_file_as_array()
            array[array > 0] = 1
            counts = numpy.unique(array, return_counts=True)
            print counts
            total_state[where] = counts[1][1]
        total[state] = total_state
    print total
    init = 1985
    for state in ['Campeche','Chiapas','Oaxaca','Quintana_Roo','Yucatan']:
        statistics_by_state = []
        for where in ['anps', 'corr-anp', 'est-corr-anp']:
            for i in range(30):
                mask_path = '/LUSTRE/MADMEX/staging/2017_tasks/corredor_biologico/analisis_por_estado-vegetacion/shapes_area_interes_por_estado/%s/%s_%s.tif' % (state, state, where)
                mask_image = raster.Data(mask_path, gdal_format)
                mask_array = mask_image.read_data_file_as_array()
                path_c = '/LUSTRE/MADMEX/staging/2017_tasks/corredor_biologico/analisis_por_estado-vegetacion/corte_por_area_interes_en_estado/diff-michael-repro/Mexico_TCC_%s_%s_%s_%s.tif' % (state, init+i,init+i+1,where)
                print path_c
                image_c = raster.Data(path_c, gdal_format)
                array_c = image_c.read_data_file_as_array()
                #array_c = array_c[array_c > -9999]
                counts = numpy.unique(array_c, return_counts=True)
                print counts
                #size_non_zero = len(array_c)
                #final = numpy.zeros(size_non_zero, dtype=numpy.int)
                #final[(-40 < array_c) & (array_c < 40)] = 0
                #final[40 < array_c] = 1
                #final[array_c < -40] = 2
                x_resolution = image_c.get_geotransform()[1]
                y_resolution = image_c.get_geotransform()[5]
                area = x_resolution * y_resolution
                stats = {}
                stats['period'] = '%s-%s' % (init+i,init+i+1)
                stats['type'] = where
                stats['resolution'] = area
                if len(counts[1]) > 2:
                    stats['negative'] = counts[1][0]
                    stats['no-change'] = counts[1][1]
                    stats['positive'] = counts[1][2]
                else:
                    stats['negative'] = 0
                    stats['no-change'] = counts[1][0]
                    stats['positive'] = 0
                #stats['net'] = (counts[1][1] - counts[1][2])
                stats['total'] = total[state][where]
                statistics_by_state.append(stats)
        print statistics_by_state
        stats_path = '/LUSTRE/MADMEX/staging/2017_tasks/corredor_biologico/analisis_por_estado-vegetacion/shapes_area_interes_por_estado/stats/estadisticas-michael-repro-%s.json' % state
        import json
        with open(stats_path, 'w') as outfile:
            json.dump(statistics_by_state, outfile)
    '''