def test_rasterize_vector_and_write_to_disk(self):
    """Rasterize the country landmask bundle into a GTiff and persist it to disk."""
    from madmex.mapper.data.raster import new_options_for_create_raster_from_reference, create_raster_tiff_from_reference
    from madmex.core.controller.commands import get_bundle_from_path
    from madmex.mapper.data import raster
    bundle_package = 'madmex.mapper.bundle'
    landmask_path = '/Users/erickpalacios/Documents/CONABIO/MADMEXdata/eodata/footprints/country_mexico/'
    bundle = get_bundle_from_path(landmask_path, '../../../mapper', bundle_package)
    reference_metadata = {
        u'x_range': 7521.0,
        u'y_range': 7741.0,
        u'properties': {
            u'projection': 'PROJCS["UTM Zone 15, Northern Hemisphere",GEOGCS["Unknown datum based upon the WGS 84 ellipsoid",DATUM["Not specified (based on WGS 84 spheroid)",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]',
            u'geotransform': (523185.0, 30.0, 0.0, 2033715.0, 0.0, -30.0),
        },
        u'x_offset': 'array([ 10., 30., 0., 50.])',
        u'y_offset': 'array([-0., -0., -0., -0.])',
    }
    raster_shape = (int(reference_metadata['x_range']), int(reference_metadata['y_range']), 1)
    creation_options = new_options_for_create_raster_from_reference(reference_metadata, raster.DATA_SHAPE, raster_shape, {})
    output_image = '/Users/erickpalacios/Documents/CONABIO/Tareas/Redisenio_MADMEX/clasificacion_landsat/landsat8/classification/rasterize2.tif'
    # Passing None as the array writes a tiff backed by an empty array; the
    # rasterize call below is what fills the dataset.
    dataset = create_raster_tiff_from_reference(reference_metadata, output_image, None, creation_options)
    bundle.rasterize(dataset, [1], [1])
    # Rasterizing changed the dataset, so refresh the creation options from it
    # before the final write.
    new_options_for_create_raster_from_reference(reference_metadata, raster.DATASET, dataset, creation_options)
    create_raster_tiff_from_reference(reference_metadata, output_image, dataset.ReadAsArray())  # persisted to disk
def test_create_raster_in_memory_and_then_write_to_disk(self):
    """Create a raster dataset in memory from reference metadata, then flush it to a GTiff on disk."""
    from madmex.mapper.data.raster import new_options_for_create_raster_from_reference, create_raster_tiff_from_reference
    from madmex.mapper.data import raster
    reference_metadata = {
        u'x_range': 7521.0,
        u'y_range': 7741.0,
        u'properties': {
            u'projection': 'PROJCS["UTM Zone 15, Northern Hemisphere",GEOGCS["Unknown datum based upon the WGS 84 ellipsoid",DATUM["Not specified (based on WGS 84 spheroid)",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]',
            u'geotransform': (523185.0, 30.0, 0.0, 2033715.0, 0.0, -30.0),
        },
        u'x_offset': 'array([ 10., 30., 0., 50.])',
        u'y_offset': 'array([-0., -0., -0., -0.])',
    }
    raster_shape = (int(reference_metadata['x_range']), int(reference_metadata['y_range']), 1)
    creation_options = new_options_for_create_raster_from_reference(reference_metadata, raster.DATA_SHAPE, raster_shape, {})
    # An empty path keeps the dataset in memory only.
    in_memory_dataset = create_raster_tiff_from_reference(reference_metadata, '', None, creation_options)
    new_options_for_create_raster_from_reference(reference_metadata, raster.DATASET, in_memory_dataset, creation_options)
    destination = '/Users/erickpalacios/Documents/CONABIO/Tareas/Redisenio_MADMEX/clasificacion_landsat/landsat8/classification/raster_test_created.tif'
    # Writing with a real path persists the in-memory array to disk.
    create_raster_tiff_from_reference(reference_metadata, destination, in_memory_dataset.ReadAsArray())
def handle(self, **options):
    '''
    Build a reference composite raster either per tile id or from an explicit
    list of image paths, writing each result as a GTiff.

    Expected keys in **options:
        paths: iterable of image paths (optional).
        tiles: iterable of tile identifiers (optional).
        name:  pieces of the output base name, joined into one string.
    '''
    from madmex.mapper.bundle.rapideye import Bundle
    paths = options['paths']
    print(paths)
    tiles = options['tiles']
    name = ''.join(options['name'])
    # NOTE(review): sensor/product ids are hard-coded; confirm they match the
    # catalog entries for RapidEye.
    sensor_id = 1
    product_id = 2
    if tiles:
        for tile in tiles:
            new_paths = get_images_for_tile(int(tile), sensor_id, product_id)
            reference_array = create_reference_array(new_paths)
            bundle = Bundle(new_paths[0])
            re_raster_metadata = bundle.get_raster().metadata
            create_raster_tiff_from_reference(re_raster_metadata, '%s.tif' % tile, reference_array)
    if paths:
        # Materialize as a list so it can be both iterated and indexed.
        new_paths = [util.get_parent(path) for path in paths]
        # Bug fix: reference_array was previously assigned only inside the
        # tiles branch, raising NameError when just paths were supplied.
        reference_array = create_reference_array(new_paths)
        bundle = Bundle(new_paths[0])
        re_raster_metadata = bundle.get_raster().metadata
        create_raster_tiff_from_reference(re_raster_metadata, '%s.tif' % name, reference_array)
def test_create_raster_in_memory_and_then_write_to_disk(self):
    """Round-trip reference metadata through an in-memory dataset and out to a GTiff file."""
    from madmex.mapper.data.raster import new_options_for_create_raster_from_reference, create_raster_tiff_from_reference
    from madmex.mapper.data import raster
    utm_15n_wkt = 'PROJCS["UTM Zone 15, Northern Hemisphere",GEOGCS["Unknown datum based upon the WGS 84 ellipsoid",DATUM["Not specified (based on WGS 84 spheroid)",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]'
    metadata_reference = {
        u'x_range': 7521.0,
        u'y_range': 7741.0,
        u'properties': {
            u'projection': utm_15n_wkt,
            u'geotransform': (523185.0, 30.0, 0.0, 2033715.0, 0.0, -30.0),
        },
        u'x_offset': 'array([ 10., 30., 0., 50.])',
        u'y_offset': 'array([-0., -0., -0., -0.])',
    }
    columns = int(metadata_reference['x_range'])
    rows = int(metadata_reference['y_range'])
    opts = new_options_for_create_raster_from_reference(metadata_reference, raster.DATA_SHAPE, (columns, rows, 1), {})
    # Empty filename: the dataset lives only in memory at this point.
    mem_dataset = create_raster_tiff_from_reference(metadata_reference, '', None, opts)
    new_options_for_create_raster_from_reference(metadata_reference, raster.DATASET, mem_dataset, opts)
    out_path = '/Users/erickpalacios/Documents/CONABIO/Tareas/Redisenio_MADMEX/clasificacion_landsat/landsat8/classification/raster_test_created.tif'
    # Second call with a concrete path writes the array to disk.
    create_raster_tiff_from_reference(metadata_reference, out_path, mem_dataset.ReadAsArray())
def handle(self, **options):
    '''
    Build per-tile reference composites, or — for an explicit list of paths —
    per-band temporal order statistics (labelled max/mean/median/min) from a
    stack of RapidEye scenes, writing each product as a GTiff.

    Expected keys in **options:
        paths: iterable of image paths (optional).
        tiles: iterable of tile identifiers (optional).
        name:  pieces of the output base name, joined into one string.
    '''
    from madmex.mapper.bundle.rapideye import Bundle
    paths = options['paths']
    print(paths)
    tiles = options['tiles']
    name = ''.join(options['name'])
    if tiles:
        for tile in tiles:
            sensor_id = 1
            product_id = 2
            new_paths = get_images_for_tile(int(tile), sensor_id, product_id)
            reference_array = create_reference_array(new_paths)
            bundle = Bundle(new_paths[0])
            re_raster_metadata = bundle.get_raster().metadata
            create_raster_tiff_from_reference(re_raster_metadata, '%s.tif' % tile, reference_array)
    if paths:
        # Materialize as a list so it can be both consumed and indexed.
        new_paths = [util.get_parent(path) for path in paths]
        # Assumed stack layout is (scenes, bands, rows, cols); sorting along
        # axis 0 orders each pixel's time series ascending — TODO confirm.
        reference_array = numpy.sort(create_stacked_array_rapideye(new_paths), axis=0)
        medians = numpy.empty((reference_array.shape[1], reference_array.shape[2], reference_array.shape[3]))
        for band in range(reference_array.shape[1]):
            # Zero is treated as nodata: mask it out before taking the median.
            array_band = numpy.ma.array(reference_array[:, band, :, :], mask=reference_array[:, band, :, :] == 0)
            medians[band] = numpy.ma.median(array_band, axis=0)
        bundle = Bundle(new_paths[0])
        re_raster_metadata = bundle.get_raster().metadata
        # NOTE(review): after an ascending sort, index 0 holds the smallest
        # values and the last index the largest, so the '_max'/'_min' labels
        # below look swapped; '_mean' is slice 8 of the sorted stack, not an
        # arithmetic mean. Behavior preserved as-is — confirm intent.
        # (Removed unused local `limit = reference_array.shape[1] / 5`.)
        create_raster_tiff_from_reference(re_raster_metadata, '%s_max.tif' % name, reference_array[0])
        create_raster_tiff_from_reference(re_raster_metadata, '%s_mean.tif' % name, reference_array[8])
        create_raster_tiff_from_reference(re_raster_metadata, '%s_median.tif' % name, medians)
        create_raster_tiff_from_reference(re_raster_metadata, '%s_min.tif' % name, reference_array[reference_array.shape[0] - 1])
def test_rasterize_vector_and_write_to_disk(self):
    """Burn the Mexico landmask vector bundle into a raster and write the result to disk."""
    from madmex.mapper.data.raster import new_options_for_create_raster_from_reference, create_raster_tiff_from_reference
    from madmex.core.controller.commands import get_bundle_from_path
    from madmex.mapper.data import raster
    package_of_bundles = 'madmex.mapper.bundle'
    footprint_dir = '/Users/erickpalacios/Documents/CONABIO/MADMEXdata/eodata/footprints/country_mexico/'
    landmask_bundle = get_bundle_from_path(footprint_dir, '../../../mapper', package_of_bundles)
    projection_wkt = 'PROJCS["UTM Zone 15, Northern Hemisphere",GEOGCS["Unknown datum based upon the WGS 84 ellipsoid",DATUM["Not specified (based on WGS 84 spheroid)",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]'
    extents = {
        u'x_range': 7521.0,
        u'y_range': 7741.0,
        u'properties': {
            u'projection': projection_wkt,
            u'geotransform': (523185.0, 30.0, 0.0, 2033715.0, 0.0, -30.0),
        },
        u'x_offset': 'array([ 10., 30., 0., 50.])',
        u'y_offset': 'array([-0., -0., -0., -0.])',
    }
    creation_options = new_options_for_create_raster_from_reference(
        extents, raster.DATA_SHAPE, (int(extents['x_range']), int(extents['y_range']), 1), {})
    target_tif = '/Users/erickpalacios/Documents/CONABIO/Tareas/Redisenio_MADMEX/clasificacion_landsat/landsat8/classification/rasterize2.tif'
    # First write produces a tiff backed by an empty array, to be filled by
    # the rasterize call.
    target_dataset = create_raster_tiff_from_reference(extents, target_tif, None, creation_options)
    landmask_bundle.rasterize(target_dataset, [1], [1])
    # The rasterize step mutated the dataset; resync the options before the
    # final write.
    new_options_for_create_raster_from_reference(extents, raster.DATASET, target_dataset, creation_options)
    create_raster_tiff_from_reference(extents, target_tif, target_dataset.ReadAsArray())  # persisted to disk
def test_calculate_ndvi(self):
    """Derive two NDVI feature bands from a RapidEye scene and write them next to the input."""
    from madmex.util import get_parent
    from madmex.mapper.data import raster
    from madmex.processing.raster import calculate_ndvi
    import numpy
    source_image = "/Users/erickpalacios/Documents/CONABIO/Tareas/4_RedisenioMadmex/5_Clasificacion/rapideyemapgrid/folder_test/rapideye/1649125/2014/2014-01-23/L3A/1649125_2014-01-23_RE4_3A_301519.tif"
    scene = raster.Data(source_image, "GTiff")
    pixels = scene.read_data_file_as_array()
    width, height, _ = scene.get_attribute(raster.DATA_SHAPE)
    # Two NDVI variants against band 2: one from band 4, one from band 3.
    ndvi_stack = numpy.zeros([2, width, height])
    for slot, band_index in enumerate((4, 3)):
        ndvi_stack[slot, :, :] = calculate_ndvi(pixels[band_index, :, :], pixels[2, :, :])
    destination = get_parent(source_image) + 'result_ndvi'
    raster.create_raster_tiff_from_reference(scene.metadata, destination, ndvi_stack)
def handle(self, **options):
    '''
    End-to-end object-based land-cover classification workflow:

    1. Copy the input image into the results folder and segment it with a
       dockerized segmenter launched on a remote host.
    2. Vectorize the segmentation; rasterize and then polygonize the landmask
       bundle.
    3. Compute per-object zonal statistics and build a pandas dataframe.
    4. Clip, warp and resample the training data onto the image grid, compute
       zonal histograms, and keep objects whose dominant class covers at
       least 0.75 of the object ("pure" training objects).
    5. Optionally (options['outlier'] == 'True') run a PCA-based outlier
       elimination over the training objects.
    6. Emit C5 .data/.cases/.names files, run the C5 trainer and predictor in
       a docker container on a remote host, and join predictions back onto
       the segmentation shapefile.
    7. Rasterize the predicted class and the confidence attributes into the
       final GTiff products.

    NOTE(review): this body was reconstructed from whitespace-mangled source;
    the extents chosen for the `if bundle:` and `if outlier == 'True':`
    suites are the most plausible reading — confirm against version history.
    '''
    image_to_be_classified = options['image'][0]
    #landmask_path = options['landmask_path'][0]
    outlier = options['outlier'][0]  # string flag: 'True' enables outlier elimination
    folder_results = getattr(SETTINGS, 'BIG_FOLDER')
    # Work on a copy of the input placed inside the shared results folder.
    shutil.copy(image_to_be_classified, folder_results)
    image_to_be_classified = folder_results + get_basename_of_file(image_to_be_classified)
    landmask_path = getattr(SETTINGS, 'LANDMASK_PATH')
    # Path as seen from inside the segmentation container (results bind mount).
    image_for_segmentation = '/results/' + get_basename_of_file(image_to_be_classified)
    LOGGER.info('Starting segmentation with: %s' % image_for_segmentation)
    # Segmentation parameters, forwarded verbatim to segment.py via the CLI
    # arguments assembled below.
    val_t = 50
    val_s = 0.7
    val_c = 0.3
    val_xt = 40
    val_rows = 625
    val_tile = True
    val_mp = True
    # Host-side folders bound into the docker container with -v.
    folder_and_bind_segmentation = getattr(SETTINGS, 'FOLDER_SEGMENTATION')
    folder_and_bind_license = getattr(SETTINGS, 'FOLDER_SEGMENTATION_LICENSE')
    folder_and_bind_image = getattr(SETTINGS, 'BIG_FOLDER_HOST')
    LOGGER.info('starting segmentation')
    command = 'run_container'
    hosts_from_command = get_host_from_command(command)
    LOGGER.info('The command to be executed is %s in the host %s' % (command, hosts_from_command[0].hostname))
    remote = RemoteProcessLauncher(hosts_from_command[0])
    arguments = 'docker run --rm -v ' + folder_and_bind_segmentation + ' -v ' + folder_and_bind_license + ' -v ' + folder_and_bind_image + ' madmex/segmentation python /segmentation/segment.py ' + image_for_segmentation
    arguments += ' -t ' + str(val_t) + ' -s ' + str(val_s) + ' -c ' + str(val_c) + ' --tile ' + str(val_tile) + ' --mp ' + str(val_mp) + ' --xt ' + str(val_xt) + ' --rows ' + str(val_rows)
    remote.execute(arguments)
    LOGGER.info('Finished segmentation')
    # The segmenter names its output <image>_<t>_<s without dot>_<c without dot>.tif.
    image_segmentation_file = image_to_be_classified + '_' + str(val_t) + '_' + ''.join(str(val_s).split('.')) + '_' + ''.join(str(val_c).split('.')) + '.tif'
    LOGGER.info('Starting vectorization of segmentation file: %s' % image_segmentation_file)
    image_segmentation_shp_file = image_segmentation_file + '.shp'
    vectorize_raster(image_segmentation_file, 1, image_segmentation_shp_file, 'objects', 'id')
    LOGGER.info('Finished vectorization: %s' % image_segmentation_shp_file)
    gdal_format = 'GTiff'
    image_to_be_classified_class = raster.Data(image_to_be_classified, gdal_format)
    width, height, bands = image_to_be_classified_class.get_attribute(raster.DATA_SHAPE)
    LOGGER.info('Identifying landmask %s' % landmask_path)
    bundle = _get_bundle_from_path(landmask_path)
    if bundle:
        LOGGER.info('Directory %s is a %s bundle', landmask_path, bundle.get_name())
        LOGGER.info('Rasterizing vector shape')
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.DATA_SHAPE, (width, height, 1), {})
        # Empty path: the rasterization target starts as an in-memory dataset.
        dataset_landmask_rasterized = create_raster_tiff_from_reference(image_to_be_classified_class.metadata, '', None, options_to_create)
        bundle.rasterize(dataset_landmask_rasterized, [1], [1])
        # the rasterized process changes the dataset
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.GDAL_CREATE_OPTIONS, ['COMPRESS=LZW'], {})
        image = folder_results + 'landmask_rasterized.tif'
        create_raster_tiff_from_reference(image_to_be_classified_class.metadata, image, dataset_landmask_rasterized.ReadAsArray(), options_to_create)
        LOGGER.info('Finished rasterizing vector shape')
    # NOTE(review): the steps below read landmask_rasterized.tif, which only
    # exists when the bundle branch above ran — confirm intended behavior
    # when no bundle is identified.
    LOGGER.info('Polygonizing the landmask rasterized')
    landmask_folder = folder_results + 'landmask_from_rasterize/'
    layer_landmask = 'landmask'
    landmask_file = landmask_folder + layer_landmask + '.shp'
    layer_landmask = 'landmask'  # duplicate assignment kept from original
    vectorize_raster(folder_results + 'landmask_rasterized.tif', 1, landmask_folder, layer_landmask, 'id')
    LOGGER.info('Folder of polygon: %s' % landmask_folder)
    image_segmentation_file_class = raster.Data(image_segmentation_file, gdal_format)
    LOGGER.info('Reading array of %s' % image_for_segmentation)
    array_sg_raster = image_segmentation_file_class.read_data_file_as_array()
    # Each distinct pixel value in the segmentation raster is one object label.
    unique_labels_for_objects = numpy.unique(array_sg_raster)
    LOGGER.info('Calculating zonal stats for :%s' % image_to_be_classified)
    LOGGER.info('Reading array of %s' % image_to_be_classified)
    array_image_to_be_classified = image_to_be_classified_class.read_data_file_as_array()
    array_zonal_statistics = calculate_zonal_statistics(array_image_to_be_classified, array_sg_raster, unique_labels_for_objects)
    LOGGER.info('finished zonal statistics')
    array_zonal_statistics_labeled = append_labels_to_array(array_zonal_statistics, unique_labels_for_objects)
    LOGGER.info('Shape of array of zonal statistics labeled %s %s' % (array_zonal_statistics_labeled.shape[0], array_zonal_statistics_labeled.shape[1]))
    LOGGER.info('Building data frame')
    dataframe_zonal_statistics = create_names_of_dataframe_from_filename(build_dataframe_from_array(array_zonal_statistics_labeled.T), array_zonal_statistics_labeled.shape[0], get_basename_of_file(image_to_be_classified))
    LOGGER.info('Filling NaN with zeros')
    dataframe_zonal_statistics = dataframe_zonal_statistics.fillna(0)
    file_name = folder_results + 'dataframe_zonal_statistics'
    dataframe_zonal_statistics.to_csv(file_name, sep='\t', encoding='utf-8', index = False)
    LOGGER.info('Working with the training data')
    training_data_file = getattr(SETTINGS, 'TRAINING_DATA')
    LOGGER.info('Clipping training_data_file: %s with: %s' % (training_data_file, landmask_file))
    training_data_file_clipped = folder_results + get_basename_of_file(training_data_file) + '_cropped_subprocess_call.tif'
    # Clip the training raster to the landmask polygon via gdalwarp.
    command = [
        'gdalwarp', '-cutline', landmask_file,
        '-crop_to_cutline', '-of', 'GTiff', '-co', 'compress=lzw', '-co', 'tiled=yes',
        training_data_file, training_data_file_clipped
    ]
    subprocess.call(command)
    LOGGER.info('Finished clipping of training data file')
    LOGGER.info('Starting warping of file: %s according to %s ' % (training_data_file_clipped, image_segmentation_file))
    dataset_warped_training_data_file = warp_raster_from_reference(training_data_file_clipped, image_segmentation_file_class.data_file, None)
    LOGGER.info('Starting resizing of array of training file: %s' % training_data_file_clipped)
    array_resized_and_warped_training_data_file = get_array_resized_from_reference_dataset(dataset_warped_training_data_file, image_segmentation_file_class.data_file)
    import gdal
    training_data_file_resized_and_warped = folder_results + get_basename_of_file(training_data_file) + '_resized_and_warped.tif'
    options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.GDAL_CREATE_OPTIONS, ['TILED=YES', 'COMPRESS=LZW', 'INTERLEAVE=BAND'], {})
    create_raster_tiff_from_reference(image_to_be_classified_class.metadata, training_data_file_resized_and_warped, array_resized_and_warped_training_data_file, options_to_create, data_type = gdal.GDT_Int32)
    LOGGER.info('Starting resampling')
    array_training_data_resampled = resample_numpy_array(array_resized_and_warped_training_data_file, width, height, interpolation = 'nearest')
    training_data_file_resampled = folder_results + get_basename_of_file(training_data_file) + '_resampled_from_resized_and_warped.tif'
    create_raster_tiff_from_reference(image_to_be_classified_class.metadata, training_data_file_resampled, array_training_data_resampled, options_to_create, data_type = gdal.GDT_Int32)
    LOGGER.info('Calculating zonal histograms for file: %s according to: %s' % (training_data_file_resampled, image_segmentation_file))
    unique_classes = numpy.unique(array_training_data_resampled)
    array_of_distribution_of_classes_per_object_segmentation = calculate_zonal_histograms(array_training_data_resampled, unique_classes, array_sg_raster, unique_labels_for_objects)
    LOGGER.info('Shape of zonal histogram: %s %s' % (array_of_distribution_of_classes_per_object_segmentation.shape[0], array_of_distribution_of_classes_per_object_segmentation.shape[1]))
    # Release the large resampled array before the next big allocation.
    array_training_data_resampled = None
    LOGGER.info('Getting objects that have a class of at least .75 proportion within zonal histogram')
    dataframe_of_objects_for_training_data = get_objects_by_relative_proportion_from_raster_as_dataframe(array_of_distribution_of_classes_per_object_segmentation, unique_labels_for_objects, unique_classes, ["id", "given"], 0.75)
    file_name = folder_results + 'dataframe_of_objects_for_training_data'
    dataframe_of_objects_for_training_data.to_csv(file_name, sep='\t', encoding='utf-8', index = False)
    LOGGER.info('Number of rows and columns of dataframe of pure objects of training data %s %s' % (len(dataframe_of_objects_for_training_data.index), len(dataframe_of_objects_for_training_data.columns) ))
    array_of_distribution_of_classes_per_object_segmentation = None
    # Drop the in-memory frame and re-read it from the csv just written.
    dataframe_of_objects_for_training_data = None
    dataframe_of_objects_for_training_data = pandas.read_csv(file_name, sep='\t')
    LOGGER.info('Joining dataframe of dataframe zonal statistics and dataframe of objects of training data')
    dataframe_all_joined_classified = join_dataframes_by_column_name([dataframe_zonal_statistics, dataframe_of_objects_for_training_data], 'id')
    LOGGER.info('Number of rows and columns of dataframe joined %s %s' % (len(dataframe_all_joined_classified.index), len(dataframe_all_joined_classified.columns) ))
    if outlier == 'True':
        LOGGER.info('Starting outlier elimination with dataframe of zonal statistics and dataframe of pure objects of training data')
        LOGGER.info('Starting principal component analysis')
        # Keep enough components to retain 95% of variance; 'id'/'given' are
        # excluded from the feature columns.
        array_reduced_pca = reduce_dimensionality(dataframe_all_joined_classified, .95, ['id', 'given'])
        LOGGER.info('Shape of reduced array of zonal statistics and pure objects of training data by pca: %s %s' %(array_reduced_pca.shape[0], array_reduced_pca.shape[1]) )
        labels_of_objects_reduced_dataframe = dataframe_all_joined_classified['id'].values
        LOGGER.info('Appending labels')
        array_reduced_pca_labeled = append_labels_to_array(array_reduced_pca.T, labels_of_objects_reduced_dataframe)
        LOGGER.info('Shape of array reduced by pca and labeled: %s %s' %(array_reduced_pca_labeled.shape[0], array_reduced_pca_labeled.shape[1]))
        LOGGER.info('Building data frame')
        dataframe_reduced_pca_file = folder_results + 'dataframe_joined_for_zonal_statistics_and_pure_objects_of_training_data_reduced_by_pca'
        dataframe_reduced_pca = create_names_of_dataframe_from_filename(build_dataframe_from_array(array_reduced_pca_labeled.T), array_reduced_pca_labeled.shape[0], get_basename_of_file(dataframe_reduced_pca_file))
        dataframe_reduced_pca.to_csv(dataframe_reduced_pca_file, sep=',', encoding='utf-8', index = False)
        LOGGER.info('Starting with elimination of outliers')
        LOGGER.info('Joining reduced dataframe by pca with object ids and dataframe of pure objects of training data')
        dataframe_reduced_pca_with_classes = join_dataframes_by_column_name([dataframe_reduced_pca, dataframe_of_objects_for_training_data], 'id')
        LOGGER.info('Number of rows and columns of dataframe joined: (%s,%s)' %(len(dataframe_reduced_pca_with_classes.index), len(dataframe_reduced_pca_with_classes.columns)))
        dataframe_reduced_pca_with_classes.to_csv(dataframe_reduced_pca_file + 'classes', sep = ',', encoding = 'utf8', index = False)
        unique_classes = numpy.unique(dataframe_of_objects_for_training_data['given'].values)
        object_ids_outlier_elimination = outlier_elimination_for_dataframe(dataframe_reduced_pca_with_classes, 'id', 'given', 'id', 3, unique_classes, 0.15)
        object_ids_outlier_elimination_file = folder_results + 'dataframe_object_ids_outlier_elimination'
        object_ids_outlier_elimination.to_csv(object_ids_outlier_elimination_file, sep = ',', encoding = 'utf-8', index = False)
        LOGGER.info('Joining all dataframes according to ids of outlier elimination ')
        # Restrict the classified frame to the ids that survived elimination.
        dataframe_all_joined_classified = join_dataframes_by_column_name([object_ids_outlier_elimination, dataframe_all_joined_classified], 'id')
        LOGGER.info('Number of rows and columns of dataframe joined classified: (%s,%s)' %(len(dataframe_all_joined_classified.index), len(dataframe_all_joined_classified.columns)))
    # '?' marks the class as unknown for the objects to be classified.
    dataframe_zonal_statistics['given'] = '?'
    LOGGER.info('Number of rows and columns of dataframe for classifying: (%s,%s)' %(len(dataframe_zonal_statistics.index), len(dataframe_zonal_statistics.columns)))
    # Object id 0 is excluded from classification (presumably background /
    # nodata — confirm).
    index_of_objects_not_id_zero = dataframe_zonal_statistics['id'] > 0
    dataframe_all_joined_for_classifying = dataframe_zonal_statistics[index_of_objects_not_id_zero]
    LOGGER.info('Number of rows and columns of dataframe for classifying after removing object with id zero: (%s,%s)' %(len(dataframe_all_joined_for_classifying.index), len(dataframe_all_joined_for_classifying.columns)))
    LOGGER.info('Generating data file')
    dataframe_all_joined_classified_file = folder_results + 'C5.data'
    dataframe_all_joined_classified.to_csv(dataframe_all_joined_classified_file, sep = ',', encoding = 'utf-8', index = False, header = False)
    LOGGER.info('Generating cases file')
    dataframe_all_joined_for_classifying_file = folder_results + 'C5.cases'
    dataframe_all_joined_for_classifying.to_csv(dataframe_all_joined_for_classifying_file, sep = ',', encoding = 'utf-8', index = False, header = False)
    LOGGER.info('Generating names file')
    unique_classes = numpy.unique(dataframe_all_joined_classified['given'].values)
    name_namesfile = folder_results + 'C5.names'
    generate_namesfile(dataframe_all_joined_classified.columns, unique_classes, name_namesfile, 'id', 'given')
    # Train C5 and then run its predictor, both inside docker on a remote host.
    command = 'run_container'
    hosts_from_command = get_host_from_command(command)
    LOGGER.info('The command to be executed is %s in the host %s' % (command, hosts_from_command[0].hostname))
    remote = RemoteProcessLauncher(hosts_from_command[0])
    folder_and_bind_c5 = getattr(SETTINGS, 'BIG_FOLDER_HOST')
    arguments = 'docker run --rm -v ' + folder_and_bind_c5 + ' madmex/c5_execution ' + 'c5.0 -b -f /results/C5'
    LOGGER.info('Beginning C5')
    remote.execute(arguments)
    LOGGER.info('Begining predict')
    arguments = 'docker run --rm -v ' + folder_and_bind_c5 + ' madmex/c5_execution ' + 'predict -f /results/C5'
    remote = RemoteProcessLauncher(hosts_from_command[0])
    # True: capture the predictor's stdout for parsing below.
    output = remote.execute(arguments, True)
    LOGGER.info('Writing C5 result to csv')
    C5_result = write_C5_result_to_csv(output, folder_results)
    LOGGER.info('Using result of C5: %s for generating land cover shapefile and raster image' % C5_result)
    LOGGER.info('Using result of C5 for generating land cover shapefile and raster image')
    C5_result = folder_results + 'C5_result.csv'
    dataframe_c5_result = pandas.read_csv(C5_result)
    FORMAT = 'ESRI Shapefile'
    image_segmentation_shp_class = vector.Data(image_segmentation_shp_file, FORMAT)
    LOGGER.info('Joining dataframe %s to %s' %(C5_result, image_segmentation_shp_file))
    dataframe_joined_shp_segmentation_and_c5_result = join_C5_dataframe_and_shape(image_segmentation_shp_class, 'id', dataframe_c5_result, 'id')
    LOGGER.info('Number of rows and columns of dataframe joined: (%s,%s)' %(len(dataframe_joined_shp_segmentation_and_c5_result.index), len(dataframe_joined_shp_segmentation_and_c5_result.columns)))
    dataframe_joined_shp_segmentation_and_c5_result_file = folder_results + 'dataframe_joined_shp_segmentation_and_c5_result.csv'
    LOGGER.info('Writing csv of join between c5 result and segmentation shape: %s' % dataframe_joined_shp_segmentation_and_c5_result_file)
    dataframe_joined_shp_segmentation_and_c5_result.to_csv(dataframe_joined_shp_segmentation_and_c5_result_file, sep =',', encoding = 'utf8', index = False)
    LOGGER.info('Writing C5 result joined with segmentation shape to shapefile')
    segmentation_and_c5_result_file_vectorized_folder = folder_results + 'segmentation_and_c5_result_vectorized/'
    create_directory_path(segmentation_and_c5_result_file_vectorized_folder)
    # ogr2ogr with an sqlite-dialect query rebuilds geometries from WKT and
    # carries over the predicted class and confidence columns.
    sql = "SELECT a.id, a.predicted, a.confidence, st_geomfromtext(a.geom," + image_segmentation_shp_class.srid + ") as geometry "
    sql += "from dataframe_joined_shp_segmentation_and_c5_result a"
    shp_result = segmentation_and_c5_result_file_vectorized_folder + '/C5_result_joined_segmentation_shape.shp'
    command = [
        'ogr2ogr', shp_result,
        dataframe_joined_shp_segmentation_and_c5_result_file,
        '-dialect', 'sqlite', '-sql', sql
    ]
    subprocess.call(command)
    LOGGER.info('Rasterizing segmentation and c5 result shape of folder %s' % segmentation_and_c5_result_file_vectorized_folder)
    LOGGER.info('Identifying segmentation and c5 shape folder %s' % segmentation_and_c5_result_file_vectorized_folder)
    bundle = _get_bundle_from_path(segmentation_and_c5_result_file_vectorized_folder)
    if bundle:
        LOGGER.info('Directory %s is a %s bundle', segmentation_and_c5_result_file_vectorized_folder, bundle.get_name())
        LOGGER.info('Rasterizing vector shape to get land cover tif')
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.DATA_SHAPE, (width, height, 1), {})
        dataset_shape_sg_and_c5_rasterized = create_raster_tiff_from_reference(image_to_be_classified_class.metadata, '', None, options_to_create)
        # Burn the predicted class attribute into the in-memory dataset.
        bundle.rasterize(dataset_shape_sg_and_c5_rasterized, [1], None, ["ATTRIBUTE=predicted" ])
        # the rasterized process changes the dataset
        options_to_create = new_options_for_create_raster_from_reference(image_to_be_classified_class.metadata, raster.GDAL_CREATE_OPTIONS, ['COMPRESS=LZW'], {})
        image = folder_results + 'madmex_lcc_prueba.tif'
        create_raster_tiff_from_reference(image_to_be_classified_class.metadata, image, dataset_shape_sg_and_c5_rasterized.ReadAsArray(), options_to_create, data_type = gdal.GDT_Int32)
        LOGGER.info('Finished rasterizing vector shape')
        LOGGER.info('Rasterizing vector shape to get confidence tif')
        # Re-burn the same dataset with the confidence attribute.
        bundle.rasterize(dataset_shape_sg_and_c5_rasterized, [1], None, ["ATTRIBUTE=confidence" ])
        image = folder_results + 'madmex_lcc_confidence_prueba.tif'
        create_raster_tiff_from_reference(image_to_be_classified_class.metadata, image, dataset_shape_sg_and_c5_rasterized.ReadAsArray(), options_to_create)
        LOGGER.info('Finished rasterizing vector shape')
    LOGGER.info('Finished workflow classification :)')