def preprocess(self):
    '''
    Calculate top-of-atmosphere (TOA) reflectance for the RapidEye scene,
    persist it into a 'TOA' directory next to the scene, and then create a
    cloud mask with the configured algorithm (anomaly detection or time
    series reference).
    '''
    # Sun geometry and acquisition date come from the sensor metadata parser;
    # they drive the radiometric correction below.
    solar_zenith = self.get_sensor().parser.get_attribute(rapideye.SOLAR_ZENITH)
    data_acquisition_date = self.get_sensor().parser.get_attribute(rapideye.ACQUISITION_DATE)
    solar_azimuth = self.get_sensor().parser.get_attribute(rapideye.SOLAR_AZIMUTH)
    geotransform = self.get_raster().get_attribute(raster.GEOTRANSFORM)
    data = self.get_raster().read_data_file_as_array()
    # Radiance -> TOA reflectance, corrected for the sun-earth distance at
    # acquisition time and the solar zenith angle.
    sun_earth_distance = calculate_distance_sun_earth(data_acquisition_date)
    top_of_atmosphere_data = calculate_toa_rapideye(calculate_rad_rapideye(data), sun_earth_distance, solar_zenith)
    top_of_atmosphere_directory = create_file_name(get_parent(self.path), 'TOA')
    create_directory_path(top_of_atmosphere_directory)
    output_file = create_file_name(top_of_atmosphere_directory, get_base_name(self.get_files()[2]) + '_toa.tif')  # TODO: change [2] in self.get_files()[2]
    create_raster_from_reference(output_file, top_of_atmosphere_data, self.file_dictionary[_IMAGE], data_type=NumericTypeCodeToGDALTypeCode(numpy.float32))
    LOGGER.debug('Top of atmosphere file was created.')
    cloud_output_file = create_file_name(top_of_atmosphere_directory, get_base_name(self.get_files()[2]) + '_cloud.tif')
    if self.algorithm == ANOMALY_DETECTION:
        # Mask is derived from the TOA data itself.
        LOGGER.debug('Cloud mask by anomaly detection process.')
        clouds = self.anomaly_detection_cloud_mask(top_of_atmosphere_data, cloud_output_file, solar_zenith, solar_azimuth, geotransform)
    elif self.algorithm == TIME_SERIES:
        # Mask is derived from the raw data against a time-series reference
        # for this tile. NOTE(review): this branch uses `data`, not the TOA
        # array — confirm that is intentional.
        LOGGER.debug('Cloud mask by reference with time series process.')
        tile_id = self.get_sensor().get_attribute(TILE_ID)
        clouds = self.masking_with_time_series(data, cloud_output_file, solar_zenith, solar_azimuth, geotransform, tile_id)
    # NOTE(review): if self.algorithm matches neither constant, `clouds` is
    # unbound and the next call raises NameError — consider an explicit error.
    create_raster_from_reference(cloud_output_file, clouds, self.file_dictionary[_IMAGE], data_type=NumericTypeCodeToGDALTypeCode(numpy.float32))
    LOGGER.info('Cloud mask was created.')
def handle(self, **options):
    '''
    Run the masking routine over every image path given in the options,
    writing each result as a .tif into the output directory and reporting
    how long each image took.
    '''
    output_directory = options['output'][0]
    for image_path in options['path']:
        print(image_path)
        mask_filename = '%s.tif' % get_base_name(image_path)
        print(mask_filename)
        mask_path = create_file_name(output_directory, mask_filename)
        print(mask_path)
        started_at = time.time()
        self.mask_iterating_values(image_path, mask_path)
        print("--- %s seconds ---" % (time.time() - started_at))
        print('Dataset was written.')
def get_thumbnail_with_path(self, thumbnail_path):
    '''
    Creates a true color jpeg thumbnail for the scene inside a 'thumbnail'
    subdirectory of the given path, converting the browse image with
    gdal_translate.
    '''
    from subprocess import call
    browse_file = self.file_dictionary[_BROWSE]
    output_directory = create_file_name(thumbnail_path, 'thumbnail')
    create_directory_path(output_directory)
    thumbnail_file = create_file_name(output_directory, '%s.jpg' % get_base_name(browse_file))
    # NOTE(review): gdal_translate location is hardcoded to a macOS GDAL
    # framework install; consider resolving the binary from PATH instead.
    call(['/Library/Frameworks/GDAL.framework/Programs/gdal_translate', browse_file, '-of', 'JPEG', thumbnail_file])
def handle(self, **options): ''' This is the code that does the ingestion. ''' interest_band = 1 for image_path in options['path']: print image_path ds = gdal.Open(image_path) bands = ds.RasterCount geotransform = ds.GetGeoTransform() x_resolution = geotransform[1] y_resolution = geotransform[5] pixel_area = abs(x_resolution * y_resolution) array = numpy.array(ds.GetRasterBand(interest_band).ReadAsArray()) print numpy.unique(array) flat = numpy.ravel(array) length = len(flat) parent = get_parent(image_path) basename = '%s.txt' % get_base_name(image_path) target = create_file_name(parent, basename) count = 1 values = {} progress = 0 for value in flat: count = count + 1 if not values.get(value): values[value] = 1 else: values[value] = values[value] + 1 if count % 1000000 == 0: aux = progress progress = math.floor(100 * count / float(length)) if not aux == progress: print str(int(progress)) + '%\r' added = self.add_up(INITIAL_ARRAY, values) area = self.transform_to_area(added, pixel_area) with open(target, "a") as f: json.dump(area, f)