def update_icemap_mosaic(inp_filename, inp_data, out_filename, out_domain, out_metadata):
    """Blend one classified scene into the running ice-map mosaic GeoTIFF.

    Parameters
    ----------
    inp_filename : str
        Path to the classified scene (openable by Nansat).
    inp_data : dict or None
        Cached ``{'arr': ..., 'mask': ...}`` already reprojected onto
        *out_domain*; when None, it is computed here from *inp_filename*.
    out_filename : str
        Mosaic GeoTIFF path; updated in place if it already exists.
    out_domain : Domain
        Target grid of the mosaic.
    out_metadata : dict
        Extra metadata written to the exported file.

    Returns
    -------
    dict
        The (possibly newly computed) *inp_data*, so the caller can reuse it.
    """
    # Start from the existing mosaic band, or an all-255 ("no data") canvas
    if os.path.exists(out_filename):
        mosaic = Nansat(out_filename)[1]
    else:
        mosaic = np.full(out_domain.shape(), 255, np.uint8)

    # The scene is always opened: its metadata is copied into the output below
    scene = Nansat(inp_filename)
    if inp_data is None:
        # Reproject the classification (band 1) and its mask (band 2)
        scene.reproject_gcps()
        scene.reproject(out_domain)
        inp_data = dict(arr=scene[1], mask=scene[2])

    # Overwrite mosaic pixels wherever the scene has valid classified values
    valid = np.logical_and(inp_data['mask'] == 1, inp_data['arr'] < 255)
    mosaic[valid] = inp_data['arr'][valid]

    # Export the merged mosaic as an LZW-compressed GeoTIFF
    merged = Nansat.from_domain(out_domain)
    merged.add_band(array=mosaic, parameters={'name': 'classification'})
    merged.set_metadata(scene.get_metadata())
    merged.set_metadata(out_metadata)
    merged = add_colortable(merged)
    merged.export(out_filename, driver='GTiff', options=['COMPRESS=LZW'])
    return inp_data
def test_crop_gcpproj(self):
    """Cropping after reproject_gcps keeps stereographic GCP coordinates."""
    nobj = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    nobj.reproject_gcps()
    extent = nobj.crop(10, 20, 50, 60)
    # Median absolute GCP X: stereographic coords are metres, so > 360
    gcp_x = np.array([gcp.GCPX for gcp in nobj.vrt.dataset.GetGCPs()])
    xmed = abs(np.median(gcp_x))
    # First token of the PROJ4 string identifies the projection
    proj4 = NSR(nobj.vrt.dataset.GetGCPProjection()).ExportToProj4()
    gcpproj = proj4.split(' ')[0]
    self.assertTrue(xmed > 360)
    self.assertTrue(gcpproj == '+proj=stere')
def test_reproject_gcps_on_repro_gcps(self):
    """Reprojecting onto a GCP-reprojected target gives matching shapes."""
    src = Nansat(self.test_file_stere, log_level=40, mapper=self.default_mapper)
    target = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    target.reproject_gcps()
    src.reproject(target)
    tmpfilename = os.path.join(self.tmp_data_path,
                               'nansat_reproject_gcps_on_repro_gcps.png')
    src.write_figure(tmpfilename, 2, clim='hist')
    self.assertEqual(src.shape(), target.shape())
    self.assertEqual(type(src[1]), np.ndarray)
def save_ice_map(inp_filename, raw_filename, classifier_filename, threads, source, quicklook, force): """ Load texture features, apply classifier and save ice map """ # get filenames out_filename = inp_filename.replace('_texture_features.npz', '_classified_%s.tif' % source) if os.path.exists(out_filename) and not force: print('Processed file %s already exists.' % out_filename) return out_filename # import classifier clf = pickle.load(open(classifier_filename, "rb")) clf.n_jobs = threads # get texture features npz = np.load(inp_filename) features = np.vstack([ npz['textureFeatures'].item()['HH'], npz['textureFeatures'].item()['HV'], ]) imgSize = features.shape[1:] features = features.reshape((26, np.prod(imgSize))).T gpi = np.isfinite(features.sum(axis=1)) result = clf.predict(features[gpi, :]) classImage = np.ones(np.prod(imgSize)) * 255 classImage[gpi] = result classImage = classImage.reshape(imgSize) img_shape = classImage.shape # open original file to get geometry raw_nansat = Nansat(raw_filename) # crop and resize original Nansat to match the ice map raw_shape = raw_nansat.shape() crop = [rshape % ishape for (rshape, ishape) in zip(raw_shape, img_shape)] raw_nansat.crop(0, 0, raw_shape[1] - crop[1], raw_shape[0] - crop[0]) raw_nansat.resize(height=img_shape[0]) raw_nansat.reproject_gcps() # create new Nansat object and add ice map ice_map = Nansat.from_domain(domain=raw_nansat, array=classImage.astype(np.uint8)) ice_map.set_metadata(raw_nansat.get_metadata()) ice_map.set_metadata('entry_title', 'S1_SAR_ICE_MAP') ice_map = add_colortable(ice_map, source) ice_map.export(out_filename, bands=[1], driver='GTiff') if quicklook: rgb = colorcode_array(classImage, source) plt.imsave(out_filename.replace('.tif', '.png'), rgb) return out_filename
def _get_normalized_attributes(self, dataset_info, *args, **kwargs):
    """Get normalized dataset attributes using nansat.

    Opens *dataset_info* (a local path or URL) with Nansat and builds a
    dict of normalized attributes: service type, compulsory metadata
    (titles, time coverage, platform/instrument), optional vocabulary
    lookups, the dataset border geometry and dataset parameters.

    Parameters
    ----------
    dataset_info : str
        Path or URL of the dataset to ingest.
    **kwargs
        ``n_points`` (int, default 10): points per border edge;
        ``nansat_options`` (dict): extra keyword arguments for Nansat.

    Returns
    -------
    dict
        The normalized attributes.

    Raises
    ------
    ValueError
        If the URL scheme is ftp (nansat cannot open remote ftp files).
    TypeError
        If the metadata 'dataset_parameters' entry is not a JSON list.
    """
    normalized_attributes = {}
    n_points = int(kwargs.get('n_points', 10))
    nansat_options = kwargs.get('nansat_options', {})
    url_scheme = urlparse(dataset_info).scheme
    # Pick the geospaas service from the URL scheme
    # (idiomatic "'x' not in y" instead of "not 'x' in y")
    if 'http' not in url_scheme and 'ftp' not in url_scheme:
        normalized_attributes['geospaas_service_name'] = FILE_SERVICE_NAME
        normalized_attributes['geospaas_service'] = LOCAL_FILE_SERVICE
    elif 'http' in url_scheme and 'ftp' not in url_scheme:
        normalized_attributes['geospaas_service_name'] = DAP_SERVICE_NAME
        normalized_attributes['geospaas_service'] = OPENDAP_SERVICE
    elif 'ftp' in url_scheme:
        raise ValueError(
            f"Can't ingest '{dataset_info}': nansat can't open remote ftp files"
        )

    # Open file with Nansat
    nansat_object = Nansat(nansat_filename(dataset_info),
                           log_level=self.LOGGER.getEffectiveLevel(),
                           **nansat_options)

    # get metadata from Nansat and get objects from vocabularies
    n_metadata = nansat_object.get_metadata()

    # set compulsory metadata (source)
    normalized_attributes['entry_title'] = n_metadata.get('entry_title', 'NONE')
    normalized_attributes['summary'] = n_metadata.get('summary', 'NONE')
    # time coverage is parsed and normalized to timezone-aware UTC
    normalized_attributes['time_coverage_start'] = dateutil.parser.parse(
        n_metadata['time_coverage_start']).replace(tzinfo=tzutc())
    normalized_attributes['time_coverage_end'] = dateutil.parser.parse(
        n_metadata['time_coverage_end']).replace(tzinfo=tzutc())
    normalized_attributes['platform'] = json.loads(n_metadata['platform'])
    normalized_attributes['instrument'] = json.loads(n_metadata['instrument'])
    normalized_attributes['specs'] = n_metadata.get('specs', '')
    # fall back to a generated unique id when the file carries none
    normalized_attributes['entry_id'] = n_metadata.get(
        'entry_id', 'NERSC_' + str(uuid.uuid4()))

    # set optional ForeignKey metadata from Nansat or from defaults
    normalized_attributes['gcmd_location'] = n_metadata.get(
        'gcmd_location', pti.get_gcmd_location('SEA SURFACE'))
    normalized_attributes['provider'] = pti.get_gcmd_provider(
        n_metadata.get('provider', 'NERSC'))
    normalized_attributes['iso_topic_category'] = n_metadata.get(
        'ISO_topic_category', pti.get_iso19115_topic_category('Oceans'))

    # Find coverage to set number of points in the geolocation;
    # GCP-based datasets must be reprojected before taking the border
    if nansat_object.vrt.dataset.GetGCPs():
        nansat_object.reproject_gcps()
    normalized_attributes['location_geometry'] = GEOSGeometry(
        nansat_object.get_border_wkt(n_points=n_points), srid=4326)

    # dataset_parameters, when present, must be a JSON list of parameters
    json_dumped_dataset_parameters = n_metadata.get('dataset_parameters', None)
    if json_dumped_dataset_parameters:
        json_loads_result = json.loads(json_dumped_dataset_parameters)
        if isinstance(json_loads_result, list):
            normalized_attributes['dataset_parameters'] = [
                get_cf_or_wkv_standard_name(dataset_param)
                for dataset_param in json_loads_result
            ]
        else:
            raise TypeError(
                f"Can't ingest '{dataset_info}': the 'dataset_parameters' section of the "
                "metadata returned by nansat is not a JSON list")
    else:
        normalized_attributes['dataset_parameters'] = []

    return normalized_attributes