# A GeometricFeatures object pointing at a local cache of geometric data;
# it knows which branch of the geometric_features data to use to download
# anything missing from the cache.
gf = GeometricFeatures('./geometric_data')

# Build a FeatureCollection holding one combined feature per IMBIE basin,
# using the ice-shelf regions tagged with each of the 27 basin tags.
fc = FeatureCollection()
for basinIndex in range(1, 28):
    print('Adding feature from IMBIE basin {:d}'.format(basinIndex))
    name = 'Antarctica_IMBIE{:d}'.format(basinIndex)
    # read all ice-shelf regions carrying this basin's tag and collapse
    # them into a single feature named for the basin
    fcBasin = gf.read(componentName='iceshelves', objectType='region',
                      tags=[name]).combine(featureName=name)
    # fold the basin's feature into the full collection
    fc.merge(fcBasin)

# write the collection of all basins to a geojson file
fc.to_geojson('Extended_Antarctic_Basins.geojson')

if plot:
    fc.plot(projection='southpole')
    plt.show()
# fold the bed mask into both the ice and grounded-ice masks
iceMask = numpy.logical_or(iceMask, bedMask)
groundedMask = numpy.logical_or(groundedMask, bedMask)

# the two coverage masks that become BedMachine region features
masks = {
    'AntarcticIceCoverage': iceMask,
    'AntarcticGroundedIceCoverage': groundedMask,
}

# build one region feature per mask, with geometry extracted from the mask
fc = FeatureCollection()
for regionName, mask in masks.items():
    fc.add_feature({
        'properties': {
            'name': regionName,
            'component': 'bedmachine',
            'author':
                'Morlighem et al. (2019) doi:10.1038/s41561-019-0510-8',
            'object': 'region',
            'tags': '',
        },
        'geometry': extract_geometry(mask),
    })

fc.to_geojson(out_file_name)

# split the features into individual files in the local geometric data
# cache, then regenerate and install the names-and-tags database
gf = GeometricFeatures(cacheLocation='./geometric_data')
gf.split(fc)
write_feature_names_and_tags(gf.cacheLocation)
os.rename('features_and_tags.json',
          'geometric_features/features_and_tags.json')
def main():
    """
    Build the ISMIP6 Antarctic ocean-region features bounded by the -1500 m
    bathymetry contour, write them to a geojson file, and install them in
    the geometric data cache.
    """
    author = 'Xylar Asay-Davis, Alice Barthel, Nicolas Jourdain'
    tags = 'Antarctic;ISMIP6'

    # a geometric features object that knows about the geometric data cache
    # up a couple of directories
    gf = GeometricFeatures('../../geometric_data')

    bedmap2_bin_to_netcdf('bedmap2.nc')
    fcContour1500 = get_longest_contour(contourValue=-1500., author=author)

    fc = FeatureCollection()

    # the five regions that are each a single lon/lat quadrilateral
    regions = [
        ('ISMIP6 Weddell Sea',
         [-65., -25., -25., -65.], [-80., -80., -77., -71.]),
        ('ISMIP6 Amundsen Sea',
         [-128., -128., -90., -90.], [-76., -69., -69., -76.]),
        ('ISMIP6 Amery Sector',
         [45., 45., 90., 90.], [-70., -60., -60., -70.]),
        ('ISMIP6 Dronning Maud Land',
         [-22.5, -22.5, 22.5, 22.5], [-75., -65., -65., -75.]),
        ('ISMIP6 Totten Region',
         [110., 110., 130., 130.], [-70., -60., -60., -70.]),
    ]
    for name, lons, lats in regions:
        fc.merge(shelf_polygon(lons, lats, name=name, author=author,
                               tags=tags, fcContour=fcContour1500))

    # the Ross Sea is assembled from two halves (it straddles the
    # antimeridian) and then combined into a single feature
    fc_ross = shelf_polygon([165., 165., 180., 180.],
                            [-80., -71., -73., -80.],
                            name='ISMIP6 Western Ross Sea',
                            author=author, tags=tags,
                            fcContour=fcContour1500)
    fc_ross.merge(shelf_polygon([-180., -180., -150., -150.],
                                [-80., -73., -77., -80.],
                                name='ISMIP6 Eastern Ross Sea',
                                author=author, tags=tags,
                                fcContour=fcContour1500))

    # carry the tags and depth bounds of the first half over to the
    # combined feature, since combine() does not preserve them
    old_props = fc_ross.features[0]['properties']
    fc_ross = fc_ross.combine('ISMIP6 Ross Sea')
    props = fc_ross.features[0]['properties']
    for prop in ['tags', 'zmin', 'zmax']:
        props[prop] = old_props[prop]
    fc.merge(fc_ross)

    fc.plot(projection='southpole')
    fc.to_geojson('ismip6_antarctic_ocean_regions.geojson')

    # "split" these features into individual files in the geometric data
    # cache and update the database of feature names and tags
    gf.split(fc)
    write_feature_names_and_tags(gf.cacheLocation)
    # move the resulting file into place
    shutil.copyfile('features_and_tags.json',
                    '../../geometric_features/features_and_tags.json')

    plt.show()
def main():
    """
    Build Antarctic ocean-region features following the Timmermann and Orsi
    definitions, split into shelf/deep pieces along bathymetry contours,
    write them to a geojson file, and install them in the geometric data
    cache.
    """
    author = 'Xylar Asay-Davis'
    timTags = 'Antarctic;Timmermann'
    orsiTags = 'Antarctic;Orsi'

    # a geometric features object that knows about the geometric data cache
    # up a couple of directories
    gf = GeometricFeatures('../../geometric_data')

    bedmap2_bin_to_netcdf('bedmap2.nc')
    fcContour700 = get_longest_contour(contourValue=-700., author=author)
    fcContour800 = get_longest_contour(contourValue=-800., author=author)

    fc = FeatureCollection()

    fcWeddell = split_rectangle(lon0=-63., lon1=0., lat0=-80., lat1=-65.,
                                name='Weddell Sea', author=author,
                                tags=timTags, fcContour=fcContour800)
    # drop the unsplit Weddell Sea (the first feature) because we're not
    # happy with this definition, but keep the deep/shelf pieces
    fcWeddell.features = fcWeddell.features[1:]
    fc.merge(fcWeddell)

    fcEW = split_rectangle(lon0=-20., lon1=25., lat0=-80., lat1=-55.,
                           name='Eastern Weddell Sea', author=author,
                           tags=orsiTags, fcContour=fcContour800)
    fc.merge(fcEW)

    fcWW = split_rectangle(lon0=-63., lon1=-20., lat0=-80., lat1=-60.,
                           name='Western Weddell Sea', author=author,
                           tags=orsiTags, fcContour=fcContour800)
    fc.merge(fcWW)

    # the Weddell feature is the sum of the Eastern and Western features
    # before splitting into shelf/deep (feature 0 of each collection)
    fcWeddell = FeatureCollection()
    for fcPart in (fcEW, fcWW):
        fcWeddell.features.append(fcPart.features[0])
    fcWeddell = fcWeddell.combine('Weddell Sea')
    props = fcWeddell.features[0]['properties']
    props['tags'] = orsiTags
    props['zmin'] = -1000.
    props['zmax'] = -400.
    fc.merge(fcWeddell)

    # a wider rectangle also named 'Weddell Sea'
    fc.merge(make_rectangle(lon0=-63., lon1=45., lat0=-80., lat1=-58.,
                            name='Weddell Sea', author=author,
                            tags=orsiTags))

    # the remaining sectors all share the same southern bound (80S); each is
    # split shelf/deep along the given contour
    sectors = [
        dict(lon0=-100., lon1=-63., lat1=-67., name='Bellingshausen Sea',
             tags=timTags, fcContour=fcContour700),
        dict(lon0=-140., lon1=-100., lat1=-67., name='Amundsen Sea',
             tags=timTags, fcContour=fcContour800),
        dict(lon0=-180., lon1=-140., lat1=-67., name='Eastern Ross Sea',
             tags=timTags, fcContour=fcContour700),
        dict(lon0=160., lon1=180., lat1=-67., name='Western Ross Sea',
             tags=timTags, fcContour=fcContour700),
        dict(lon0=25., lon1=160., lat1=-62., name='East Antarctic Seas',
             tags=orsiTags, fcContour=fcContour800),
    ]
    for sector in sectors:
        fc.merge(split_rectangle(lat0=-80., author=author, **sector))

    fc.merge(make_rectangle(lon0=-180., lon1=180., lat0=-80., lat1=-60.,
                            name='Southern Ocean 60S', author=author,
                            tags=timTags))

    # give the existing Southern Ocean feature the same depth bounds
    fcSO = gf.read('ocean', 'region', ['Southern Ocean'])
    props = fcSO.features[0]['properties']
    props['zmin'] = -1000.
    props['zmax'] = -400.
    fc.merge(fcSO)

    fc.plot(projection='southpole')
    fc.to_geojson('antarctic_ocean_regions.geojson')

    # "split" these features into individual files in the geometric data
    # cache and update the database of feature names and tags
    gf.split(fc)
    write_feature_names_and_tags(gf.cacheLocation)
    # move the resulting file into place
    shutil.copyfile('features_and_tags.json',
                    '../../geometric_features/features_and_tags.json')

    plt.show()
# NOTE(review): this chunk begins mid-statement -- the lines below are the
# trailing arguments of a gf.read(...) call inside a loop (over combined
# ice-shelf names, presumably) that starts before the visible source;
# indentation here is reconstructed accordingly.
                      tags=subNames, allTags=False)
    print(' * combining features')
    # collapse the sub-shelf regions just read into one feature named for
    # the combined shelf
    fcShelf = fcShelf.combine(featureName=shelfName)
    # merge the feature for the shelf into the collection of all shelves
    fc.merge(fcShelf)

# build ice shelves from regions with the appropriate tags
for shelfName in iceShelfNames:
    print(shelfName)
    print(' * merging features')
    # each individual ice shelf is tagged with its own name
    fcShelf = gf.read(componentName='iceshelves', objectType='region',
                      tags=[shelfName])
    print(' * combining features')
    fcShelf = fcShelf.combine(featureName=shelfName)
    # merge the feature for the shelf into the collection of all shelves
    fc.merge(fcShelf)

# save the feature collection to a geojson file
fc.to_geojson('iceShelves20200621.geojson')

# optionally display the result on a south-polar projection
if plot:
    fc.plot(projection='southpole')
    plt.show()
# NOTE(review): this chunk begins mid-statement -- the lines below are the
# trailing arguments of a gf.read(...) call inside a loop (over combined
# ice-shelf names, presumably) that starts before the visible source;
# indentation here is reconstructed accordingly.
                      tags=subNames, allTags=False)
    print(' * combining features')
    # collapse the sub-shelf regions just read into one feature named for
    # the combined shelf
    fcShelf = fcShelf.combine(featureName=shelfName)
    # merge the feature for the shelf into the collection of all shelves
    fc.merge(fcShelf)

# build ice shelves from regions with the appropriate tags
for shelfName in iceShelfNames:
    print(shelfName)
    print(' * merging features')
    # each individual ice shelf is tagged with its own name
    fcShelf = gf.read(componentName='iceshelves', objectType='region',
                      tags=[shelfName])
    print(' * combining features')
    fcShelf = fcShelf.combine(featureName=shelfName)
    # merge the feature for the shelf into the collection of all shelves
    fc.merge(fcShelf)

# save the feature collection to a geojson file
fc.to_geojson('iceShelves.geojson')

# optionally display the result on a south-polar projection
if plot:
    fc.plot(projection='southpole')
    plt.show()
def _cull_mesh_with_logging(logger, with_cavities, with_critical_passages,
                            custom_critical_passages, custom_land_blockages,
                            preserve_floodplain, use_progress_bar,
                            process_count):
    """
    Cull the mesh once the logger is defined for sure.

    Builds a land-coverage mask (Natural Earth land plus BedMachine
    Antarctic ice or grounded-ice coverage), culls ``base_mesh.nc`` against
    it, flood-fills from ocean seed points to remove disconnected basins,
    and writes ``culled_mesh.nc`` plus VTK output.  Optionally preserves
    critical ocean passages, blocks critical land blockages, preserves the
    floodplain, and (with cavities) produces a land-ice mask and a no-ISC
    mesh.

    Parameters
    ----------
    logger : logging.Logger
        Logger passed to subprocess and culling calls
    with_cavities : bool
        Whether ice-shelf cavities are part of the ocean domain (land is
        then only the grounded part of the ice sheet)
    with_critical_passages : bool
        Whether to keep standard critical passages open (also enables the
        standard critical land blockages)
    custom_critical_passages : str or None
        Path to a geojson file with additional critical passages
    custom_land_blockages : str or None
        Path to a geojson file with additional land blockages
    preserve_floodplain : bool
        Whether to preserve cells flagged in the base mesh from culling
    use_progress_bar : bool
        Whether VTK extraction shows a progress bar
    process_count : int
        Number of processes for the mask-computation subprocesses
    """
    # either flag implies we need the corresponding feature processing
    critical_passages = with_critical_passages or \
        (custom_critical_passages is not None)

    # note: standard critical passages also switch on land blockages
    land_blockages = with_critical_passages or \
        (custom_land_blockages is not None)

    gf = GeometricFeatures()

    # start with the land coverage from Natural Earth
    fcLandCoverage = gf.read(componentName='natural_earth',
                             objectType='region',
                             featureNames=['Land Coverage'])

    # remove the region south of 60S so we can replace it based on ice-sheet
    # topography
    fcSouthMask = gf.read(componentName='ocean', objectType='region',
                          featureNames=['Global Ocean 90S to 60S'])

    fcLandCoverage = fcLandCoverage.difference(fcSouthMask)

    # Add "land" coverage from either the full ice sheet or just the grounded
    # part
    if with_cavities:
        fcAntarcticLand = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticGroundedIceCoverage'])
    else:
        fcAntarcticLand = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticIceCoverage'])

    fcLandCoverage.merge(fcAntarcticLand)

    # save the feature collection to a geojson file
    fcLandCoverage.to_geojson('land_coverage.geojson')

    # these defaults may have been updated from config options -- pass them
    # along to the subprocess
    netcdf_format = mpas_tools.io.default_format
    netcdf_engine = mpas_tools.io.default_engine

    # Create the land mask based on the land coverage, i.e. coastline data
    args = ['compute_mpas_region_masks',
            '-m', 'base_mesh.nc',
            '-g', 'land_coverage.geojson',
            '-o', 'land_mask.nc',
            '-t', 'cell',
            '--process_count', '{}'.format(process_count),
            '--format', netcdf_format,
            '--engine', netcdf_engine]
    check_call(args, logger=logger)

    dsBaseMesh = xarray.open_dataset('base_mesh.nc')
    dsLandMask = xarray.open_dataset('land_mask.nc')
    # add cells that would be land-locked (cut off from the open ocean) to
    # the land mask
    dsLandMask = add_land_locked_cells_to_mask(dsLandMask, dsBaseMesh,
                                               latitude_threshold=43.0,
                                               nSweeps=20)

    # create seed points for a flood fill of the ocean
    # use all points in the ocean directory, on the assumption that they are,
    # in fact, in the ocean
    fcSeed = gf.read(componentName='ocean', objectType='point',
                     tags=['seed_point'])

    if land_blockages:
        if with_critical_passages:
            # merge transects for critical land blockages into
            # critical_land_blockages.geojson
            fcCritBlockages = gf.read(
                componentName='ocean', objectType='transect',
                tags=['Critical_Land_Blockage'])
        else:
            fcCritBlockages = FeatureCollection()

        if custom_land_blockages is not None:
            fcCritBlockages.merge(read_feature_collection(
                custom_land_blockages))

        # create masks from the transects
        fcCritBlockages.to_geojson('critical_blockages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'base_mesh.nc',
                '-g', 'critical_blockages.geojson',
                '-o', 'critical_blockages.nc',
                '-t', 'cell',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsCritBlockMask = xarray.open_dataset('critical_blockages.nc')

        # force the blocked cells to be treated as land
        dsLandMask = add_critical_land_blockages(dsLandMask, dsCritBlockMask)

    fcCritPassages = FeatureCollection()
    dsPreserve = []

    if critical_passages:
        if with_critical_passages:
            # merge transects for critical passages into fcCritPassages
            fcCritPassages.merge(gf.read(componentName='ocean',
                                         objectType='transect',
                                         tags=['Critical_Passage']))

        if custom_critical_passages is not None:
            fcCritPassages.merge(read_feature_collection(
                custom_critical_passages))

        # create masks from the transects
        fcCritPassages.to_geojson('critical_passages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'base_mesh.nc',
                '-g', 'critical_passages.geojson',
                '-o', 'critical_passages.nc',
                '-t', 'cell', 'edge',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsCritPassMask = xarray.open_dataset('critical_passages.nc')

        # Alter critical passages to be at least two cells wide, to avoid sea
        # ice blockage
        dsCritPassMask = widen_transect_edge_masks(dsCritPassMask, dsBaseMesh,
                                                   latitude_threshold=43.0)

        dsPreserve.append(dsCritPassMask)

    if preserve_floodplain:
        # the base mesh carries the floodplain flags to preserve
        dsPreserve.append(dsBaseMesh)

    # cull the mesh based on the land mask
    dsCulledMesh = cull(dsBaseMesh, dsMask=dsLandMask,
                        dsPreserve=dsPreserve, logger=logger)

    # create a mask for the flood fill seed points
    dsSeedMask = compute_mpas_flood_fill_mask(dsMesh=dsCulledMesh,
                                              fcSeed=fcSeed,
                                              logger=logger)

    # cull the mesh a second time using a flood fill from the seed points,
    # removing any ocean cells not connected to a seed
    dsCulledMesh = cull(dsCulledMesh, dsInverse=dsSeedMask,
                        graphInfoFileName='culled_graph.info', logger=logger)
    write_netcdf(dsCulledMesh, 'culled_mesh.nc')

    if critical_passages:
        # make a new version of the critical passages mask on the culled mesh
        fcCritPassages.to_geojson('critical_passages.geojson')
        args = ['compute_mpas_transect_masks',
                '-m', 'culled_mesh.nc',
                '-g', 'critical_passages.geojson',
                '-o', 'critical_passages_mask_final.nc',
                '-t', 'cell',
                '-s', '10e3',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)

    if with_cavities:
        # mask the cells covered by the (floating) Antarctic ice sheet so a
        # mesh without ice-shelf cavities can also be produced
        fcAntarcticIce = gf.read(
            componentName='bedmachine', objectType='region',
            featureNames=['AntarcticIceCoverage'])
        fcAntarcticIce.to_geojson('ice_coverage.geojson')
        args = ['compute_mpas_region_masks',
                '-m', 'culled_mesh.nc',
                '-g', 'ice_coverage.geojson',
                '-o', 'ice_coverage.nc',
                '-t', 'cell',
                '--process_count', '{}'.format(process_count),
                '--format', netcdf_format,
                '--engine', netcdf_engine]
        check_call(args, logger=logger)
        dsMask = xarray.open_dataset('ice_coverage.nc')

        # store the single-region mask as the landIceMask variable
        landIceMask = dsMask.regionCellMasks.isel(nRegions=0)
        dsLandIceMask = xarray.Dataset()
        dsLandIceMask['landIceMask'] = landIceMask
        write_netcdf(dsLandIceMask, 'land_ice_mask.nc')

        # a companion mesh with the ice-shelf cavities culled away
        dsLandIceCulledMesh = cull(dsCulledMesh, dsMask=dsMask, logger=logger)
        write_netcdf(dsLandIceCulledMesh, 'no_ISC_culled_mesh.nc')

    # VTK output for visual inspection of the culled mesh(es)
    extract_vtk(ignore_time=True, dimension_list=['maxEdges='],
                variable_list=['allOnCells'],
                filename_pattern='culled_mesh.nc',
                out_dir='culled_mesh_vtk',
                use_progress_bar=use_progress_bar)

    if with_cavities:
        extract_vtk(ignore_time=True, dimension_list=['maxEdges='],
                    variable_list=['allOnCells'],
                    filename_pattern='no_ISC_culled_mesh.nc',
                    out_dir='no_ISC_culled_mesh_vtk',
                    use_progress_bar=use_progress_bar)