def make_wmm_variable_layer(
    *,
    variable: WmmVariable,
    year: int,
) -> Layer:
    """Create a WMM contour layer for `variable` in model year `year`.

    The layer's title, description, and contour units come from the
    module-level `_wmm_variable_config` table.
    """
    variable_config = _wmm_variable_config[variable]
    contour_label = variable_config['contour_units']

    return Layer(
        id=f'wmm_{variable}_{year}',
        title=variable_config['title'],
        description=variable_config['description'],
        # We keep the main field declination layers (`d`) in the core
        # package. All other variables will only be available from the plugin.
        in_package=variable == 'd',
        style='wmm_contours',
        input=LayerInput(
            dataset=wmm.wmm,
            asset=wmm.wmm.assets[str(year)],
        ),
        steps=unzip_and_reproject_wmm_vector(
            zip_filename=f'WMM_{year}_all_shape_geographic.zip',
            unzip_contents_mask=f'*{variable.upper()}_{year}*',
            partial_filename=f'{variable.upper()}_{year}',
            contour_units=contour_label,
        ),
    )
def make_layers() -> list[Layer]:
    """Create one vector layer per entry in `LAYER_PARAMS`."""
    layers = []
    for layer_id, layer_params in LAYER_PARAMS.items():
        # Select only the features belonging to this layer's named sub-layer.
        where_clause = f'"\"layer\" = \'{layer_params["layer_name"]}\'"'
        layers.append(
            Layer(
                id=layer_id,
                title=layer_params['title'],
                description=layer_params['description'],
                tags=[],
                input=LayerInput(
                    dataset=dataset,
                    asset=dataset.assets['only'],
                ),
                steps=[
                    *ogr2ogr(
                        input_file='{input_dir}/' + layer_params['input_filename'],
                        output_file='{output_dir}/final.gpkg',
                        boundary_filepath=project.boundaries['background'].filepath,
                        ogr2ogr_args=[
                            '-where',
                            where_clause,
                        ],
                    ),
                ],
            ),
        )
    return layers
def _make_layers() -> Generator[Layer, None, None]:
    """Yield one gravimetric-mass-balance layer per configured date range."""
    for start_date, end_date in _gravimetric_mass_balance_date_ranges:
        start_year = start_date.year
        end_year = end_date.year
        # Same filename inside the archive and in the output directory.
        gpkg_filename = f'points_{start_date:%Y-%m-%d}_{end_date:%Y-%m-%d}.gpkg'

        yield Layer(
            id=f'esa_cci_gravimetric_mass_balance_dtu_{start_year}_{end_year}',
            title=f'Mass balance trend {start_year}-{end_year}',
            description=(
                f"""Trend derived from the period {start_year}-01-01 to
{end_year}-12-31 via gravity measurements. Data is on a ~50 km resolution grid
(displayed as points)."""
            ),
            tags=[],
            style='gmb_dtu_space',
            input=LayerInput(
                dataset=dataset,
                asset=dataset.assets['only'],
            ),
            steps=[
                *ogr2ogr(
                    input_file='{input_dir}/QGREENLAND_GEOPACKAGES/' + gpkg_filename,
                    output_file='{output_dir}/' + gpkg_filename,
                    boundary_filepath=project.boundaries['data'].filepath,
                ),
            ],
        )
def _make_hotosm_populated_places() -> Layer:
    """Create the HOT OSM populated-places point layer."""
    # Select a stable subset of columns and force `population` to an integer.
    select_sql = (
        '"SELECT'
        ' osm_id,'
        ' is_in,'
        ' source,'
        ' name,'
        ' place,'
        ' geometry,'
        ' CAST(population AS INTEGER) as population'
        ' FROM hotosm_grl_populated_places_points"'
    )

    return Layer(
        id='hotosm_populated_places',
        title='Populated places',
        description=("""Points representing populated places in Greenland."""),
        tags=[],
        style='hotosm_populated_places_point',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['populated_places'],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/hotosm_grl_populated_places_points_shp.zip',
                output_file='{output_dir}/hotosm_populated_places.gpkg',
                ogr2ogr_args=[
                    '-dialect', 'sqlite',
                    '-sql', select_sql,
                ],
            ),
        ],
    )
def _make_racmo_wind_speed() -> Layer:
    """Create the RACMO annual-mean wind-speed raster layer."""
    speed_tif = 'racmo_wind_speed.tif'

    return Layer(
        id='racmo_wind_speed',
        title='Annual mean wind speed 1958-2019 (5km)',
        description=(
            """Averaged annual mean wind speed in meters per second from RACMO2.3p2
for the period 1958-2019."""
        ),
        tags=[],
        style='racmo_wind_speed',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            # Only the wind-speed magnitude grid is needed from the archive.
            decompress_step(
                input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
                decompress_contents_mask='magnitudes.nc',
            ),
            *warp_and_cut(
                input_file='{input_dir}/magnitudes.nc',
                output_file='{output_dir}/' + speed_tif,
                cut_file=project.boundaries['data'].filepath,
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/' + speed_tif,
                output_file='{output_dir}/' + speed_tif,
                dtype_is_float=True,
            ),
        ],
    )
def _make_masked_racmo_layer(
    *,
    layer_id: str,
    title: str,
    description: str,
    style: str,
    input_filename: str,
    decompress_contents_mask: str,
    variable: str,
    nodata: int = -9999,
    gdal_edit_args=(),
) -> Layer:
    """Create a RACMO raster layer with ocean cells masked to nodata.

    `variable` names the netCDF subdataset inside `input_filename`. The
    PROMICE mask grid from the same archive is used as the mask band.
    Extra `gdal_edit_args` (e.g. `-a_ullr`) are appended after `-a_srs`.
    """
    return Layer(
        id=layer_id,
        title=title,
        description=description,
        tags=[],
        style=style,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            decompress_step(
                input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
                decompress_contents_mask=decompress_contents_mask,
            ),
            # Apply the promice mask. The `Promicemask` values are 3 = Greenland ice
            # sheet; 2,1 = Greenland peripheral ice caps; 0 = Ocean. This step masks
            # out the ocean as 'nodata'.
            CommandStep(args=[
                'gdal_calc.py',
                # Keep band A wherever the mask (band B) is non-zero.
                f'--calc="numpy.where((B != 0), A, {nodata})"',
                f'--NoDataValue={nodata}',
                '--outfile={output_dir}/' + f'{variable}.tif',
                '-A',
                'NETCDF:{input_dir}/' + f'{input_filename}:{variable}',
                '-B',
                ('NETCDF:{input_dir}/'
                 'Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc:Promicemask'
                 ),
            ],
            ),
            # Stamp the project CRS onto the masked raster; the netCDF source
            # presumably lacks usable SRS metadata — TODO confirm.
            *gdal_edit(
                input_file='{input_dir}/' + f'{variable}.tif',
                output_file='{output_dir}/edited.tif',
                gdal_edit_args=[
                    '-a_srs',
                    project.crs,
                    *gdal_edit_args,
                ],
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/edited.tif',
                output_file='{output_dir}/' + f'racmo_{variable}.tif',
                dtype_is_float=True,
            ),
        ],
    )
def sea_ice_age_layer(year: int, age_type: AgeType) -> Layer:
    """Create a sea-ice-age raster layer for `year` at min/max extent.

    `age_type` selects which weekly band (via `band_num` in
    `seaice_age_layers`) and which date-range label are used.
    """
    layer_info = seaice_age_layers[year][age_type]
    return Layer(
        id=f'seaice_{age_type}_age_{year}',
        title=f"{layer_info['date_range']} {year}",
        description=(
            f"""Age of sea ice derived from weekly averaged ice motion vectors. A
value of N indicates ice aged N-1 to N years. A value of 20 represents land;
21 represents ocean cells where ice age was not calculated. Week of
{age_type} extent chosen based on NSDIC's Sea Ice Index 5-day average."""
        ),
        tags=[],
        style='sea_ice_age',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets[str(year)],
        ),
        steps=[
            # Pull the single weekly band for this age type out of the
            # year-long netCDF stack.
            CommandStep(
                args=[
                    'gdal_translate',
                    '-b',
                    layer_info['band_num'],
                    (
                        'NETCDF:{input_dir}/'
                        f'iceage_nh_12.5km_{year}0101_{year}1231_v4.1.nc:age_of_sea_ice'
                    ),
                    '{output_dir}/age_of_sea_ice.tif',
                ],
            ),
            # Assign corner coordinates for the 12.5km grid; the netCDF
            # presumably lacks a usable geotransform — TODO confirm.
            *gdal_edit(
                input_file='{input_dir}/age_of_sea_ice.tif',
                output_file='{output_dir}/edited.tif',
                gdal_edit_args=[
                    '-a_ullr',
                    '-4518421 4518421 4506579 -4506579',
                ],
            ),
            *warp_and_cut(
                input_file='{input_dir}/edited.tif',
                output_file='{output_dir}/warped_and_cut.tif',
                cut_file=project.boundaries['background'].filepath,
                reproject_args=[
                    # Preserve the source 12.5km pixel size when reprojecting.
                    '-tr',
                    '12500',
                    '12500',
                ],
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/warped_and_cut.tif',
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=False,
            ),
        ],
    )
def _make_layer(
    *,
    id: str,
    title: str,
    description: str,
    asset_id: str,
    partial_filename: str,
) -> Layer:
    """Create a geomagnetic dip-pole point layer from a WMM `.xy` track file.

    `description` must contain a `{common_description}` placeholder; it is
    filled with the shared dip-pole explanation below.
    """
    common_description = """The geomagnetic dip poles are positions on the Earth's surface where the
geomagnetic field is perpendicular to the ellipsoid, that is, vertical. The
north and south dip poles do not have to be (and are not now) antipodal.
"""
    return Layer(
        id=id,
        title=title,
        description=description.format(common_description=common_description),
        tags=['wmm'],
        style='geomagnetic_north_pole',
        input=LayerInput(
            dataset=wmm.wmm,
            asset=wmm.wmm.assets[asset_id],
        ),
        steps=[
            # Add a header to the downloaded txt file so that it can be processed as
            # 'csv' by `ogr2ogr`
            # NOTE(review): the '>' redirect only works if CommandStep runs its
            # args through a shell — otherwise '>' is passed to `sed` as a
            # literal argument. Confirm against CommandStep's implementation.
            CommandStep(args=[
                'sed',
                '"1i longitude latitude year"',
                '{input_dir}/' + f'{partial_filename}.xy',
                '>',
                '{output_dir}/' + f'{partial_filename}_with_header.xy',
            ],
            ),
            *ogr2ogr(
                # The 'CSV:' prefix forces OGR's CSV driver for the '.xy' file.
                input_file='CSV:{input_dir}/' + f'{partial_filename}_with_header.xy',
                output_file='{output_dir}/geomagnetic_north_pole.gpkg',
                ogr2ogr_args=(
                    '-oo', 'X_POSSIBLE_NAMES=longitude',
                    '-oo', 'Y_POSSIBLE_NAMES=latitude',
                    '-s_srs', 'EPSG:4326',
                ),
            ),
        ],
    )
def _make_lonlat_layer(asset: RepositoryAsset, ) -> Layer:
    """Create a latitude- or longitude-lines layer from `asset`.

    The degree resolution is recovered from the middle of the asset ID,
    where underscores presumably stand in for decimal points.
    """
    deg_str = asset.id.rsplit('_', maxsplit=1)[0].split('_', maxsplit=1)[1]
    deg = deg_str.replace('_', '.')

    clip_args: List[Union[str, EvalFilePath]]
    if asset.id.startswith('lat'):
        prefix, max_distance = 'Latitude', 1
        clip_args = ['-where', '"wgs84Decimal >= 40"']
    elif asset.id.startswith('lon'):
        prefix, max_distance = 'Longitude', 100
        clip_args = ['-clipdst', project.boundaries['background'].filepath]
    else:
        raise RuntimeError(
            "Expected asset ID starting with 'lon' or 'lat'; received:"
            f' {asset.id}',
        )

    return Layer(
        id=asset.id,
        title=f'{prefix} lines ({deg} degree)',
        description=(
            f'Lines of {prefix.lower()} in {deg}-degree resolution.'),
        tags=['reference'],
        style='lonlat',
        input=LayerInput(
            dataset=dataset,
            asset=asset,
        ),
        steps=[
            CommandStep(args=[
                'ogr2ogr',
                *STANDARD_OGR2OGR_ARGS,
                '-segmentize',
                max_distance,
                *clip_args,
                '{output_dir}/clipped.gpkg',
                '{input_dir}/*.geojson',
            ],
            ),
        ],
    )
def surface_elevation_layer(
    *,
    array_index: int,
    start_year: int,
    end_year: int,
    variable: SurfaceElevVar,
) -> Layer:
    """Create a surface-elevation-change layer for one time slice.

    `variable` is either the change rate ('SEC') or its error; any other
    value gets the error description and style.
    """
    is_sec = variable == 'SEC'
    description = (
        'Rate of surface elevation change in meters per year.'
        if is_sec
        else 'Error of rate of surface elevation change in meters per year.'
    )
    style = (
        'surface_elevation_change'
        if is_sec
        else 'surface_elevation_change_errors'
    )
    sliced_tif = f'{variable.lower()}_{start_year}_{end_year}.tif'

    return Layer(
        id=f'surface_elevation_change_{variable.lower()}_{start_year}_{end_year}',
        title=f'Surface elevation change {start_year}-{end_year}',
        description=description,
        tags=[],
        style=style,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            # Slice one 2D time step (`array_index`) out of the 3D netCDF array.
            CommandStep(
                args=[
                    'gdalmdimtranslate',
                    '-co',
                    'COMPRESS=DEFLATE',
                    '-array',
                    f'name={variable},view=[:,:,{array_index}]',
                    '{input_dir}/Release/CCI_GrIS_RA_SEC_5km_Vers2.0_2020-08-26.nc',
                    '{output_dir}/' + sliced_tif,
                ],
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/' + sliced_tif,
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=True,
            ),
        ],
    )
def make_land_ocean_layer(layer_id: str) -> Layer:
    """Create a polygon layer (e.g. 'land', 'ocean') from its ne_10m archive."""
    source_dataset = layer_params[layer_id]

    return Layer(
        id=layer_id,
        title=layer_id.capitalize(),
        description=(f"""Polygons representing the {layer_id}."""),
        tags=[],
        style=layer_id,
        input=LayerInput(
            dataset=source_dataset,
            asset=source_dataset.assets['only'],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/' + f'ne_10m_{layer_id}.zip',
                output_file='{output_dir}/final.gpkg',
            ),
        ],
    )
def _make_layer(
    *,
    layer_id: str,
    title: str,
    description: str,
    style: str,
    filename: str,
    dataset: Dataset,
) -> Layer:
    """Create a raster layer by extracting `filename` from the dataset archive,
    reprojecting/cutting it, then compressing with overviews.
    """
    step_input = '{input_dir}/' + filename
    step_output = '{output_dir}/' + filename

    return Layer(
        id=layer_id,
        title=title,
        description=description,
        tags=[],
        style=style,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            decompress_step(
                input_file='{input_dir}/archive.zip',
                decompress_contents_mask=filename,
            ),
            *warp_and_cut(
                input_file=step_input,
                output_file=step_output,
                reproject_args=[
                    # Source data is 0.02x-0.02 degrees resolution. Rene noted in
                    # his email to QGreenland on 2021-01-22 that the geoid and
                    # gravity anomaly grids are 2km resolution.
                    '-tr',
                    '2000',
                    '2000',
                ],
                cut_file=project.boundaries['data'].filepath,
            ),
            *compress_and_add_overviews(
                input_file=step_input,
                output_file=step_output,
                dtype_is_float=True,
            ),
        ],
    )
def make_boz_layer(*, year: int) -> Layer: return Layer( id=f'wmm_boz_{year}', title='Blackout zones', description=""" Based on the WMM military specification, we define “Blackout Zones” (BoZ) around the north and south magnetic poles where compass accuracy is highly degraded. The BoZ are defined as regions around the north and south magnetic poles where the horizontal intensity of Earth’s magnetic field (H) is less than 2000 nT. In BoZs, WMM declination values are not accurate and compasses are unreliable. We additionally define a “Caution Zone” (2000 nT <= H < 6000 nT) around the BoZ, where caution must be exercised while using a compass. Compass accuracy may be degraded in this region. """, tags=['wmm'], in_package=True, show=False, style='blackout_zones', input=LayerInput( dataset=wmm.wmm, asset=wmm.wmm.assets['blackout_zones'], ), steps=[ CommandStep(args=[ 'unzip', '-j', '-d', '{output_dir}', '{input_dir}/WMM2020-2025_BoZ_Shapefile.zip', f'"*BOZ_arctic_all/BOZ_{year}*"', ], ), CommandStep(args=[ 'ogr2ogr', *STANDARD_OGR2OGR_ARGS, '-clipdst', project.boundaries['background'].filepath, '{output_dir}/' + f'BOZ_{year}.gpkg', '{input_dir}/' + f'BOZ_{year}.shp', ], ), ], )
def _make_layer( *, layer_id: str, title: str, description: str, style: str, where_sql: str, ) -> Layer: return Layer( id=layer_id, title=title, description=description, tags=[], style=style, input=LayerInput( dataset=nunagis_protected_areas, asset=nunagis_protected_areas.assets['only'], ), steps=[ *ogr2ogr( input_file='{input_dir}/fetched.geojson', output_file='{output_dir}/' + f'{layer_id}.gpkg', ogr2ogr_args=[ '-dialect', 'sqlite', '-sql', f"""\"SELECT DATETIME( CAST(created_date AS INTEGER) / 1000, 'unixepoch' ) as created_date, DATETIME( CAST(last_edited_date AS INTEGER) / 1000, 'unixepoch' ) as last_edited_date, * FROM ESRIJSON WHERE {where_sql}\" """, ], ), ], )
def make_layers() -> list[Layer]:
    """Create one continental-shelf layer per entry in `LAYER_PARAMS`."""
    layers = []
    for asset_key, layer_params in LAYER_PARAMS.items():
        layers.append(
            Layer(
                id=f'continental_shelf_{asset_key}',
                title=layer_params['title'],
                description=layer_params['description'],
                tags=[],
                input=LayerInput(
                    dataset=dataset,
                    asset=dataset.assets[asset_key],
                ),
                steps=[
                    decompress_step(input_file='{input_dir}/*.zip', ),
                    CommandStep(args=[
                        'ogr2ogr',
                        *STANDARD_OGR2OGR_ARGS,
                        # Repair any invalid source geometries during
                        # conversion.
                        '-makevalid',
                        '{output_dir}/final.gpkg',
                        '{input_dir}/*.shp',
                    ],
                    ),
                ],
            ),
        )
    return layers
def _layer(year) -> Layer:
    """Create the maximum sea-ice-concentration layer for `year`."""
    max_month = conc_max_month(year)

    return Layer(
        id=f'seaice_maximum_concentration_{year}',
        title=f'{calendar.month_name[max_month]} {year}',
        description=CONCENTRATION_DESCRIPTION,
        tags=[],
        style=CONCENTRATION_STYLE,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets[f'maximum_concentration_{year}'],
        ),
        # TODO: Extract to helper
        steps=[
            # Source values are tenths of a percent; scale to percent.
            CommandStep(
                args=[
                    'gdal_calc.py',
                    '--calc',
                    "'A / 10.0'",
                    '-A',
                    '{input_dir}/*.tif',
                    '--outfile={output_dir}/downscaled.tif',
                ],
            ),
            *warp_and_cut(
                input_file='{input_dir}/downscaled.tif',
                output_file='{output_dir}/warped_and_cut.tif',
                cut_file=project.boundaries['background'].filepath,
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/warped_and_cut.tif',
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=False,
            ),
        ],
    )
def make_layer(*, layer_id: str, layer_params: dict) -> Layer:
    """Create one vector layer from the as_2159 archive using `layer_params`."""
    shapefile_base = layer_params['input_filepath']

    return Layer(
        id=layer_id,
        title=layer_params['title'],
        description=layer_params['description'],
        tags=[],
        style=layer_params['style'],
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/as_2159.zip',
                output_file='{output_dir}/final.gpkg',
                boundary_filepath=project.boundaries['background'].filepath,
                # Extract only this layer's sidecar files (.shp, .dbf, ...)
                # from the archive.
                decompress_step_kwargs={
                    'decompress_contents_mask': shapefile_base + '.*',
                },
                vector_filename=shapefile_base + '.shp',
            ),
        ],
    )
def _make_layer( *, id: str, title: str, partial_filename: str, ) -> Layer: return Layer( id=id, title=title, in_package=False, description=""" The WMM representation of the field includes a magnetic dipole at the center of the Earth. This dipole defines an axis that intersects the Earth's surface at two antipodal points. These points are called geomagnetic poles. The geomagnetic poles, otherwise known as the dipole poles, can be computed from the first three Gauss coefficients of the WMM. Based on the WMM2020 coefficients for 2020.0 the geomagnetic north pole is at 72.68°W longitude and 80.59°N geocentric latitude (80.65°N geodetic latitude), and the geomagnetic south pole is at 107.32°E longitude and 80.59°S geocentric latitude (80.65°S geodetic latitude). The axis of the dipole is currently inclined at 9.41° to the Earth's rotation axis. The same dipole is the basis for the simple geomagnetic coordinate system of geomagnetic latitude and longitude. """, style='lonlat', input=LayerInput( dataset=wmm.wmm, asset=wmm.wmm.assets['geomagnetic_coordinates'], ), steps=unzip_and_reproject_wmm_vector( zip_filename='WMM2020_geomagnetic_coordinate_shapefiles.zip', unzip_contents_mask=f'"*geographic_projection/*{partial_filename}*"', partial_filename=partial_filename, contour_units='°', ), )
def _make_other_hotosm_layers() -> list[Layer]:
    """Create one vector layer per entry in `_other_hotosm_layer_params`.

    Each layer extracts its GeoPackage from an archive named after the last
    component of the asset's first URL.
    """
    layers = []
    for asset_id, params in _other_hotosm_layer_params.items():
        asset = cast(HttpAsset, dataset.assets[asset_id])
        layers.append(
            Layer(
                id=f'hotosm_{asset_id}',
                # e.g. 'sea_ports' -> 'Sea ports'
                title=asset_id.capitalize().replace('_', ' '),
                description=params['description'],
                tags=[],
                style=params['style'],
                input=LayerInput(
                    dataset=dataset,
                    asset=asset,
                ),
                steps=[
                    *compressed_vector(
                        input_file=('{input_dir}/' + Path(asset.urls[0]).name),
                        output_file='{output_dir}/' + f'{asset_id}.gpkg',
                    ),
                ],
            ),
        )
    return layers
def _make_racmo_wind_vectors() -> Layer:
    """Create the RACMO annual-mean wind-vector point layer."""
    vector_filename = 'wind_vector_points.gpkg'

    return Layer(
        id='racmo_wind_vectors',
        title='Annual mean wind vectors 1958-2019 (5km)',
        description=(
            """Averaged annual mean wind direction in meters per second from RACMO2.3p2
for the period 1958-2019."""
        ),
        tags=[],
        style='racmo_wind_vectors',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
                output_file='{output_dir}/racmo_wind_vectors.gpkg',
                vector_filename=vector_filename,
                # Extract only the vector points file from the archive.
                decompress_step_kwargs={
                    'decompress_contents_mask': vector_filename,
                },
            ),
        ],
    )
from qgreenland.config.helpers.steps.compress_and_add_overviews import compress_and_add_overviews from qgreenland.config.helpers.steps.gdal_edit import gdal_edit from qgreenland.config.helpers.steps.warp import warp from qgreenland.config.project import project from qgreenland.models.config.layer import Layer, LayerInput from qgreenland.models.config.step import CommandStep arctic_dem = Layer( id='arctic_dem', title='Arctic DEM (100m)', description='Surface elevation in meters using hillshade symbology.', in_package=False, tags=[], style='arctic_dem', input=LayerInput( dataset=dataset, asset=dataset.assets['100m'], ), steps=[ *warp( input_file='{input_dir}/arcticdem_mosaic_100m_v3.0.tif', output_file='{output_dir}/arctic_dem.tif', cut_file=project.boundaries['data'].filepath, ), CommandStep(args=[ 'gdal_calc.py', '--calc', '"A * 100.0"', '--NoDataValue', '-9999', '--type', 'Int32',
def make_racmo_supplemental_layers() -> list[Layer]:
    """Create the supplemental RACMO layers: two mask rasters plus topography.

    The mask layers are converted directly from netCDF subdatasets; the
    topography layer reuses `_make_masked_racmo_layer` to mask out ocean
    cells.
    """
    layers = []
    _racmo_mask_layer_params = {
        'racmo_promicemask': {
            'title': 'PROMICE mask (1km)',
            'description': ("""Mask of categorized Greenland ice. 3 = Greenland ice
sheet; 2,1 = Greenland peripheral ice caps; 0 = Ocean."""),
            'extract_filename':
                'Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc',
            'variable': 'Promicemask',
        },
        'racmo_grounded_ice': {
            'title': 'Grounded ice mask (1km)',
            'description': 'Mask of grounded ice. 1 = grounded.',
            'extract_filename':
                'Icemask_Topo_Iceclasses_lon_lat_average_1km_Aug2020.nc',
            'variable': 'grounded_ice',
        },
    }
    for layer_id, params in _racmo_mask_layer_params.items():
        layers.append(
            Layer(
                id=layer_id,
                title=params['title'],
                description=params['description'],
                tags=[],
                style='racmo_promicemask',
                input=LayerInput(
                    dataset=dataset,
                    asset=dataset.assets['only'],
                ),
                steps=[
                    decompress_step(
                        input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
                        decompress_contents_mask=params['extract_filename'],
                    ),
                    # Convert the netCDF mask variable to GeoTIFF, stamping the
                    # project CRS and grid corner coordinates.
                    CommandStep(
                        args=[
                            'gdal_translate',
                            '-a_srs',
                            project.crs,
                            '-a_ullr',
                            RACMO_ULLR,
                            # Data is stored as Float32 but uses integers for mask values.
                            '-ot',
                            'Byte',
                            '-a_nodata',
                            'none',
                            ('NETCDF:{input_dir}/' +
                             f"{params['extract_filename']}:{params['variable']}"
                             ),
                            '{output_dir}/' + f"{params['variable']}.tif",
                        ],
                    ),
                    *compress_and_add_overviews(
                        input_file='{input_dir}/' + f"{params['variable']}.tif",
                        output_file='{output_dir}/' + f'{layer_id}.tif',
                        dtype_is_float=False,
                    ),
                ],
            ),
        )

    # Topography shares the PROMICE-mask source file; ocean cells are masked
    # out by the helper.
    racmo_topography = _make_masked_racmo_layer(
        layer_id='racmo_topography',
        title='Ice surface topography (1km)',
        description=("""Ice sheet surface elevation in meters upscaled from the Greenland
Mapping Project (GIMP) Digital Elevation Model."""),
        style='racmo_topography',
        decompress_contents_mask='Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc',
        input_filename='Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc',
        variable='Topography',
        gdal_edit_args=[
            '-a_ullr',
            RACMO_ULLR,
        ],
    )
    layers.append(racmo_topography)

    return layers
) from qgreenland.config.helpers.steps.compress_and_add_overviews import compress_and_add_overviews from qgreenland.config.helpers.steps.warp_and_cut import warp_and_cut from qgreenland.config.project import project from qgreenland.models.config.layer import Layer, LayerInput from qgreenland.models.config.step import CommandStep layers = [ Layer( id=f'seaice_minimum_concentration_{year}', title=f'September {year}', description=CONCENTRATION_DESCRIPTION, tags=[], style=CONCENTRATION_STYLE, input=LayerInput( dataset=dataset, asset=dataset.assets[f'minimum_concentration_{year}'], ), steps=[ CommandStep(args=[ 'gdal_calc.py', '--calc', "'A / 10.0'", '-A', '{input_dir}/*.tif', '--outfile={output_dir}/downscaled.tif', ], ), *warp_and_cut( input_file='{input_dir}/downscaled.tif', output_file='{output_dir}/warped_and_cut.tif', cut_file=project.boundaries['background'].filepath, ),
from qgreenland.config.datasets.online import image_mosaic
from qgreenland.models.config.layer import Layer, LayerInput

# Online (tile-service) imagery layers — one per mosaic year; no processing
# steps are needed.
image_mosaic_layers = []
for year, resolution in (('2015', '15'), ('2019', '10')):
    image_mosaic_layers.append(
        Layer(
            id=f'image_mosaic_{year}',
            title=f'Greenland image mosaic {year} ({resolution}m)',
            description=f'Sentinel-2 multispectral satellite imagery from {year}.',
            tags=['online'],
            style='transparent_rgb',
            input=LayerInput(
                dataset=image_mosaic,
                asset=image_mosaic.assets[year],
            ),
        ),
    )
from qgreenland.config.datasets.glacier_terminus import glacier_terminus as dataset
from qgreenland.config.helpers.layers.glacier_terminus import LAYER_YEARS
from qgreenland.config.helpers.layers.glacier_terminus import id_str
from qgreenland.config.helpers.steps.ogr2ogr import ogr2ogr
from qgreenland.models.config.layer import Layer, LayerInput

# One glacier-termini layer per winter season in LAYER_YEARS.
layers = []
for start, end in LAYER_YEARS:
    layers.append(
        Layer(
            id=id_str(start=start, end=end),
            title=f'Glacier termini {start} to {end}',
            description=f'Glacier terminus during the {start}-{end} winter season.',
            tags=[],
            input=LayerInput(
                dataset=dataset,
                asset=dataset.assets[f'{start}_{end}'],
            ),
            steps=[
                *ogr2ogr(
                    input_file='{input_dir}/termini_*.shp',
                    output_file='{output_dir}/boundary.gpkg',
                ),
            ],
        ),
    )
' as ice/ocean by regional climate models (filled).'), 'input_filename': 'basins_filled.gpkg', }, } layers = [ Layer( id=layer_id, title=layer_id.replace('_', ' ').capitalize(), description=("""Calculated locations for subglacial hydrologic basin ice-margin-terminating outlets."""), tags=[], style=layer_id.replace('_filled', ''), input=LayerInput( dataset=dataset, asset=dataset.assets[layer_id], ), steps=[ *ogr2ogr( input_file='{input_dir}/' + params['input_filename'], output_file='{output_dir}/' + f'{layer_id}.gpkg', ogr2ogr_args=(_stream_selection_ogr2ogr_args if 'streams' in layer_id else []), ), ], ) for layer_id, params in _layer_params.items() ] ORDERED_LAYER_IDS = list(_layer_params.keys())
from qgreenland.config.helpers.steps.ogr2ogr import ogr2ogr
from qgreenland.models.config.layer import Layer, LayerInput

# Reference layer marking the Arctic Circle, converted from GeoJSON to
# GeoPackage.
arctic_circle = Layer(
    id='arctic_circle',
    title="Arctic Circle (66° 34' North)",
    description=(
        """The Arctic Circle is an imaginary line that circles the globe at
approximately 66° 34' N and marks the latitude above which the sun does not
set on the summer solstice, and does not rise on the winter solstice."""
    ),
    tags=[],
    style='arctic_circle',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[
        *ogr2ogr(
            input_file='{input_dir}/arctic_circle.geojson',
            output_file='{output_dir}/arctic_circle.gpkg',
            ogr2ogr_args=(
                # Densify the line (max 1-unit segments) so it doesn't become
                # a straight chord after reprojection.
                '-segmentize', '1',
                '-s_srs', 'EPSG:4326',
            ),
        ),
    ],
)
), 'asset_id': 'gc_net', 'table_name': 'GCN%20info%20ca.2000', }, } layers = [ Layer( id=id, title=params['title'], description=params['description'], tags=[], style='labeled_point', input=LayerInput( dataset=dataset, asset=dataset.assets[params['asset_id']], ), steps=[ *ogr2ogr( # This CSV data is tab-delimeted, but ogr2ogr can # auto-detect that. input_file='{input_dir}/*.csv', output_file='{output_dir}/final.gpkg', ogr2ogr_args=( '-s_srs', 'EPSG:4326', '-oo', 'X_POSSIBLE_NAMES=lon', '-oo', 'Y_POSSIBLE_NAMES=lat', '-sql', fr'"SELECT *, name as label from \"{params["table_name"]}\""', ), ), ],
85, ) layers = [ Layer( id=f'future_ice_sheet_coverage_rcp_{rcp}', title=( f'Future ice sheet coverage for RCP {Decimal(rcp) / 10} scenario' ' for the year 3007 (1.8km)'), description=( """Fraction of a grid cell covered by ice (grounded or floating) in the year 3007. Values less than or equal to 16% are masked."""), tags=[], style='future_ice_sheet_coverage', input=LayerInput( dataset=dataset, asset=dataset.assets[f'rcp_{rcp}'], ), steps=[ *warp( input_file=( 'NETCDF:{input_dir}/' f'percent_gris_g1800m_v3a_rcp_{rcp}_0_1000.nc:sftgif'), output_file='{output_dir}/extracted.tif', warp_args=( '-srcnodata', '0', '-tr', '1800', '1800', ), cut_file=project.boundaries['data'].filepath,
derived from the International Bathymetric Chart of the Arctic Ocean.""" ), 'style': 'bathymetry', 'input_filepath': 'data/shape/base/bathymetry', 'fn_mask': 'bathymetry.*', } bathymetric_raster = Layer( id=bathymetric_raster_params['id'], title=bathymetric_raster_params['title'], description=bathymetric_raster_params['description'], tags=[], style=bathymetric_raster_params['style'], input=LayerInput( dataset=bathymetric_raster_params['dataset'], asset=dataset.assets['only'], ), steps=[ *warp( input_file='NETCDF:{input_dir}/IBCAO_v4_400m_ice.nc:z', output_file='{output_dir}/bathymetric_chart.tif', warp_args=( '-s_srs', '"+proj=stere +lat_0=90 +lat_ts=75 +datum=WGS84"', '-dstnodata', '-9999', '-tr', '400', '400', # This dataset does not contain CF-compliant fields or # geotransform array. Set # `GDAL_NETCDF_IGNORE_XY_AXIS_NAME_CHECKS` to `true` to use # the provided `x` and `y` dims as coordinate values so that # gdal can compute the transform on its own. See # https://github.com/OSGeo/gdal/issues/4075