def _make_hotosm_populated_places() -> Layer:
    """Create the HOTOSM populated-places point layer for Greenland."""
    # Keep a stable subset of columns and coerce `population` (stored as
    # text in the shapefile) to an integer.
    sql = (
        '"SELECT osm_id, is_in, source, name, place, geometry,'
        ' CAST(population AS INTEGER) as population'
        ' FROM hotosm_grl_populated_places_points"'
    )
    extraction_steps = compressed_vector(
        input_file='{input_dir}/hotosm_grl_populated_places_points_shp.zip',
        output_file='{output_dir}/hotosm_populated_places.gpkg',
        ogr2ogr_args=['-dialect', 'sqlite', '-sql', sql],
    )
    return Layer(
        id='hotosm_populated_places',
        title='Populated places',
        description="""Points representing populated places in Greenland.""",
        tags=[],
        style='hotosm_populated_places_point',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['populated_places'],
        ),
        steps=list(extraction_steps),
    )
def make_land_ocean_layer(layer_id: str) -> Layer:
    """Build a polygon layer for *layer_id* from its `ne_10m_<layer_id>` archive."""
    # Dataset and asset are both keyed off the layer id.
    params = layer_params[layer_id]
    return Layer(
        id=layer_id,
        title=layer_id.capitalize(),
        description=f"""Polygons representing the {layer_id}.""",
        tags=[],
        style=layer_id,
        input=LayerInput(
            dataset=params,
            asset=params.assets['only'],
        ),
        steps=list(
            compressed_vector(
                input_file=f'{{input_dir}}/ne_10m_{layer_id}.zip',
                output_file='{output_dir}/final.gpkg',
            )
        ),
    )
def _make_other_hotosm_layers() -> list[Layer]:
    """Build one layer per entry in `_other_hotosm_layer_params`."""

    def _build(asset_id: str, params: dict) -> Layer:
        # Narrow the asset type so `.urls` is available below.
        asset = cast(HttpAsset, dataset.assets[asset_id])
        return Layer(
            id=f'hotosm_{asset_id}',
            title=f"{asset_id.capitalize().replace('_', ' ')}",
            description=params['description'],
            tags=[],
            style=params['style'],
            input=LayerInput(
                dataset=dataset,
                asset=asset,
            ),
            steps=[
                *compressed_vector(
                    # Archive name mirrors the basename of the asset URL.
                    input_file=f'{{input_dir}}/{Path(asset.urls[0]).name}',
                    output_file=f'{{output_dir}}/{asset_id}.gpkg',
                ),
            ],
        )

    return [
        _build(asset_id, params)
        for asset_id, params in _other_hotosm_layer_params.items()
    ]
def _make_racmo_wind_vectors() -> Layer:
    """Create the RACMO annual-mean wind vector point layer."""
    # The decompress mask limits extraction to the wind-vector GeoPackage
    # inside the archive.
    extraction_steps = compressed_vector(
        input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
        output_file='{output_dir}/racmo_wind_vectors.gpkg',
        vector_filename='wind_vector_points.gpkg',
        decompress_step_kwargs={
            'decompress_contents_mask': 'wind_vector_points.gpkg',
        },
    )
    return Layer(
        id='racmo_wind_vectors',
        title='Annual mean wind vectors 1958-2019 (5km)',
        description="""Averaged annual mean wind direction in meters per second from RACMO2.3p2 for the period 1958-2019.""",
        tags=[],
        style='racmo_wind_vectors',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=list(extraction_steps),
    )
def make_layer(*, layer_id: str, layer_params: dict) -> Layer:
    """Build a layer described by *layer_params*, identified by *layer_id*."""
    # All layers share one archive (`as_2159.zip`); extract only the files
    # belonging to this layer's shapefile stem.
    stem = layer_params['input_filepath']
    extraction_steps = compressed_vector(
        input_file='{input_dir}/as_2159.zip',
        output_file='{output_dir}/final.gpkg',
        boundary_filepath=project.boundaries['background'].filepath,
        decompress_step_kwargs={
            'decompress_contents_mask': f'{stem}.*',
        },
        vector_filename=f'{stem}.shp',
    )
    return Layer(
        id=layer_id,
        title=layer_params['title'],
        description=layer_params['description'],
        tags=[],
        style=layer_params['style'],
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=list(extraction_steps),
    )
also including regions of glacial lakes, debris cover, rocks within the glacier (nunataks), and other polygonal features."""), }, }
# NOTE(review): the text above is the tail of a LAYER_PARAMS entry whose
# opening lies outside this view — left untouched.

# One layer per GLIMS feature class declared in LAYER_PARAMS.
layers = [
    Layer(
        # NOTE(review): 'Peripherical' in the title below looks like a typo
        # for 'Peripheral'; it is a user-facing string, so confirm before
        # changing it.
        id=f'glims_{key}',
        title=f'Peripherical glacier/feature {params["hint"]}',
        description=params['description'],
        tags=[],
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/*.zip',
                output_file='{output_dir}/final.gpkg',
                boundary_filepath=project.boundaries['data'].filepath,
                decompress_step_kwargs={
                    # The GLIMS download is a 7z archive; extract only the
                    # files for this feature class.
                    'decompress_type': '7z',
                    'decompress_contents_mask': f'glims_download_82381/*_{key}.*',
                },
                vector_filename='glims_download_82381/*.shp',
            ),
        ],
    )
    for key, params in LAYER_PARAMS.items()
]
# NOTE(review): this chunk begins mid-`Layer(...)` — the enclosing
# `layers = [ Layer(` opening is outside this view.
title=params['title'],
description=params['description'],
tags=[],
style=params['style'],
input=LayerInput(
    dataset=dataset,
    asset=dataset.assets['only'],
),
steps=[
    *compressed_vector(
        input_file='{input_dir}/features.zip',
        output_file='{output_dir}/final.gpkg',
        boundary_filepath=project.boundaries['background'].filepath,
        decompress_step_kwargs={
            # Extract only this layer's shapefile components.
            'decompress_contents_mask': f'features/features-{key}.*',
        },
        vector_filename=f'features/features-{key}.shp',
        ogr2ogr_args=(
            *params['extra_ogr2ogr_args'],
            '-sql',
            # Duplicates `name` as `label` — presumably so the style can
            # bind to a stable `label` field; confirm against the style.
            (
                f"""'SELECT *, name as label FROM "features-{key}"'"""
            ),
        ),
    ),
],
)
for key, params in LAYER_PARAMS.items()
]
# Reference coastline for Greenland; `show=True` makes it visible by default.
bas_greenland_coastlines = Layer(
    id='bas_greenland_coastlines',
    title='Greenland coastlines 2017',
    description=("""This layer should be used as the 'reference coastline' for Greenland."""),
    tags=[],
    show=True,
    style='greenland_coastline',
    input=LayerInput(
        dataset=bas_coastlines,
        asset=bas_coastlines.assets['only'],
    ),
    steps=[
        *compressed_vector(
            input_file='{input_dir}/Greenland_coast.zip',
            output_file='{output_dir}/greenland_coastline.gpkg',
        ),
    ],
)

# NOTE(review): the definition below continues past this view.
global_coastlines = Layer(
    id='coastlines',
    title='Global coastlines',
    description=("""Note that the 'Greenland coastlines 2017' layer is preferred for Greenland."""),
    tags=[],
    style='transparent_shape',
    input=LayerInput(
        dataset=gshhg_coastlines,
        asset=gshhg_coastlines.assets['only'],
from qgreenland.config.helpers.steps.compressed_vector import compressed_vector
from qgreenland.config.project import project
from qgreenland.models.config.layer import Layer, LayerInput

# The upstream archive and its internal directory are both named after
# this commit hash.
FN_HASH = '339b0c56563c118307b1f4542703047f5f698fae'
FN = f'tectonicplates-{FN_HASH}'

tectonic_plate_boundaries = Layer(
    id='tectonic_plate_boundaries',
    title='Tectonic plate boundaries',
    description="""Linestrings representing borders between tectonic plates.""",
    tags=[],
    style='tectonic_plate_boundaries',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[
        *compressed_vector(
            input_file=f'{{input_dir}}/{FN}.zip',
            output_file='{output_dir}/final.gpkg',
            boundary_filepath=project.boundaries['background'].filepath,
            decompress_step_kwargs={
                # Only the PB2002 boundaries shapefile is needed.
                'decompress_contents_mask': f'{FN}/PB2002_boundaries.*',
            },
            vector_filename=f'{FN}/*.shp',
        ),
    ],
)
# NOTE(review): the paren below closes a definition that begins outside
# this view.
)

ne_states_provinces = Layer(
    id='ne_states_provinces',
    title='Global administrative divisions',
    description=("""Polygons representing countries' internal administrative boundaries."""),
    tags=[],
    style='administrative_divisions',
    input=LayerInput(
        dataset=dataset.ne_states_provinces,
        asset=dataset.ne_states_provinces.assets['only'],
    ),
    steps=[
        *compressed_vector(
            input_file='{input_dir}/ne_10m_admin_1_states_provinces.zip',
            output_file='{output_dir}/ne_states_provinces.gpkg',
        ),
    ],
)

# NOTE(review): the definition below continues past this view.
ne_countries = Layer(
    id='ne_countries',
    title='Countries',
    description=("""Polygons representing countries."""),
    tags=[],
    style='countries',
    input=LayerInput(
        dataset=dataset.ne_countries,
        asset=dataset.ne_countries.assets['only'],
    ),
    steps=[
# NOTE(review): the step expansions below are the tail of a raster layer
# definition that begins outside this view.
output_file='{output_dir}/geothermal_heat_flow_map_55km.tif',
cut_file=project.boundaries['data'].filepath,
),
*compress_and_add_overviews(
    input_file='{input_dir}/geothermal_heat_flow_map_55km.tif',
    output_file='{output_dir}/geothermal_heat_flow_map_55km.tif',
    # dtype_is_float=True — presumably selects float-safe compression
    # settings; confirm in the helper.
    dtype_is_float=True,
),
],
)

# Point layer of the measurements underlying the heat-flow map raster.
geothermal_heat_flow_measurements = Layer(
    id='geothermal_heat_flow_measurements',
    title='Flow measurement locations',
    description=("""Heat flow measurement database used in the creation of the 'Geothermal heat flow map (10km)' layer."""),
    tags=[],
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['heat_flow_measurements'],
    ),
    steps=[
        *compressed_vector(
            input_file='{input_dir}/Greenland_heat_flow_measurements.zip',
            output_file='{output_dir}/heat_flow_measurements.gpkg',
            boundary_filepath=project.boundaries['data'].filepath,
        ),
    ],
)
# NOTE(review): tail of a `layer_params` dict whose opening is outside
# this view.
'mcas_mlsa_public_historic': {
    'title': 'Historic public licenses',
    'description': ("""Historic mining licenses granted by the government of Greenland."""),
    'asset_id': 'mcas_mlsa_public_historic',
},
}

# One layer per license category; all share the same style, and each pulls
# its own asset out of the dataset by `asset_id`.
layers = [
    Layer(
        id=key,
        title=params['title'],
        description=params['description'],
        tags=[],
        style='mcas_mlsa_licenses',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets[params['asset_id']],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/*.zip',
                output_file='{output_dir}/final.gpkg',
            ),
        ],
    )
    for key, params in layer_params.items()
]
# NOTE(review): this chunk begins mid-`Layer(...)` — id/title/description
# are outside this view.
style='semitransparent_polygon',
input=LayerInput(
    dataset=dataset,
    asset=dataset.assets['only'],
),
steps=[
    *compressed_vector(
        input_file='{input_dir}/Arctic_Char_2010.zip',
        output_file='{output_dir}/final.gpkg',
        ogr2ogr_args=(
            '-dialect', 'sqlite',
            '-sql',
            # Rebuilds MODIFIED into an ISO date (YYYY-MM-DD) from its
            # substrings; assumes the source text is day-first
            # (DD?MM?YYYY) — confirm against the source data.
            ("""'SELECT Geometry, SPECIES, INTRODUCED, OWNER, DATA_URL, SOURCE, CREATED, DATE(substr(MODIFIED, 7, 4) || "-" || substr(MODIFIED, 4, 2) || "-" || substr(MODIFIED, 1, 2)) as MODIFIED, CONTACT FROM Arctic_Char_2010'"""),
        ),
    ),
],
)
# NOTE(review): tail of the `murre_layers` dict; its opening and earlier
# entries are outside this view.
},
'thickbilled_murre': {
    'name': 'Thickbilled Murre',
},
}

# One colony-point layer per entry in `murre_layers`.
layers = [
    Layer(
        id=f'caff_{key}_colonies',
        title=f'{params["name"]} colonies 2010',
        description=(f"""Point locations of {params['name']} colonies as surveyed in 2010."""),
        tags=[],
        style=f'{key}_colonies',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            *compressed_vector(
                input_file='{input_dir}/Murres_distribution.zip',
                output_file='{output_dir}/final.gpkg',
                # Shapefile name embeds the species name with underscores
                # in place of spaces.
                vector_filename=(f'Distribution_{params["name"].replace(" ", "_")}_Colonies.shp'),
            ),
        ],
    )
    for key, params in murre_layers.items()
]
def _asset(dataset: Dataset, month: int) -> HttpAsset:
    """Return the HTTP asset holding *month*'s median-extent line.

    Raises:
        RuntimeError: if the configured asset is not an `HttpAsset`.
    """
    asset = dataset.assets[f'median_extent_line_{month:02d}']
    # isinstance (rather than the original exact `type(...) is` check) is
    # the idiomatic narrowing and also accepts HttpAsset subclasses, which
    # still provide `.urls`.
    if isinstance(asset, HttpAsset):
        return asset
    raise RuntimeError(f'Expected HTTP asset. Received: {asset}')


def _make_month_layer(month: int) -> Layer:
    """Build the median sea-ice-extent layer for one calendar month (1-12)."""
    # Resolve the asset once; the original looked it up twice per layer.
    asset = _asset(dataset, month)
    return Layer(
        id=layer_id(month),
        title=layer_title(month),
        description=("""Ice edge position line that is typical for a month, based on median extent from the period 1981 through 2010."""),
        tags=[],
        input=LayerInput(
            dataset=dataset,
            asset=asset,
        ),
        steps=[
            *compressed_vector(
                # Archive name is the basename of the asset's first URL.
                input_file='{input_dir}/' + asset.urls[0].split('/')[-1],
                output_file='{output_dir}/final.gpkg',
            ),
        ],
    )


# One layer per month, January through December.
layers = [_make_month_layer(month) for month in range(1, 12 + 1)]
from qgreenland.config.datasets.utm_zones import utm_zones as dataset
from qgreenland.config.helpers.steps.compressed_vector import compressed_vector
from qgreenland.models.config.layer import Layer, LayerInput

utm_zones = Layer(
    id='utm_zones',
    title='Universal Transverse Mercator (UTM) zones',
    description="""Polygons representing Universal Transverse Mercator (UTM) zones.""",
    tags=[],
    style='utm_zones',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[
        *compressed_vector(
            input_file='{input_dir}/utmzone.zip',
            output_file='{output_dir}/utm_zones.gpkg',
            # Exclude records where ZONE == 0.
            ogr2ogr_args=(
                '-where', '"\"ZONE\" != 0"',
            ),
            # TODO: Use SQL to generate the label currently set in the style:
            # `concat(ZONE, ROW_)`
            # ?
        ),
    ],
)
from qgreenland.config.datasets.nafo_divisions import nafo_divisions as dataset
from qgreenland.config.helpers.steps.compressed_vector import compressed_vector
from qgreenland.models.config.layer import Layer, LayerInput

# The archive nests its shapefile under a `Divisions/` directory, hence
# the glob in `vector_filename`.
_extraction_steps = compressed_vector(
    input_file='{input_dir}/Divisions.zip',
    output_file='{output_dir}/final.gpkg',
    vector_filename='Divisions/*.shp',
)

nafo_divisions = Layer(
    id='nafo_divisions',
    title='NAFO divisions',
    description="""The Northwest Atlantic Fisheries Organization zones.""",
    tags=[],
    style='nafo_divisions',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[*_extraction_steps],
)
from qgreenland.config.datasets.esa_cci import esa_cci_marginal_lakes
from qgreenland.config.helpers.steps.compressed_vector import compressed_vector
from qgreenland.models.config.layer import Layer, LayerInput

marginal_lakes = Layer(
    id='marginal_lakes',
    title='Inventory of ice marginal lakes, 2017',
    description="""Polygons representing marginal lake locations identified from remote sensing.""",
    tags=[],
    # No dedicated style; QGIS default styling applies.
    style=None,
    input=LayerInput(
        dataset=esa_cci_marginal_lakes,
        asset=esa_cci_marginal_lakes.assets['only'],
    ),
    steps=list(
        compressed_vector(
            input_file='{input_dir}/20170101-ESACCI-L3S_GLACIERS-IML-MERGED-fv1.zip',
            output_file='{output_dir}/marginal_lakes.gpkg',
        )
    ),
)