Ejemplo n.º 1
0
def ogr2ogr(
    *,
    input_file: str,
    output_file: str,
    boundary_filepath: EvalFilePath = project.boundaries['background'].filepath,
    ogr2ogr_args: StepArgs = (),
    enable_partial_reprojection=False,
) -> list[CommandStep]:
    """Warp to project CRS and do other stuff as specified in args."""
    # Optional environment prefix: some inputs span the projection's valid
    # area and need partial reprojection turned on.
    prefix: list[Any] = (
        ['OGR_ENABLE_PARTIAL_REPROJECTION=TRUE']
        if enable_partial_reprojection
        else []
    )

    # Clip to the boundary, repair invalid geometries, then apply any
    # caller-supplied extra arguments before the output/input pair.
    command = [
        'ogr2ogr',
        *STANDARD_OGR2OGR_ARGS,
        '-clipdst',
        boundary_filepath,
        '-makevalid',
        *ogr2ogr_args,
        output_file,
        input_file,
    ]

    return [CommandStep(id='ogr2ogr', args=prefix + command)]
Ejemplo n.º 2
0
def _make_masked_racmo_layer(
        *,
        layer_id: str,
        title: str,
        description: str,
        style: str,
        input_filename: str,
        decompress_contents_mask: str,
        variable: str,
        nodata: int = -9999,
        gdal_edit_args=(),
) -> Layer:
    """Return a RACMO raster `Layer` with ocean cells masked out as nodata.

    Pipeline: extract `input_filename` from the RACMO zip, mask the ocean
    using the `Promicemask` NetCDF variable, apply `gdal_edit_args` (plus
    the project CRS), then compress and build overviews.

    NOTE(review): `gdal_edit_args` defaults to a tuple but is unannotated —
    presumably `StepArgs` like the other step helpers; confirm.
    """
    return Layer(
        id=layer_id,
        title=title,
        description=description,
        tags=[],
        style=style,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            decompress_step(
                input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
                decompress_contents_mask=decompress_contents_mask,
            ),
            # Apply the promice mask. The `Promicemask` values are 3 = Greenland ice
            # sheet; 2,1 = Greenland peripheral ice caps; 0 = Ocean. This step masks
            # out the ocean as 'nodata'.
            CommandStep(args=[
                'gdal_calc.py',
                f'--calc="numpy.where((B != 0), A, {nodata})"',
                f'--NoDataValue={nodata}',
                '--outfile={output_dir}/' + f'{variable}.tif',
                '-A',
                'NETCDF:{input_dir}/' + f'{input_filename}:{variable}',
                '-B',
                ('NETCDF:{input_dir}/'
                 'Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc:Promicemask'
                 ),
            ], ),
            # Stamp the project CRS (and any caller-supplied gdal_edit.py
            # args, e.g. `-a_ullr`) onto the masked raster.
            *gdal_edit(
                input_file='{input_dir}/' + f'{variable}.tif',
                output_file='{output_dir}/edited.tif',
                gdal_edit_args=[
                    '-a_srs',
                    project.crs,
                    *gdal_edit_args,
                ],
            ),
            # Final output: compressed GeoTIFF with overviews.
            *compress_and_add_overviews(
                input_file='{input_dir}/edited.tif',
                output_file='{output_dir}/' + f'racmo_{variable}.tif',
                dtype_is_float=True,
            ),
        ],
    )
Ejemplo n.º 3
0
def test_config_layer_cmd_step_args_validation():
    """Every arg passed to a `CommandStep` should be coerced to `EvalStr`."""
    step = CommandStep(args=['foo', 'bar', 'baz'])

    for arg in step.args:
        assert isinstance(arg, EvalStr)
Ejemplo n.º 4
0
def make_boz_layer(*, year: int) -> Layer:
    """Return the WMM 'Blackout zones' vector layer for the given `year`."""
    return Layer(
        id=f'wmm_boz_{year}',
        title='Blackout zones',
        description="""
Based on the WMM military specification, we define “Blackout Zones” (BoZ)
around the north and south magnetic poles where compass accuracy is highly
degraded. The BoZ are defined as regions around the north and south magnetic
poles where the horizontal intensity of Earth’s magnetic field (H) is less
than 2000 nT. In BoZs, WMM declination values are not accurate and compasses
are unreliable.

We additionally define a “Caution Zone” (2000 nT <= H < 6000 nT) around the
BoZ, where caution must be exercised while using a compass. Compass accuracy
may be degraded in this region.
""",
        tags=['wmm'],
        in_package=True,
        show=False,
        style='blackout_zones',
        input=LayerInput(
            dataset=wmm.wmm,
            asset=wmm.wmm.assets['blackout_zones'],
        ),
        steps=[
            # Extract only this year's arctic BoZ shapefile from the archive
            # ('-j' junks the archive's internal directory structure).
            CommandStep(args=[
                'unzip',
                '-j',
                '-d',
                '{output_dir}',
                '{input_dir}/WMM2020-2025_BoZ_Shapefile.zip',
                f'"*BOZ_arctic_all/BOZ_{year}*"',
            ], ),
            # Reproject to the project CRS and clip to the background boundary.
            CommandStep(args=[
                'ogr2ogr',
                *STANDARD_OGR2OGR_ARGS,
                '-clipdst',
                project.boundaries['background'].filepath,
                '{output_dir}/' + f'BOZ_{year}.gpkg',
                '{input_dir}/' + f'BOZ_{year}.shp',
            ], ),
        ],
    )
Ejemplo n.º 5
0
def sea_ice_age_layer(year: int, age_type: AgeType) -> Layer:
    """Return a sea-ice age raster layer for one `year`/`age_type` combo.

    `seaice_age_layers` supplies the per-year/per-type band number and the
    date range used in the title.

    NOTE(review): 'NSDIC' in the description below looks like a typo for
    'NSIDC' — confirm before changing this user-facing text.
    """
    layer_info = seaice_age_layers[year][age_type]

    return Layer(
        id=f'seaice_{age_type}_age_{year}',
        title=f"{layer_info['date_range']} {year}",
        description=(
            f"""Age of sea ice derived from weekly averaged ice motion vectors. A
            value of N indicates ice aged N-1 to N years. A value of 20 represents
            land; 21 represents ocean cells where ice age was not calculated. Week
            of {age_type} extent chosen based on NSDIC's Sea Ice Index 5-day
            average."""
        ),
        tags=[],
        style='sea_ice_age',
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets[str(year)],
        ),
        steps=[
            # Pull the selected week (band) out of the NetCDF age variable.
            CommandStep(
                args=[
                    'gdal_translate',
                    '-b', layer_info['band_num'],
                    (
                        'NETCDF:{input_dir}/'
                        f'iceage_nh_12.5km_{year}0101_{year}1231_v4.1.nc:age_of_sea_ice'
                    ),
                    '{output_dir}/age_of_sea_ice.tif',
                ],
            ),
            # Assign explicit upper-left/lower-right corner coordinates.
            *gdal_edit(
                input_file='{input_dir}/age_of_sea_ice.tif',
                output_file='{output_dir}/edited.tif',
                gdal_edit_args=[
                    '-a_ullr', '-4518421 4518421 4506579 -4506579',
                ],
            ),
            # Reproject at 12.5 km resolution and clip to the background.
            *warp_and_cut(
                input_file='{input_dir}/edited.tif',
                output_file='{output_dir}/warped_and_cut.tif',
                cut_file=project.boundaries['background'].filepath,
                reproject_args=[
                    '-tr', '12500', '12500',
                ],
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/warped_and_cut.tif',
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=False,
            ),
        ],
    )
Ejemplo n.º 6
0
def _make_layer(
    *,
    id: str,
    title: str,
    description: str,
    asset_id: str,
    partial_filename: str,
) -> Layer:
    """Return a geomagnetic dip-pole point layer.

    `description` is a template containing `{common_description}`, filled
    in with the shared dip-pole text below.

    NOTE(review): the `id` parameter shadows the builtin `id`; harmless
    here, but worth renaming if the call sites are ever touched.
    """
    common_description = """The geomagnetic dip poles are positions on the
Earth's surface where the geomagnetic field is perpendicular to the ellipsoid,
that is, vertical. The north and south dip poles do not have to be (and are not
now) antipodal.
"""

    return Layer(
        id=id,
        title=title,
        description=description.format(common_description=common_description),
        tags=['wmm'],
        style='geomagnetic_north_pole',
        input=LayerInput(
            dataset=wmm.wmm,
            asset=wmm.wmm.assets[asset_id],
        ),
        steps=[
            # Add a header to the downloaded txt file so that it can be processed as
            # 'csv' by `ogr2ogr`
            # NOTE(review): the '>' redirect implies these args are joined
            # and run through a shell by the step runner — confirm.
            CommandStep(args=[
                'sed',
                '"1i longitude latitude year"',
                '{input_dir}/' + f'{partial_filename}.xy',
                '>',
                '{output_dir}/' + f'{partial_filename}_with_header.xy',
            ], ),
            # Convert the headered text to a GeoPackage, reading coordinates
            # from the named columns with WGS84 as the source CRS.
            *ogr2ogr(
                input_file='CSV:{input_dir}/' +
                f'{partial_filename}_with_header.xy',
                output_file='{output_dir}/geomagnetic_north_pole.gpkg',
                ogr2ogr_args=(
                    '-oo',
                    'X_POSSIBLE_NAMES=longitude',
                    '-oo',
                    'Y_POSSIBLE_NAMES=latitude',
                    '-s_srs',
                    'EPSG:4326',
                ),
            ),
        ],
    )
Ejemplo n.º 7
0
def warp_and_cut(
    *,
    # TODO: think about how to require all step template functions to take
    # input_file, output_file.
    input_file,
    output_file,
    cut_file,
    resampling_method: ResamplingMethod = 'bilinear',
    reproject_args: StepArgs = (),
    cut_args: StepArgs = (),
) -> list[CommandStep]:
    """Reproject `input_file` to the project CRS, then clip with `cut_file`.

    The two returned steps communicate through an intermediate
    `warped.tif` file.
    """
    return [
        # Step 1: warp into the project CRS.
        CommandStep(
            args=[
                'gdalwarp',
                '-t_srs', project.crs,
                '-r', resampling_method,
                *reproject_args,
                input_file,
                '{output_dir}/warped.tif',
            ],
        ),
        # Step 2: crop the warped raster to the cutline, with compression.
        CommandStep(
            args=[
                'gdalwarp',
                '-cutline',
                cut_file,
                '-crop_to_cutline',
                '-co', 'COMPRESS=DEFLATE',
                *cut_args,
                '{input_dir}/warped.tif',
                output_file,
            ],
        ),
    ]
Ejemplo n.º 8
0
def unzip_and_reproject_wmm_vector(
    *,
    partial_filename: str,
    contour_units: str,
    unzip_contents_mask: str,
    zip_filename: str,
) -> list[CommandStep]:
    """Unzip a WMM shapefile and reproject/clip it into a GeoPackage.

    The SQL below builds a `label` column by concatenating each contour
    value with `contour_units` for display.
    """
    # '-j' junks internal zip paths so the shapefile lands in {output_dir}.
    unzip = CommandStep(args=[
        'unzip',
        '-j',
        '-d',
        '{output_dir}',
        '{input_dir}/' + zip_filename,
        unzip_contents_mask,
    ], )

    # NOTE(review): OGR_ENABLE_PARTIAL_REPROJECTION is presumably needed
    # because these global contour lines exceed the projection's valid
    # area — confirm.
    reproject_with_sql = CommandStep(
        id='ogr2ogr',
        args=[
            'OGR_ENABLE_PARTIAL_REPROJECTION=TRUE',
            'ogr2ogr',
            *STANDARD_OGR2OGR_ARGS,
            '-clipdst',
            project.boundaries['background'].filepath,
            '-dialect',
            'sqlite',
            '-sql',
            (r'"Select Geometry, Contour, SIGN, \"INDEX\", '
             fr'CAST(Contour AS TEXT) || \" {contour_units}\" as label '
             f'FROM {partial_filename}"'),
            '{output_dir}/' + f'{partial_filename}.gpkg',
            '{input_dir}/' + f'{partial_filename}.shp',
        ],
    )

    return [unzip, reproject_with_sql]
Ejemplo n.º 9
0
def _make_lonlat_layer(asset: RepositoryAsset, ) -> Layer:
    """Return a latitude- or longitude-lines reference layer for `asset`.

    The asset ID prefix ('lat'/'lon') selects the clip strategy; the
    underscore-separated digits in the middle encode the degree resolution.
    NOTE(review): this parsing is inferred from the string operations —
    confirm against the real asset IDs.
    """
    # e.g. 'lat_0_25_degrees' -> '0_25' -> '0.25'
    deg_str = asset.id.rsplit('_', maxsplit=1)[0].split('_', maxsplit=1)[1]
    deg = deg_str.replace('_', '.')

    ogr2ogr_clip_args: List[Union[str, EvalFilePath]]
    if asset.id.startswith('lat'):
        title_prefix = 'Latitude'
        # Latitude lines are filtered (>= 40 degrees N) rather than clipped.
        segment_max_distance = 1
        ogr2ogr_clip_args = [
            '-where',
            '"wgs84Decimal >= 40"',
        ]
    elif asset.id.startswith('lon'):
        title_prefix = 'Longitude'
        segment_max_distance = 100
        ogr2ogr_clip_args = [
            '-clipdst',
            project.boundaries['background'].filepath,
        ]
    else:
        raise RuntimeError(
            "Expected asset ID starting with 'lon' or 'lat'; received:"
            f' {asset.id}', )

    return Layer(
        id=asset.id,
        title=f'{title_prefix} lines ({deg} degree)',
        description=(
            f'Lines of {title_prefix.lower()} in {deg}-degree resolution.'),
        tags=['reference'],
        style='lonlat',
        input=LayerInput(
            dataset=dataset,
            asset=asset,
        ),
        steps=[
            # `-segmentize` densifies the lines so they curve smoothly
            # after reprojection.
            CommandStep(args=[
                'ogr2ogr',
                *STANDARD_OGR2OGR_ARGS,
                '-segmentize',
                segment_max_distance,
                *ogr2ogr_clip_args,
                '{output_dir}/clipped.gpkg',
                '{input_dir}/*.geojson',
            ], ),
        ],
    )
Ejemplo n.º 10
0
def gdal_edit(
    *,
    input_file: str,
    output_file: str,
    gdal_edit_args: StepArgs = (),
) -> list[CommandStep]:
    """Copy `input_file` to `output_file`, then edit the copy in place.

    `gdal_edit.py` mutates its target file, so copying first keeps the
    input untouched.
    """
    copy_command = ['cp', input_file, output_file]
    edit_command = [
        'gdal_edit.py',
        *gdal_edit_args,
        output_file,
    ]

    step = CommandStep(
        id='gdal_edit',
        args=copy_command + ['&&'] + edit_command,
    )
    return [step]
Ejemplo n.º 11
0
def decompress_step(
    *,
    input_file: str,
    decompress_type: Literal['unzip', '7z', 'gzip'] = 'unzip',
    decompress_contents_mask: str = '',
) -> CommandStep:
    """Return a step that extracts `input_file` into `{output_dir}`.

    `decompress_contents_mask` limits extraction to matching archive
    members for 'unzip' and '7z'; it is not supported for 'gzip'.
    """
    args: list[str]
    if decompress_type == 'gzip':
        if decompress_contents_mask:
            raise NotImplementedError(
                ('The `decompress_contents_mask` kwarg is not supported for'
                 ' the `gzip` decompression type.'))
        # gzip decompresses in place, so copy into the output dir first.
        args = [
            'cp', input_file, '{output_dir}/',
            '&&',
            'gzip', '-d', '{output_dir}/*.gz',
        ]
    elif decompress_type == 'unzip':
        args = [
            'unzip', input_file,
            '-d', '{output_dir}',
            decompress_contents_mask,
        ]
    elif decompress_type == '7z':
        args = [
            '7z', 'x', input_file,
            '-o{output_dir}',
            decompress_contents_mask,
        ]
    else:
        raise NotImplementedError(
            f'Unexpected decompress type: {decompress_type}.', )

    return CommandStep(
        id=f'decompress_{decompress_type}',
        args=args,
    )
Ejemplo n.º 12
0
def surface_elevation_layer(
    *,
    array_index: int,
    start_year: int,
    end_year: int,
    variable: SurfaceElevVar,
) -> Layer:
    """Return a surface-elevation-change layer for one variable/time slice.

    `array_index` selects the slice along the last axis of the `variable`
    array in the CCI NetCDF file.
    """
    # Description/style depend on whether this is the rate or its error.
    if variable == 'SEC':
        description = 'Rate of surface elevation change in meters per year.'
        style = 'surface_elevation_change'
    else:
        description = 'Error of rate of surface elevation change in meters per year.'
        style = 'surface_elevation_change_errors'

    return Layer(
        id=f'surface_elevation_change_{variable.lower()}_{start_year}_{end_year}',
        title=f'Surface elevation change {start_year}-{end_year}',
        description=description,
        tags=[],
        style=style,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets['only'],
        ),
        steps=[
            # Extract the selected 2-D slice of the 3-D array as a
            # compressed GeoTIFF.
            CommandStep(
                args=[
                    'gdalmdimtranslate',
                    '-co', 'COMPRESS=DEFLATE',
                    '-array', f'name={variable},view=[:,:,{array_index}]',
                    '{input_dir}/Release/CCI_GrIS_RA_SEC_5km_Vers2.0_2020-08-26.nc',
                    '{output_dir}/' + f'{variable.lower()}_{start_year}_{end_year}.tif',
                ],
            ),
            *compress_and_add_overviews(
                input_file=(
                    '{input_dir}/'
                    f'{variable.lower()}_{start_year}_{end_year}.tif'
                ),
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=True,
            ),
        ],
    )
Ejemplo n.º 13
0
def warp(
    *,
    input_file: str,
    output_file: str,
    cut_file: EvalFilePath,
    resampling_method: ResamplingMethod = 'bilinear',
    warp_args: StepArgs = (),
) -> list[CommandStep]:
    """Reproject and clip `input_file` in one `gdalwarp` invocation.

    Clips to `cut_file`, warps to the project CRS, applies `warp_args`,
    and writes a DEFLATE-compressed result to `output_file`.
    """
    command: list = ['gdalwarp']
    command += ['-cutline', cut_file, '-crop_to_cutline']
    command += ['-r', resampling_method]
    command += ['-t_srs', project.crs]
    command += list(warp_args)
    command += ['-co', 'COMPRESS=DEFLATE']
    command += [input_file, output_file]

    return [CommandStep(args=command)]
Ejemplo n.º 14
0
def make_layers() -> list[Layer]:
    """Build one continental-shelf layer per entry in `LAYER_PARAMS`."""
    layers = []
    for key, params in LAYER_PARAMS.items():
        layers.append(
            Layer(
                id=f'continental_shelf_{key}',
                title=params['title'],
                description=params['description'],
                tags=[],
                input=LayerInput(
                    dataset=dataset,
                    asset=dataset.assets[key],
                ),
                steps=[
                    # Extract the archive, then merge/repair the shapefiles
                    # into a single GeoPackage.
                    decompress_step(input_file='{input_dir}/*.zip'),
                    CommandStep(args=[
                        'ogr2ogr',
                        *STANDARD_OGR2OGR_ARGS,
                        '-makevalid',
                        '{output_dir}/final.gpkg',
                        '{input_dir}/*.shp',
                    ]),
                ],
            ),
        )

    return layers
Ejemplo n.º 15
0
def _layer(year) -> Layer:
    """Return the maximum sea-ice concentration layer for `year`."""
    # The month of maximum concentration varies from year to year.
    month = conc_max_month(year)
    month_name = calendar.month_name[month]

    return Layer(
        id=f'seaice_maximum_concentration_{year}',
        title=f'{month_name} {year}',
        description=CONCENTRATION_DESCRIPTION,
        tags=[],
        style=CONCENTRATION_STYLE,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets[f'maximum_concentration_{year}'],
        ),
        # TODO: Extract to helper
        steps=[
            # Rescale the source values by 1/10 (presumably stored as
            # concentration x 10 — confirm against the dataset docs).
            CommandStep(
                args=[
                    'gdal_calc.py',
                    '--calc', "'A / 10.0'",
                    '-A', '{input_dir}/*.tif',
                    '--outfile={output_dir}/downscaled.tif',
                ],
            ),
            # Reproject to the project CRS and clip to the background.
            *warp_and_cut(
                input_file='{input_dir}/downscaled.tif',
                output_file='{output_dir}/warped_and_cut.tif',
                cut_file=project.boundaries['background'].filepath,
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/warped_and_cut.tif',
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=False,
            ),
        ],
    )
Ejemplo n.º 16
0
     dataset=dataset,
     asset=dataset.assets['100m'],
 ),
 steps=[
     *warp(
         input_file='{input_dir}/arcticdem_mosaic_100m_v3.0.tif',
         output_file='{output_dir}/arctic_dem.tif',
         cut_file=project.boundaries['data'].filepath,
     ),
     CommandStep(args=[
         'gdal_calc.py',
         '--calc',
         '"A * 100.0"',
         '--NoDataValue',
         '-9999',
         '--type',
         'Int32',
         '-A',
         '{input_dir}/arctic_dem.tif',
         '--outfile',
         '{output_dir}/arctic_dem_scaled.tif',
     ], ),
     *gdal_edit(
         input_file='{input_dir}/arctic_dem_scaled.tif',
         output_file='{output_dir}/arctic_dem.tif',
         gdal_edit_args=[
             '-scale',
             '0.01',
         ],
     ),
     *compress_and_add_overviews(
Ejemplo n.º 17
0
layers = [
    Layer(
        id=f'seaice_minimum_concentration_{year}',
        title=f'September {year}',
        description=CONCENTRATION_DESCRIPTION,
        tags=[],
        style=CONCENTRATION_STYLE,
        input=LayerInput(
            dataset=dataset,
            asset=dataset.assets[f'minimum_concentration_{year}'],
        ),
        steps=[
            CommandStep(args=[
                'gdal_calc.py',
                '--calc',
                "'A / 10.0'",
                '-A',
                '{input_dir}/*.tif',
                '--outfile={output_dir}/downscaled.tif',
            ], ),
            *warp_and_cut(
                input_file='{input_dir}/downscaled.tif',
                output_file='{output_dir}/warped_and_cut.tif',
                cut_file=project.boundaries['background'].filepath,
            ),
            *compress_and_add_overviews(
                input_file='{input_dir}/warped_and_cut.tif',
                output_file='{output_dir}/overviews.tif',
                dtype_is_float=False,
            ),
        ],
    ) for year in MIN_CONCENTRATION_YEARS
Ejemplo n.º 18
0
         '-wo', 'SAMPLE_GRID=YES',
     ],
     cut_file='{assets_dir}/latitude_shape_40_degrees.geojson',
 ),
 # Because the background image is large, we use JPEG compression
 # (`compress_and_add_overviews` step below). To do so without JPEG
 # artifacts around the curved clip boundary the image (appears as black
 # pixels around the outside edges of the image), a mask band can be
 # used. This step creates the mask file that will be added as a fourth
 # band to the RGB background image.
 CommandStep(
     id='create_mask',
     args=[
         'cp', '{input_dir}/warped_and_cut.tif', '{output_dir}/input.tif',
         '&&',
         'gdal_calc.py',
         '--calc="numpy.invert(numpy.isnan(A)).astype(int)"',
         '--outfile={output_dir}/mask.tif',
         '-A', '{output_dir}/input.tif',
         '--A_band=1',
     ],
 ),
 # The next step uses `gdal_merge.py` to combine the background image
 # with the mask to create the 4 band raster. This step separates the
 # input file's bands (R, G, B) into three separate files (b1.tif,
 # b2.tif, b3.tif) that can be used by `gdal_merge.py`. NOTE: this step
 # is not strictly necessary but substantially speeds up the merge
 # operation. Without splitting into individual bands, the `gdal_merge`
 # operation takes ~1 hour to complete vs ~40 seconds when the bands are
 # pre-separated.
 CommandStep(
     id='separate_bands',
Ejemplo n.º 19
0
 in_package=False,
 style='wdmam',
 input=LayerInput(
     dataset=dataset,
     asset=dataset.assets['only'],
 ),
 steps=[
     CommandStep(
         id='convert_to_csv',
         args=[
             'sed',
             # Trim leading whitespace
             '-e',
             r'"s/^\s\+//g"',
             # Replace all other whitespace with ','
             '-e',
             r'"s/\s\+/,/g"',
             # Add a header
             '-e',
             '1i"longitude,latitude,magnetic_anomaly,index,long_wavelength"',
             '{input_dir}/full_wdmam.xyz',
             '>',
             '{output_dir}/full_wdmam.csv',
         ],
     ),
     *ogr2ogr(
         input_file='{input_dir}/full_wdmam.csv',
         output_file='{output_dir}/wdmam_greenland.gpkg',
         boundary_filepath=project.boundaries['data'].filepath,
         ogr2ogr_args=[
             '-oo',
             'X_POSSIBLE_NAMES=longitude',
Ejemplo n.º 20
0
def compress_and_add_overviews(
        *,
        input_file: str,
        output_file: str,
        dtype_is_float: Optional[bool] = None,
        resampling_algorithm: ResamplingAlgorithm = 'average',
        compress_type: CompressionType = 'DEFLATE',
        compress_args: StepArgs = (),
) -> list[AnyStep]:
    """Compress raster and build overviews.

    For DEFLATE compression, `dtype_is_float` is required: floating-point
    data uses floating-point prediction (PREDICTOR=3) and integer data
    uses horizontal differencing (PREDICTOR=2). For any other compression
    type, `dtype_is_float` must be omitted.

    See: https://gdal.org/drivers/raster/gtiff.html
    """
    # Validate the `dtype_is_float`/`compress_type` pairing up front so a
    # misconfigured layer fails fast with an accurate message. (Previously
    # both failure modes raised a single message that only described the
    # "may only be specified for DEFLATE" case.)
    if compress_type == 'DEFLATE' and dtype_is_float is None:
        raise RuntimeError(
            '`dtype_is_float` must be specified for DEFLATE compression'
            ' type.')
    if compress_type != 'DEFLATE' and dtype_is_float is not None:
        raise RuntimeError(
            '`dtype_is_float` may only be specified for DEFLATE compression'
            ' type.')

    compress_creation_options = [
        '-co',
        'TILED=YES',
        '-co',
        f'COMPRESS={compress_type}',
    ]
    if compress_type == 'DEFLATE':
        # PREDICTOR=3: floating-point prediction; PREDICTOR=2: horizontal
        # differencing.
        predictor_value = 3 if dtype_is_float else 2
        compress_creation_options.extend([
            '-co',
            f'PREDICTOR={predictor_value}',
        ])

    compress = [
        'gdal_translate',
        *compress_creation_options,
        *compress_args,
        input_file,
        '{output_dir}/compressed.tif',
    ]

    # `gdaladdo` builds overviews in place, so first copy the compressed
    # raster into its final location.
    copy_into_place = [
        'cp',
        '{input_dir}/compressed.tif',
        output_file,
    ]

    add_overviews = [
        'gdaladdo',
        '-r',
        resampling_algorithm,
        output_file,
        '2',
        '4',
        '8',
        '16',
    ]

    return [
        CommandStep(
            id='compress_raster',
            args=compress,
        ),
        CommandStep(
            id='build_overviews',
            args=copy_into_place + ['&&'] + add_overviews,
        ),
    ]
Ejemplo n.º 21
0
 description=(
     """Supraglacial lake delineation on Sermeq Kujalleq/Jakobshavn for
     2019/05/01 and 2019/10/01 generated using Sentinel-2 satellite data."""
 ),
 tags=['water'],
 style='supraglacial_lakes',
 input=LayerInput(
     dataset=dataset,
     asset=dataset.assets['only'],
 ),
 steps=[
     # TODO: *compressed_vector(...)??
     CommandStep(
         args=[
             'unzip',
             '{input_dir}/greenland_sgl_s2_20190501_20191001_jakobshavn_v1_1.zip',
             '"*merged*"',
             '-d', '{output_dir}',
         ],
     ),
     *ogr2ogr(
         input_file='{input_dir}/greenland_sgl_s2_20190501_20191001_jakobshavn_merged_v1_1.shp',
         output_file='{output_dir}/selected.gpkg',
         boundary_filepath=project.boundaries['data'].filepath,
         ogr2ogr_args=(
             '-dialect', 'sqlite',
             '-sql',
             """\"SELECT
                 Geometry,
                 id1,
                 DATE(
                   substr(date, 1, 4)
Ejemplo n.º 22
0
earthquakes = Layer(
    id='earthquakes',
    title='Earthquakes M above 2.5 1900-2020',
    description=("""Location and magnitude of earthquakes."""),
    tags=[],
    style='earthquakes',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[
        CommandStep(args=[
            'ogrmerge.py',
            '-single',
            '-o',
            '{output_dir}/earthquakes.gpkg',
            '{input_dir}/*geojson',
        ], ),
        *ogr2ogr(
            input_file='{input_dir}/earthquakes.gpkg',
            output_file='{output_dir}/earthquakes.gpkg',
            boundary_filepath=project.boundaries['background'].filepath,
            ogr2ogr_args=(
                '-dialect',
                'sqlite',
                '-sql',
                """\"SELECT
                    geom,
                    id,
                    mag,
Ejemplo n.º 23
0
def make_racmo_supplemental_layers() -> list[Layer]:
    """Return the RACMO mask layers plus the masked topography layer.

    Two mask layers (PROMICE ice categories and grounded ice) are built
    directly from NetCDF variables in the RACMO zip; the topography layer
    reuses `_make_masked_racmo_layer` to mask out the ocean.
    """
    layers = []

    _racmo_mask_layer_params = {
        'racmo_promicemask': {
            'title':
            'PROMICE mask (1km)',
            'description':
            ("""Mask of categorized Greenland ice. 3 = Greenland ice sheet; 2,1 = Greenland
                peripheral ice caps; 0 = Ocean."""),
            'extract_filename':
            'Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc',
            'variable':
            'Promicemask',
        },
        'racmo_grounded_ice': {
            'title': 'Grounded ice mask (1km)',
            'description': 'Mask of grounded ice. 1 = grounded.',
            'extract_filename':
            'Icemask_Topo_Iceclasses_lon_lat_average_1km_Aug2020.nc',
            'variable': 'grounded_ice',
        },
    }

    for layer_id, params in _racmo_mask_layer_params.items():
        layers.append(
            Layer(
                id=layer_id,
                title=params['title'],
                description=params['description'],
                tags=[],
                style='racmo_promicemask',
                input=LayerInput(
                    dataset=dataset,
                    asset=dataset.assets['only'],
                ),
                steps=[
                    decompress_step(
                        input_file='{input_dir}/RACMO_QGreenland_Jan2021.zip',
                        decompress_contents_mask=params['extract_filename'],
                    ),
                    # Extract the mask variable, stamping on the project
                    # CRS and grid extent.
                    CommandStep(
                        args=[
                            'gdal_translate',
                            '-a_srs',
                            project.crs,
                            '-a_ullr',
                            RACMO_ULLR,
                            # Data is stored as Float32 but uses integers for mask values.
                            '-ot',
                            'Byte',
                            '-a_nodata',
                            'none',
                            ('NETCDF:{input_dir}/' +
                             f"{params['extract_filename']}:{params['variable']}"
                             ),
                            '{output_dir}/' + f"{params['variable']}.tif",
                        ], ),
                    *compress_and_add_overviews(
                        input_file='{input_dir}/' +
                        f"{params['variable']}.tif",
                        output_file='{output_dir}/' + f'{layer_id}.tif',
                        dtype_is_float=False,
                    ),
                ],
            ), )

    racmo_topography = _make_masked_racmo_layer(
        layer_id='racmo_topography',
        title='Ice surface topography (1km)',
        description=
        ("""Ice sheet surface elevation in meters upscaled from the Greenland Mapping
            Project (GIMP) Digital Elevation Model."""),
        style='racmo_topography',
        decompress_contents_mask=
        'Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc',
        input_filename='Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc',
        variable='Topography',
        gdal_edit_args=[
            '-a_ullr',
            RACMO_ULLR,
        ],
    )

    layers.append(racmo_topography)

    return layers
Ejemplo n.º 24
0
 id=id_str(depth=depth, season=season),
 title=f'{depth_str(depth)}, {season.title()}',
 description=(
     f'Seawater temperature at {depth_str(depth).lower()} depth in °C.'
 ),
 tags=[],
 style='seawater_temperature',
 input=LayerInput(
     dataset=dataset,
     asset=dataset.assets[f'seasonal_{season}'],
 ),
 steps=[
     CommandStep(args=[
         'gdal_translate',
         '-b',
         DEPTHS_BANDS[depth],
         'NETCDF:{input_dir}/' + f'{SEASONS_FNS[season]}:t_an',
         '{output_dir}/extracted.tif',
     ], ),
     *warp_and_cut(
         input_file='{input_dir}/extracted.tif',
         output_file='{output_dir}/warped_and_cut.tif',
         cut_file=project.boundaries['data'].filepath,
         reproject_args=(
             '-tr',
             '25000',
             '25000',
             # A "target extent" bounding box is required to reproject
             # this correctly, or we receive an error like:
             #     ERROR 1: Attempt to create 0x1 dataset is
             #     illegal,sizes must be larger than zero.
Ejemplo n.º 25
0

# Static layer: Arctic shipping routes clipped to the project area.
arctic_sea_routes = Layer(
    id='arctic_sea_routes',
    title='Arctic sea routes',
    # NOTE(review): 'Passate' in the description below looks like a typo
    # for 'Passage' — confirm before changing user-facing text.
    description=(
        """Lines depict the Northern Sea Route, Northwest Passate, and
        hypothetical Transpolar Route."""
    ),
    tags=[],
    style='arctic_sea_routes',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[
        # TODO: *compressed_vector(...)?
        CommandStep(
            args=[
                'unzip',
                '-d', '{output_dir}',
                '{input_dir}/Shipping_and_Hydrography-shp.zip',
            ],
        ),
        # Reproject/clip the extracted shapefile into the final GeoPackage.
        *ogr2ogr(
            input_file='{input_dir}/Arctic_Sea_Routes.shp',
            output_file='{output_dir}/arctic_sea_routes.gpkg',
        ),
    ],
)
Ejemplo n.º 26
0
 id='timezones',
 title='Time zones',
 description=("""Polygons representing time zones."""),
 tags=[],
 style='transparent_labeled_shape',
 input=LayerInput(
     dataset=dataset,
     asset=dataset.assets['only'],
 ),
 steps=[
     # TODO: these steps the same as `compressed_vector` except with
     # `OGR_ENABLE_PARTIAL_REPROJECTION` envvar set for the `ogr2ogr`
     # step. DRY out?
     CommandStep(args=[
         'unzip',
         '{input_dir}/ne_10m_time_zones.zip',
         '-d',
         '{output_dir}',
     ], ),
     CommandStep(
         id='ogr2ogr',
         args=[
             'OGR_ENABLE_PARTIAL_REPROJECTION=True',
             'ogr2ogr',
             *STANDARD_OGR2OGR_ARGS,
             '-clipdst',
             project.boundaries['background'].filepath,
             '-sql',
             ("""'SELECT *, name as label
                 FROM "ne_10m_time_zones"'"""),
             '{output_dir}/reprojected_and_clipped.gpkg',
             '{input_dir}/*.shp',
Ejemplo n.º 27
0
basal_thermal_state = Layer(
    id='basal_thermal_state',
    title='Likely basal thermal state June 23 1993 - April 26 2013 (5km)',
    description=(
        """Likely basal frozen/thawed state of the Greenland Ice Sheet."""),
    tags=[],
    style='basal_thermal_state',
    input=LayerInput(
        dataset=dataset,
        asset=dataset.assets['only'],
    ),
    steps=[
        CommandStep(args=[
            'gdalmdimtranslate',
            '-array',
            'name=likely_basal_thermal_state,transpose=[1,0],view=[::-1,:]',
            '{input_dir}/RDBTS4_Greenland_1993_2013_01_basal_thermal_state.nc',
            '{output_dir}/basal_thermal_state.tif',
        ], ),
        # Convert the dataset to `Int16` data type to save a little extra space
        # in the final output.
        CommandStep(
            args=[
                'gdal_calc.py',
                '--type',
                'Int16',
                # Set a nodata value of 3. This value does not occur in the data
                # (valid values are -1, 0, 1).
                '--NoDataValue',
                '3',
                # This dataset contains nans. Replace them with the nodata value (3)
Ejemplo n.º 28
0
 id=layer_id,
 title=params['title'],
 description=params['description'],
 in_package=False,
 tags=[],
 style=params['style'],
 input=LayerInput(
     dataset=dataset,
     asset=dataset.assets['only'],
 ),
 steps=[
     CommandStep(args=[
         'gdal_calc.py',
         '--calc="A*B"',
         '--outfile={output_dir}/' + f'masked_{layer_id}.tif',
         '-A',
         'NETCDF:{input_dir}/GRE_G0120_0000.nc:v',
         '-B',
         'NETCDF:{input_dir}/GRE_G0120_0000.nc:ice',
     ], ),
     *warp_and_cut(
         input_file='{input_dir}/' + f'masked_{layer_id}.tif',
         output_file='{output_dir}/' + f'{layer_id}.tif',
         cut_file=project.boundaries['data'].filepath,
     ),
     *compress_and_add_overviews(
         input_file='{input_dir}/' + f'{layer_id}.tif',
         output_file='{output_dir}/' + f'{layer_id}.tif',
         dtype_is_float=True,
     ),
 ],