Example #1
def test_set_crs_epsg(tmpdir):
    """ Tests for EPSG codes specifically """

    ds = Dataset(str(tmpdir.join('test.nc')), 'w')
    data_var = ds.createVariable('data', 'S1')
    set_crs(ds, 'data', Proj(init='EPSG:4326'), set_proj4_att=True)
    data_atts = get_ncattrs(data_var)
    crs_var = ds.variables[data_atts['grid_mapping']]
    ncatts = get_ncattrs(crs_var)

    assert data_atts['proj4'] == '+proj=latlong +datum=WGS84 +no_defs'
    assert ncatts['grid_mapping_name'] == 'latitude_longitude'
    assert ncatts['semi_major_axis'] == 6378137.0
    assert ncatts['inverse_flattening'] == 298.257223563

    data_var = ds.createVariable('data2', 'S1')
    set_crs(ds, 'data2', Proj(init='EPSG:4269'), set_proj4_att=True)
    data_atts = get_ncattrs(data_var)
    crs_var = ds.variables[data_atts['grid_mapping']]
    ncatts = get_ncattrs(crs_var)

    assert data_atts['proj4'] == '+proj=latlong +datum=NAD83 +no_defs'
    assert ncatts['grid_mapping_name'] == 'latitude_longitude'
    assert ncatts['semi_major_axis'] == 6378137.0
    assert ncatts['inverse_flattening'] == 298.257223563
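
The get_ncattrs helper used throughout these tests comes from the library under test and is not shown here. As a rough sketch of what it presumably does (an assumption for illustration, not the library's actual implementation), it simply collects a netCDF variable's attributes into a plain dict:

# Hypothetical stand-in for get_ncattrs, for illustration only:
# gather every attribute of a netCDF4 variable (or dataset) into a dict.
def get_ncattrs_sketch(nc_obj):
    return {key: nc_obj.getncattr(key) for key in nc_obj.ncattrs()}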
Example #2
def test_symmetric_proj4(tmpdir):
    """ Test writing and reading proj4 string as attribute of variable """

    ds = Dataset(str(tmpdir.join('test.nc')), 'w')
    proj4 = '+proj=stere +units=m +datum=WGS84 +lat_ts=60 +lat_0=90 +lon_0=263 +lat_1=60 +x_0=3475000 +y_0=7475000'
    ds.createVariable('data', 'S1')
    set_crs(ds, 'data', Proj(proj4), set_proj4_att=True)
    out_proj4 = get_crs(ds, 'data')

    out_data = CRS.from_string(out_proj4).to_dict()

    assert len(out_data) == 9  # There should be 9 parameters
    assert CRS.from_string(proj4).to_dict() == out_data
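
The comparison above goes through CRS.from_string(...).to_dict() rather than raw string equality because two equivalent proj4 strings may list their parameters in different orders. A minimal sketch of the same idea, assuming CRS here is rasterio.crs.CRS (the import is not shown in the example):

from rasterio.crs import CRS

# The same CRS written two ways; comparing the parameter dicts ignores ordering.
a = CRS.from_string('+proj=longlat +datum=WGS84 +no_defs')
b = CRS.from_string('+no_defs +datum=WGS84 +proj=longlat')
assert a.to_dict() == b.to_dict()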
Example #3
def test_utm(tmpdir):
    ds = Dataset(str(tmpdir.join('test.nc')), 'w')
    proj4 = '+init=epsg:3157'  # UTM Zone 10
    ds.createVariable('data', 'S1')
    set_crs(ds, 'data', Proj(proj4), set_proj4_att=True)
    out_proj4 = get_crs(ds, 'data')

    out_data = CRS.from_string(out_proj4).to_dict()

    # EPSG will have been converted to long form
    assert len(out_data) == 6

    expected = {
        u'zone': 10,
        u'ellps': u'GRS80',
        u'no_defs': True,
        u'proj': u'utm',
        u'units': u'm',
        u'towgs84': u'0,0,0,0,0,0,0'
    }
    assert expected == out_data
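
The '+init=epsg:3157' shorthand gets expanded somewhere along the set_crs/get_crs round trip, which is why the output dict contains the six long-form UTM parameters rather than an init key. A small illustrative check, assuming pyproj's Proj is the class imported above; the exact expansion depends on the installed PROJ/pyproj version, so treat the printed output as indicative only:

from pyproj import Proj

# Build a Proj from the EPSG init code; p.srs shows how the code was resolved,
# e.g. into '+proj=utm +zone=10 +ellps=GRS80 ...' on older pyproj versions.
p = Proj(init='epsg:3157')
print(p.srs)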
Example #4
def test_set_crs(tmpdir):
    """ Test proper encoding of projection into CF Convention parameters """

    ds = Dataset(str(tmpdir.join('test.nc')), 'w')

    # Test polar stereographic
    proj4 = '+proj=stere +datum=WGS84 +lat_ts=60 +lat_0=90 +lon_0=263 +lat_1=60 +x_0=3475000 +y_0=7475000'
    data_var = ds.createVariable('data', 'S1')
    set_crs(ds, 'data', Proj(proj4))
    crs_var = ds.variables[get_ncattrs(data_var)['grid_mapping']]
    ncatts = get_ncattrs(crs_var)

    assert ncatts['grid_mapping_name'] == 'polar_stereographic'
    assert ncatts['inverse_flattening'] == 298.257223563
    assert ncatts['latitude_of_projection_origin'] == 90
    assert ncatts['straight_vertical_longitude_from_pole'] == 263
    assert ncatts['standard_parallel'] == 60
    assert ncatts['false_northing'] == 7475000
    assert ncatts['false_easting'] == 3475000

    # Test Lambert conformal conic
    proj4 = '+proj=lcc +lat_1=30 +lat_2=60 +lat_0=47.5 +lon_0=-97 +x_0=3825000 +y_0=3200000'
    data_var = ds.createVariable('data2', 'S1')
    set_crs(ds, 'data2', Proj(proj4))
    crs_var = ds.variables[get_ncattrs(data_var)['grid_mapping']]
    ncatts = get_ncattrs(crs_var)

    assert ncatts['grid_mapping_name'] == 'lambert_conformal_conic'
    assert ncatts['latitude_of_projection_origin'] == 47.5
    assert ncatts['longitude_of_central_meridian'] == -97
    assert ncatts['standard_parallel'] == [30, 60]
    assert ncatts['false_northing'] == 3200000
    assert ncatts['false_easting'] == 3825000

    # Unsupported projection should fail
    proj4 = '+proj=merc +lat_1=30 +lat_2=60 +lat_0=47.5 +lon_0=-97 +x_0=3825000 +y_0=3200000'
    ds.createVariable('data3', 'S1')
    with pytest.raises(ValueError):
        set_crs(ds, 'data3', Proj(proj4))
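
The assertions above spell out how set_crs translates proj4 parameters into CF grid-mapping attributes for the polar stereographic case. Collected in one place, the mapping implied by the test looks roughly like this (derived from the assertions, not from the library's source; lat_ts and lat_1 are both 60 in the proj4 string, but CF's standard_parallel conventionally corresponds to lat_ts, and +datum=WGS84 supplies inverse_flattening separately):

# proj4 parameter -> CF grid-mapping attribute, as implied by the polar stereographic assertions above
POLAR_STEREOGRAPHIC_CF_ATTRS = {
    'lat_0': 'latitude_of_projection_origin',
    'lon_0': 'straight_vertical_longitude_from_pole',
    'lat_ts': 'standard_parallel',
    'x_0': 'false_easting',
    'y_0': 'false_northing',
}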
Example #5
def process_web_outputs(results, job, publish_raster_results=False, renderer_or_fn=None):
    outputs = results.format_args()

    for k, v in six.iteritems(outputs):
        if is_raster(v) and publish_raster_results:
            service_name = '{0}/{1}'.format(job.uuid, k)
            rel_path = '{}.nc'.format(service_name)
            abs_path = os.path.join(SERVICE_DATA_ROOT, rel_path)
            os.makedirs(os.path.dirname(abs_path))

            with Dataset(abs_path, 'w', format='NETCDF4') as ds:
                if v.extent.projection.is_latlong():
                    x_var = 'longitude'
                    y_var = 'latitude'
                else:
                    x_var = 'x'
                    y_var = 'y'

                coord_vars = SpatialCoordinateVariables.from_bbox(v.extent, *reversed(v.shape))
                coord_vars.add_to_dataset(ds, x_var, y_var)

                fill_value = v.fill_value if is_masked(v) else None
                data_var = ds.createVariable('data', v.dtype, dimensions=(y_var, x_var), fill_value=fill_value)
                data_var[:] = v
                set_crs(ds, 'data', v.extent.projection)

            if callable(renderer_or_fn):
                renderer = renderer_or_fn(v)
            elif renderer_or_fn is None:
                renderer = StretchedRenderer(
                    [(numpy.min(v).item(), Color(0, 0, 0)), (numpy.max(v).item(), Color(255, 255, 255))]
                )
            else:
                renderer = renderer_or_fn

            with transaction.atomic():
                service = Service.objects.create(
                    name=service_name,
                    description='This service has been automatically generated from the result of a geoprocessing job.',
                    data_path=rel_path,
                    projection=v.extent.projection.srs,
                    full_extent=v.extent,
                    initial_extent=v.extent,
                )
                Variable.objects.create(
                    service=service,
                    index=0,
                    variable='data',
                    projection=v.extent.projection.srs,
                    x_dimension=x_var,
                    y_dimension=y_var,
                    name='data',
                    renderer=renderer,
                    full_extent=v.extent
                )
                ProcessingResultService.objects.create(job=job, service=service)

            outputs[k] = service_name

        elif is_ndarray(v):
            if v.size < numpy.get_printoptions()['threshold']:
                outputs[k] = v.tolist()
            else:
                outputs[k] = str(v)

    return outputs
Example #6
    def handle(self, *args, **options):
        message = (
            "WARNING: This will update all service data, casting each to it's smallest possible data type. Do you want "
            "to continue? [y/n]"
        )
        if input(message).lower() not in {'y', 'yes'}:
            return

        for service in Service.objects.all():
            if service.variable_set.all().count() > 1:
                print("Skipping service '{}' with more than one variable...".format(service.name))
                continue

            variable = service.variable_set.all().get()
            path = os.path.join(SERVICE_DATA_ROOT, service.data_path)

            tmp_dir = mkdtemp()
            tmp_path = os.path.join(tmp_dir, os.path.basename(service.data_path))

            try:
                with Dataset(path, 'r') as ds:
                    data = ds.variables[variable.variable][:]
                    coords = SpatialCoordinateVariables.from_bbox(service.full_extent, *reversed(data.shape))

                if data.dtype.kind != 'i':
                    print("Ignoring service '{}' with non-int type".format(service.name))
                    continue

                # The fill value will be the minimum value of the chosen type, so we want to make sure it's not
                # included in the actual data range
                min_value = data.min() - 1
                max_value = data.max()

                # Determine the most suitable data type by finding the minimum type for the min/max values and then
                # using the type that will accurately represent both
                min_type = str(numpy.min_scalar_type(min_value))
                max_type = str(numpy.min_scalar_type(max_value))

                min_unsigned, min_size = min_type.split('int')
                max_unsigned, max_size = max_type.split('int')

                dtype = '{}int{}'.format(min_unsigned and max_unsigned, max(int(min_size), int(max_size)))

                if data.dtype == dtype:
                    print("Service '{}' already has the smallest possible type: {}".format(service.name, dtype))
                    continue

                print("Converting service '{}' to type: {}".format(service.name, dtype))

                with Dataset(tmp_path, 'w', format='NETCDF4') as ds:
                    coords.add_to_dataset(ds, variable.x_dimension, variable.y_dimension)

                    data = data.astype(dtype)
                    fill_value = numpy.ma.maximum_fill_value(numpy.dtype(dtype))
                    numpy.ma.set_fill_value(data, fill_value)

                    data_var = ds.createVariable(
                        variable.variable, dtype, dimensions=(variable.y_dimension, variable.x_dimension),
                        fill_value=fill_value
                    )
                    data_var[:] = data

                    set_crs(ds, variable.variable, service.full_extent.projection)

                os.unlink(path)
                shutil.copy2(tmp_path, path)

            finally:
                try:
                    shutil.rmtree(tmp_dir)
                except OSError:
                    pass
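
The dtype-selection step in the command above can be read in isolation. A minimal sketch of the same logic using only numpy (the function name is mine, not part of the project):

import numpy

def smallest_int_dtype(min_value, max_value):
    # Mirrors the command above: find the minimum scalar type for each bound,
    # then combine a signedness and width that accommodate both.
    min_type = str(numpy.min_scalar_type(min_value))  # e.g. 'int8' or 'uint8'
    max_type = str(numpy.min_scalar_type(max_value))
    min_unsigned, min_size = min_type.split('int')    # '' for signed, 'u' for unsigned
    max_unsigned, max_size = max_type.split('int')
    return '{}int{}'.format(min_unsigned and max_unsigned, max(int(min_size), int(max_size)))

# smallest_int_dtype(-1, 100) -> 'int8'; smallest_int_dtype(0, 300) -> 'uint16'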
Example #7
def raster_to_netcdf(filename_or_raster,
                     outfilename=None,
                     variable_name='data',
                     format='NETCDF4',
                     **kwargs):
    """
    Parameters
    ----------
    filename_or_raster: name of file to open with rasterio, or opened rasterio raster dataset
    outfilename: name of output file.  If blank, will be same name as input with *.nc extension added
    variable_name: name of the data variable in the output netCDF file
    format: output format for netCDF file: NETCDF3_CLASSIC, NETCDF3_64BIT, NETCDF4_CLASSIC, NETCDF4
    kwargs: arguments passed to variable creation: zlib

    Note: only rasters with descending y coordinates are currently supported
    """

    start = time.time()

    if isinstance(filename_or_raster, string_types):
        if not os.path.exists(filename_or_raster):
            raise ValueError(
                'File does not exist: {0}'.format(filename_or_raster))

        src = rasterio.open(filename_or_raster)
        managed_raster = True
    else:
        src = filename_or_raster
        managed_raster = False

    if not src.count == 1:
        raise NotImplementedError(
            'ERROR: multi-band rasters not yet supported for this operation')

    prj = pyproj.Proj(**src.crs)

    outfilename = outfilename or src.name + '.nc'
    with Dataset(outfilename, 'w', format=format) as target:
        if prj.is_latlong():
            x_varname = 'longitude'
            y_varname = 'latitude'
        else:
            x_varname = 'x'
            y_varname = 'y'

        # TODO: may need to do this in blocks if source is big
        data = src.read(1, masked=True)

        coords = SpatialCoordinateVariables.from_bbox(BBox(src.bounds, prj),
                                                      src.width, src.height)
        coords.add_to_dataset(target, x_varname, y_varname, **kwargs)

        out_var = target.createVariable(variable_name,
                                        data.dtype,
                                        dimensions=(y_varname, x_varname),
                                        **kwargs)
        out_var[:] = data
        set_crs(target, variable_name, prj, set_proj4_att=False)

    if managed_raster:
        src.close()

    print('Elapsed {0:.3f} seconds'.format(time.time() - start))
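
A minimal usage sketch for raster_to_netcdf; the input path is hypothetical, and zlib is simply passed through to netCDF variable creation as the docstring describes:

# Convert a single-band GeoTIFF (hypothetical path) to a netCDF file named elevation.tif.nc
raster_to_netcdf('elevation.tif', variable_name='elevation', zlib=True)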