Example #1
    def _create_storage_unit(self, stat: OutputProduct, output_filename: Path):
        all_measurement_defns = list(stat.product.measurements.values())

        datasets = self._find_source_datasets(stat,
                                              uri=output_filename.as_uri())

        variable_params = self._create_netcdf_var_params(stat)
        nco = self._nco_from_sources(datasets, self._geobox,
                                     all_measurement_defns, variable_params,
                                     output_filename)

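        # store the serialised source-dataset (lineage) documents in a 'dataset' variable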
        netcdf_writer.create_variable(nco, 'dataset', datasets, zlib=True)
        nco['dataset'][:] = netcdf_writer.netcdfy_data(datasets.values)
        return nco
Example #2
import numpy as np

from datacube.drivers.netcdf.writer import (Variable, create_coordinate,
                                            create_netcdf, create_variable,
                                            netcdfy_data)
from datacube.utils.dates import parse_time


def make_fake_netcdf_dataset(nc_name, doc_text):
    t = np.asarray([parse_time('2001-01-29 07:06:05.432')], dtype=np.datetime64)
    npdata = np.asarray([doc_text], dtype='S')

    with create_netcdf(nc_name) as nco:
        create_coordinate(nco, 'time', t, 'seconds since 1970-01-01 00:00:00')

        nc_dataset = create_variable(nco, 'dataset',
                                     Variable(npdata.dtype, None, ('time',), None))
        nc_dataset[:] = netcdfy_data(npdata)
        assert 'dataset_nchar' in nco.dimensions
Example #3
import numpy
import xarray as xr
import netCDF4

from datacube.drivers.netcdf.writer import (Variable, create_coordinate,
                                            create_netcdf, create_variable)


# tmpnetcdf_filename is a temporary output path supplied by the test harness
def test_chunksizes(tmpnetcdf_filename):
    nco = create_netcdf(tmpnetcdf_filename)

    x = numpy.arange(3, dtype='float32')
    y = numpy.arange(5, dtype='float32')

    coord1 = create_coordinate(nco, 'x', x, 'm')
    coord2 = create_coordinate(nco, 'y', y, 'm')

    assert coord1 is not None and coord2 is not None

    no_chunks = create_variable(nco, 'no_chunks',
                                Variable(numpy.dtype('int16'), None, ('x', 'y'), None))

    min_max_chunks = create_variable(nco, 'min_max_chunks',
                                     Variable(numpy.dtype('int16'), None, ('x', 'y'), None),
                                     chunksizes=(2, 50))

    assert no_chunks is not None
    assert min_max_chunks is not None

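    # S-dtype data is written as a character array with an extra
    # '<name>_nchar' dimension (hence the trailing 3 in the chunking asserted below)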
    strings = numpy.array(["AAa", 'bbb', 'CcC'], dtype='S')
    strings = xr.DataArray(strings, dims=['x'], coords={'x': x})
    create_variable(nco, 'strings_unchunked', strings)
    create_variable(nco, 'strings_chunked', strings, chunksizes=(1,))

    nco.close()

    with netCDF4.Dataset(tmpnetcdf_filename) as nco:
        assert nco['no_chunks'].chunking() == 'contiguous'
        assert nco['min_max_chunks'].chunking() == [2, 5]
        assert nco['strings_unchunked'].chunking() == 'contiguous'
        assert nco['strings_chunked'].chunking() == [1, 3]
Example #4
import numpy
import netCDF4

from datacube.drivers.netcdf.writer import (Variable, create_coordinate,
                                            create_netcdf, create_variable,
                                            netcdfy_data)


# tmpnetcdf_filename, s1, s2 and s3 are supplied by the surrounding test harness;
# read_strings_from_netcdf is a helper that is not defined in this snippet.
def test_create_string_variable(tmpnetcdf_filename, s1, s2, s3):
    str_var = 'str_var'
    nco = create_netcdf(tmpnetcdf_filename)
    coord = create_coordinate(nco, 'greg', numpy.array([1.0, 3.0, 9.0]), 'cubic gregs')
    assert coord is not None

    dtype = numpy.dtype('S100')
    data = numpy.array([s1, s2, s3], dtype=dtype)

    var = create_variable(nco, str_var, Variable(dtype, None, ('greg',), None))
    var[:] = netcdfy_data(data)
    nco.close()

    with netCDF4.Dataset(tmpnetcdf_filename) as nco:
        assert str_var in nco.variables

    for returned, expected in zip(read_strings_from_netcdf(tmpnetcdf_filename, variable=str_var), (s1, s2, s3)):
        assert returned == expected
Example #5
def make_fake_netcdf_dataset(nc_name, doc_text):
    from datacube.drivers.netcdf.writer import (
        Variable,
        create_variable,
        create_coordinate,
        netcdfy_data,
        create_netcdf
    )
    from datacube.utils.dates import parse_time
    import numpy as np

    t = np.asarray([parse_time('2001-01-29 07:06:05.432')], dtype=np.datetime64)
    npdata = np.asarray([doc_text], dtype='S')

    with create_netcdf(nc_name) as nco:
        create_coordinate(nco, 'time', t, 'seconds since 1970-01-01 00:00:00')

        nc_dataset = create_variable(nco, 'dataset',
                                     Variable(npdata.dtype, None, ('time',), None))
        nc_dataset[:] = netcdfy_data(npdata)
        assert 'dataset_nchar' in nco.dimensions
Example #6
# Assumed imports for this fragment (the original snippet does not show them);
# it also relies on a module-level `nodata` fill value being defined elsewhere.
import logging
import time

import numpy as np
import xarray as xr

from datacube.drivers.netcdf import writer as netcdf_writer
from datacube.utils.geometry import CRS


def saveNC(output, filename, history):
    logging.info('saveNC: dataset {} - {}'.format(type(output), output))

    start = time.time()
    nco = netcdf_writer.create_netcdf(filename)
    nco.history = history.encode('ascii', 'replace')

    coords = output.coords
    cnames = ()

    # These lines were added by Aurelio: if an error occurs in this
    # function, check them first. They reorder the coordinate names so
    # that dimensions are written in (time, latitude, longitude) order.
    coord_names = list(output.coords.keys())

    print('coord_names_before', coord_names)

    sample_coords = []
    if 'time' in coord_names:
        sample_coords.append('time')
        coord_names.remove('time')

    if 'latitude' in coord_names:
        sample_coords.append('latitude')
        coord_names.remove('latitude')
    else:
        raise Exception("No hay 'latitude' como coordenada en el dataset")

    if 'longitude' in coord_names:
        sample_coords.append('longitude')
        coord_names.remove('longitude')
    else:
        raise Exception("No hay 'longitude' como coordenada en el dataset")

    sample_coords = sample_coords + coord_names
    coord_names = sample_coords

    print('coord_names_after', coord_names)

    for x in coord_names:
        # non-time coordinates are assumed to already carry a 'units' attribute
        if 'units' not in coords[x].attrs:
            if x == "time":
                coords[x].attrs["units"] = u"seconds since 1970-01-01 00:00:00"
        netcdf_writer.create_coordinate(nco, x, coords[x].values,
                                        coords[x].units)
        cnames = cnames + (x, )
    _crs = output.crs
    if isinstance(_crs, xr.DataArray):
        _crs = CRS(str(_crs.spatial_ref))
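    # write the CF grid-mapping ('crs') variable so the data variables can be georeferenced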
    netcdf_writer.create_grid_mapping_variable(nco, _crs)
    for band in output.data_vars:
        # Skip coordinates and the 'crs' variable (avoids problems with xarray <0.11)
        if band in coords.keys() or band == 'crs':
            continue
        # replace NaNs with the nodata fill value before writing
        output.data_vars[band].values[np.isnan(
            output.data_vars[band].values)] = nodata
        var = netcdf_writer.create_variable(nco,
                                            band,
                                            netcdf_writer.Variable(
                                                output.data_vars[band].dtype,
                                                nodata, cnames, None),
                                            set_crs=True)
        var[:] = netcdf_writer.netcdfy_data(output.data_vars[band].values)
    nco.close()

    end = time.time()
    logging.info('NC output time: ' + str(end - start))
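
For context, a minimal sketch of how saveNC might be called; the product name, query extents and the module-level nodata value below are assumptions, not part of the original example:

import datacube

nodata = -9999  # saveNC reads a module-level fill value; define it in the same module

dc = datacube.Datacube(app='saveNC-example')
output = dc.load(product='ls8_nbar_scene',            # hypothetical product name
                 x=(149.0, 149.2), y=(-35.4, -35.2),
                 time=('2018-01-01', '2018-02-01'),
                 output_crs='EPSG:4326',               # geographic grid, so the
                 resolution=(-0.00025, 0.00025))       # coords are latitude/longitude
saveNC(output, 'example_output.nc', history='written by saveNC-example')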