Example #1
    def write_data(self, prod_name, measurement_name, chunk, values):
        # Write the computed values into the output variable at index 0 of
        # the leading dimension for this chunk, then flush the file to disk.
        self._output_file_handles[prod_name][measurement_name][
            (0, ) + chunk[1:]] = netcdf_writer.netcdfy_data(values)
        self._output_file_handles[prod_name].sync()
        _LOG.debug(
            "Updated %s %s", measurement_name,
            "({})".format(", ".join(prettier_slice(x) for x in chunk[1:])))
Example #2
from datacube.drivers.netcdf.writer import (
    Variable,
    create_variable,
    create_coordinate,
    netcdfy_data,
    create_netcdf,
)
from datacube.utils.dates import parse_time
import numpy as np


def make_fake_netcdf_dataset(nc_name, doc_text):
    t = np.asarray([parse_time('2001-01-29 07:06:05.432')], dtype=np.datetime64)
    npdata = np.asarray([doc_text], dtype='S')

    with create_netcdf(nc_name) as nco:
        create_coordinate(nco, 'time', t, 'seconds since 1970-01-01 00:00:00')

        nc_dataset = create_variable(nco, 'dataset',
                                     Variable(npdata.dtype, None, ('time',), None))
        nc_dataset[:] = netcdfy_data(npdata)
        assert 'dataset_nchar' in nco.dimensions
Example #3
    def _create_storage_unit(self, stat: OutputProduct, output_filename: Path):
        all_measurement_defns = list(stat.product.measurements.values())

        datasets = self._find_source_datasets(stat,
                                              uri=output_filename.as_uri())

        variable_params = self._create_netcdf_var_params(stat)
        nco = self._nco_from_sources(datasets, self._geobox,
                                     all_measurement_defns, variable_params,
                                     output_filename)

        netcdf_writer.create_variable(nco, 'dataset', datasets, zlib=True)
        nco['dataset'][:] = netcdf_writer.netcdfy_data(datasets.values)
        return nco
Example #4
def test_create_string_variable(tmpnetcdf_filename, s1, s2, s3):
    str_var = 'str_var'
    nco = create_netcdf(tmpnetcdf_filename)
    coord = create_coordinate(nco, 'greg', numpy.array([1.0, 3.0, 9.0]), 'cubic gregs')
    assert coord is not None

    dtype = numpy.dtype('S100')
    data = numpy.array([s1, s2, s3], dtype=dtype)

    var = create_variable(nco, str_var, Variable(dtype, None, ('greg',), None))
    var[:] = netcdfy_data(data)
    nco.close()

    with netCDF4.Dataset(tmpnetcdf_filename) as nco:
        assert str_var in nco.variables

    for returned, expected in zip(read_strings_from_netcdf(tmpnetcdf_filename, variable=str_var), (s1, s2, s3)):
        assert returned == expected
Example #5
def make_fake_netcdf_dataset(nc_name, doc_text):
    from datacube.drivers.netcdf.writer import (
        Variable,
        create_variable,
        create_coordinate,
        netcdfy_data,
        create_netcdf
    )
    from datacube.utils.dates import parse_time
    import numpy as np

    t = np.asarray([parse_time('2001-01-29 07:06:05.432')], dtype=np.datetime64)
    npdata = np.asarray([doc_text], dtype='S')

    with create_netcdf(nc_name) as nco:
        create_coordinate(nco, 'time', t, 'seconds since 1970-01-01 00:00:00')

        nc_dataset = create_variable(nco, 'dataset',
                                     Variable(npdata.dtype, None, ('time',), None))
        nc_dataset[:] = netcdfy_data(npdata)
        assert 'dataset_nchar' in nco.dimensions
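A minimal read-back sketch for the file produced by make_fake_netcdf_dataset above (the file name and document text are illustrative; the assertions mirror the checks in Examples #4 and #5, and the datacube package is assumed to be installed):

import netCDF4

make_fake_netcdf_dataset('fake_dataset.nc', '{"id": "example-document"}')

with netCDF4.Dataset('fake_dataset.nc') as nco:
    # The document is stored along the 'dataset_nchar' dimension asserted above.
    assert 'dataset' in nco.variables
    assert 'dataset_nchar' in nco.dimensions
    # If the raw text is needed, netCDF4.chartostring can convert a character
    # array back to strings (netCDF4 may already return strings directly,
    # depending on how the variable was encoded).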
Example #6
def saveNC(output, filename, history):

    logging.info('saveNC: dataset {} - {}'.format(type(output), output))

    start = time.time()
    nco = netcdf_writer.create_netcdf(filename)
    nco.history = history.encode('ascii', 'replace')

    coords = output.coords
    cnames = ()

    # These lines were added by Aurelio; if an error occurs in this
    # function, check them first. They reorder the coordinate names so
    # that 'time', 'latitude' and 'longitude' come first.
    coord_names = list(output.coords.keys())

    print('coord_names_before', coord_names)

    sample_coords = []
    if 'time' in coord_names:
        sample_coords.append('time')
        coord_names.remove('time')

    if 'latitude' in coord_names:
        sample_coords.append('latitude')
        coord_names.remove('latitude')
    else:
        raise Exception("No hay 'latitude' como coordenada en el dataset")

    if 'longitude' in coord_names:
        sample_coords.append('longitude')
        coord_names.remove('longitude')
    else:
        raise Exception("No hay 'longitude' como coordenada en el dataset")

    sample_coords = sample_coords + coord_names
    coord_names = sample_coords

    print('coord_names_after', coord_names)

    for x in coord_names:
        if 'units' not in coords[x].attrs:
            if x == "time":
                coords[x].attrs["units"] = u"seconds since 1970-01-01 00:00:00"
        netcdf_writer.create_coordinate(nco, x, coords[x].values,
                                        coords[x].units)
        cnames = cnames + (x, )
    _crs = output.crs
    if isinstance(_crs, xr.DataArray):
        _crs = CRS(str(_crs.spatial_ref))
    netcdf_writer.create_grid_mapping_variable(nco, _crs)
    for band in output.data_vars:
        # To avoid problems with xarray <0.11
        if band in coords.keys() or band == 'crs':
            continue
        # Replace NaNs with the no-data value; `nodata` is expected to be
        # defined in the enclosing module scope.
        output.data_vars[band].values[np.isnan(
            output.data_vars[band].values)] = nodata
        var = netcdf_writer.create_variable(nco,
                                            band,
                                            netcdf_writer.Variable(
                                                output.data_vars[band].dtype,
                                                nodata, cnames, None),
                                            set_crs=True)
        var[:] = netcdf_writer.netcdfy_data(output.data_vars[band].values)
    nco.close()

    end = time.time()
    logging.info('NC output time: ' + str(end - start))
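saveNC expects an xarray.Dataset with 'time', 'latitude' and 'longitude' coordinates and a CRS (either a crs attribute or a spatial_ref coordinate, as handled above), such as the result of a datacube load. A hypothetical call could look like the sketch below; the product name and query extents are illustrative, not from the source, and the module-level nodata value must be defined before calling:

import datacube

dc = datacube.Datacube(app='save-nc-example')
# Illustrative product and extents; substitute whatever is indexed locally.
ds = dc.load(product='ls8_nbar_albers',
             latitude=(-35.3, -35.2),
             longitude=(149.0, 149.1),
             time=('2018-01-01', '2018-02-01'))
saveNC(ds, 'example_output.nc', history='Exported with saveNC')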