Example #1
def to_swan(
    self,
    filename,
    append=False,
    id="Created by wavespectra",
    unique_times=False,
    dircap_270=False,
):
    """Write spectra in SWAN ASCII format.

    Args:
        - filename (str): name for output SWAN ASCII file.
        - append (bool): if True append to existing filename.
        - id (str): used for header in output file.
        - unique_times (bool): if True, only last time is taken from
          duplicate indices.
        - dircap_270 (bool): if True, ensure directions do not exceed 270 degrees,
          as required by SWAN when prescribing boundaries.

    Note:
        - Only datasets with lat/lon coordinates are currently supported.
        - Extra dimensions other than time, site, lon, lat, freq, dir are not yet
          supported.
        - Only 2D spectra E(f,d) are currently supported.

    """
    # If grid reshape into site, otherwise ensure there is site dim to iterate over
    dset = self._check_and_stack_dims()

    # When prescribing bnds, SWAN doesn't like dir>270
    if dircap_270:
        direc = dset[attrs.DIRNAME].values
        direc[direc > 270] = direc[direc > 270] - 360
        dset = dset.update({attrs.DIRNAME: direc}).sortby("dir", ascending=False)

    darray = dset[attrs.SPECNAME]
    is_time = attrs.TIMENAME in darray.dims

    # Instantiate swan object
    try:
        x = dset.lon.values
        y = dset.lat.values
    except AttributeError as err:
        raise NotImplementedError(
            "lon/lat not found in dset, cannot dump SWAN file without locations"
        ) from err
    sfile = SwanSpecFile(
        filename,
        freqs=darray.freq,
        dirs=darray.dir,
        time=is_time,
        x=x,
        y=y,
        append=append,
        id=id,
    )

    # Dump each timestep
    if is_time:
        for t in darray.time:
            darrout = darray.sel(time=t, method="nearest")
            if darrout.time.size == 1:
                sfile.write_spectra(
                    darrout.transpose(
                        attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME
                    ).values,
                    time=to_datetime(t.values),
                )
            elif unique_times:
                sfile.write_spectra(
                    darrout.isel(time=-1)
                    .transpose(attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME)
                    .values,
                    time=to_datetime(t.values),
                )
            else:
                for it, tt in enumerate(darrout.time):
                    sfile.write_spectra(
                        darrout.isel(time=it)
                        .transpose(attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME)
                        .values,
                        time=to_datetime(tt.values),
                    )
    else:
        sfile.write_spectra(
            darray.transpose(attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME).values
        )
    sfile.close()
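
A minimal usage sketch for this variant (not part of the library source): it assumes
the wavespectra read_swan reader and the xarray "spec" accessor, and the file names
are illustrative only.

from wavespectra import read_swan

# Hypothetical input file; read_swan returns an xarray Dataset with the "spec" accessor.
dset = read_swan("input.spec")

# Write a SWAN ASCII boundary file, wrapping directions above 270 degrees and
# keeping only the last of any duplicate time steps (file name is illustrative).
dset.spec.to_swan(
    "boundary.spec",
    unique_times=True,
    dircap_270=True,
)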
Example #2
def to_swan(
    self,
    filename,
    append=False,
    id="Created by wavespectra",
    ntime=None
):
    """Write spectra in SWAN ASCII format.

    Args:
        - filename (str): name for output SWAN ASCII file.
        - append (bool): if True append to existing filename.
        - id (str): used for header in output file.
        - ntime (int, None): number of times to load into memory before dumping to the
          output file, useful when the full dataset does not fit into memory; choose
          None to load all times at once.

    Note:
        - Only datasets with lat/lon coordinates are currently supported.
        - Extra dimensions other than time, site, lon, lat, freq, dir are not yet
          supported.
        - Only 2D spectra E(f,d) are currently supported.
        - ntime=None optimises speed because the full dataset is loaded into memory;
          however, if the dataset does not fit into memory a smaller number of times
          may be prescribed instead.

    """
    # If grid reshape into site, otherwise ensure there is site dim to iterate over
    dset = self._check_and_stack_dims()

    # Ensure time dimension exists
    is_time = attrs.TIMENAME in dset[attrs.SPECNAME].dims
    if not is_time:
        dset = dset.expand_dims({attrs.TIMENAME: [None]})
        times = dset[attrs.TIMENAME].values
    else:
        times = dset[attrs.TIMENAME].to_index().to_pydatetime()
        times = [f"{t:%Y%m%d.%H%M%S}" for t in times]

    # Number of time steps to hold in memory per write cycle
    ntime = min(ntime or dset.time.size, dset.time.size)

    # Keeping only supported dimensions
    dims_to_keep = {attrs.TIMENAME, attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME}
    dset = dset.drop_dims(set(dset.dims) - dims_to_keep)

    # Ensure correct shape
    dset = dset.transpose(attrs.TIMENAME, attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME)

    # Instantiate swan object
    try:
        x = dset.lon.values
        y = dset.lat.values
    except AttributeError as err:
        raise NotImplementedError(
            "lon-lat variables are required to write SWAN spectra file"
        ) from err
    sfile = SwanSpecFile(
        filename,
        freqs=dset.freq,
        dirs=dset.dir,
        time=is_time,
        x=x,
        y=y,
        append=append,
        id=id,
    )

    # Dump each timestep
    i0 = 0
    i1 = ntime
    while i1 <= dset.time.size or i0 < dset.time.size:
        ds = dset.isel(time=slice(i0, i1))
        part_times = times[i0:i1]
        i0 = i1
        i1 += ntime
        specarray = ds[attrs.SPECNAME].values
        for itime, time in enumerate(part_times):
            darrout = specarray[itime]
            sfile.write_spectra(darrout, time=time)

    sfile.close()
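
A hedged usage sketch for the chunked variant: the read_ww3 reader and file names are
assumptions for illustration; ntime limits how many time steps are held in memory per
write cycle when the full dataset is too large to load at once.

from wavespectra import read_ww3

# Hypothetical file names; write the output in blocks of 24 time steps.
dset = read_ww3("ww3_spectra.nc")
dset.spec.to_swan("boundary.spec", ntime=24)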
Example #3
def to_swan(self,
            filename,
            append=False,
            id='Created by wavespectra',
            unique_times=False):
    """Write spectra in SWAN ASCII format.

    Args:
        - filename (str): str, name for output SWAN ASCII file.
        - append (bool): if True append to existing filename.
        - id (str): used for header in output file.
        - unique_times (bool): if True, only last time is taken from
          duplicate indices.

    Note:
        - Only datasets with lat/lon coordinates are currently supported.
        - Extra dimensions other than time, site, lon, lat, freq, dir are not yet
          supported.
        - Only 2D spectra E(f,d) are currently supported.

    """
    # If grid reshape into site, otherwise ensure there is site dim to iterate over
    dset = self._check_and_stack_dims()

    darray = dset[attrs.SPECNAME]
    is_time = attrs.TIMENAME in darray.dims

    # Instantiate swan object
    try:
        x = dset.lon.values
        y = dset.lat.values
    except AttributeError as err:
        raise NotImplementedError(
            'lon/lat not found in dset, cannot dump SWAN file without locations'
        ) from err
    sfile = SwanSpecFile(filename,
                         freqs=darray.freq,
                         dirs=darray.dir,
                         time=is_time,
                         x=x,
                         y=y,
                         append=append,
                         id=id)

    # Dump each timestep
    if is_time:
        for t in darray.time:
            darrout = darray.sel(time=t, method='nearest')
            if darrout.time.size == 1:
                sfile.write_spectra(darrout.transpose(attrs.SITENAME,
                                                      attrs.FREQNAME,
                                                      attrs.DIRNAME).values,
                                    time=to_datetime(t.values))
            elif unique_times:
                sfile.write_spectra(darrout.isel(time=-1).transpose(
                    attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME).values,
                                    time=to_datetime(t.values))
            else:
                for it, tt in enumerate(darrout.time):
                    sfile.write_spectra(darrout.isel(time=it).transpose(
                        attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME).values,
                                        time=to_datetime(tt.values))
    else:
        sfile.write_spectra(
            darray.transpose(attrs.SITENAME, attrs.FREQNAME,
                             attrs.DIRNAME).values)
    sfile.close()
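
A minimal sketch of appending with this simpler variant, assuming the read_swan reader
and illustrative file names: the first call creates the output file and subsequent
calls append to it via the append flag.

from wavespectra import read_swan

# Hypothetical hotfile names to concatenate into one SWAN ASCII file.
filenames = ['hotfile_001.spec', 'hotfile_002.spec']
for i, fname in enumerate(filenames):
    dset = read_swan(fname)
    dset.spec.to_swan('combined.spec', append=(i > 0), unique_times=True)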