class IceConcentration(GriddedProp, Environment, serializable.Serializable):
    """
    Gridded ice fraction (concentration) environment property.

    Values are read from a netCDF variable (commonly named 'ice_fraction')
    and interpolated onto query points by the GriddedProp machinery.
    """
    _state = copy.deepcopy(serializable.Serializable._state)

    _schema = GridPropSchema

    _state.add_field([
        serializable.Field('units', save=True, update=True),
        serializable.Field('varname', save=True, update=False),
        serializable.Field('time', save=True, update=True),
        serializable.Field('data_file', save=True, update=True),
        serializable.Field('grid_file', save=True, update=True)
    ])

    # netCDF variable names to try when none is given explicitly
    default_names = [
        'ice_fraction',
    ]

    def __eq__(self, o):
        """
        Equal when metadata matches and the gridded data is element-wise equal.
        """
        if o is None:
            return False
        t1 = (self.name == o.name and self.units == o.units
              and self.time == o.time and self.varname == o.varname)
        # np.array_equal gives a single bool; a bare `self.data == o.data`
        # on array data would make `t1 and t2` raise "truth value ambiguous"
        t2 = np.array_equal(self.data, o.data)
        return t1 and t2

    def __str__(self):
        return self.serialize(json_='save').__repr__()
# Example #2
class IceAwareCurrent(GridCurrent):
    """
    A GridCurrent that blends water and ice velocity by ice coverage.

    Below 20% ice concentration the water current is used unchanged; at or
    above 80% the ice velocity is used; in between, the result is linearly
    interpolated between water and ice velocity.
    """

    _ref_as = ['current', 'ice_aware']
    _req_refs = {'ice_concentration': IceConcentration, 'ice_velocity': IceVelocity}

    _schema = IceAwareCurrentSchema
    _state = copy.deepcopy(GridCurrent._state)

    _state.add_field([serializable.Field('ice_velocity', save=True, update=True, save_reference=True),
                      serializable.Field('ice_concentration', save=True, update=True, save_reference=True)])

    def __init__(self,
                 ice_velocity=None,
                 ice_concentration=None,
                 *args,
                 **kwargs):
        """
        :param ice_velocity: gridded ice velocity field
        :param ice_concentration: gridded ice fraction field
        """
        self.ice_velocity = ice_velocity
        self.ice_concentration = ice_concentration
        super(IceAwareCurrent, self).__init__(*args, **kwargs)

    @classmethod
    @GridCurrent._get_shared_vars()
    def from_netCDF(cls,
                    ice_concentration=None,
                    ice_velocity=None,
                    **kwargs):
        """
        Build the ice fields from the same netCDF source when they are not
        supplied explicitly, then delegate to GridCurrent.from_netCDF.
        """
        if ice_concentration is None:
            ice_concentration = IceConcentration.from_netCDF(**kwargs)
        if ice_velocity is None:
            ice_velocity = IceVelocity.from_netCDF(**kwargs)
        return super(IceAwareCurrent, cls).from_netCDF(ice_concentration=ice_concentration,
                                                       ice_velocity=ice_velocity,
                                                       **kwargs)

    def at(self, points, time, units=None, extrapolate=False, **kwargs):
        """
        Velocity at query points, modulated by the local ice concentration.
        """
        interp = self.ice_concentration.at(points, time, extrapolate=extrapolate, **kwargs).copy()
        # points in the 20%-80% coverage band get a blend of water and ice
        interp_mask = np.logical_and(interp >= 0.2, interp < 0.8)
        interp_mask = interp_mask.reshape(-1)
        # BUGFIX: was `if len(interp > 0.2):`, the length of a boolean array,
        # which is truthy for any non-empty query regardless of ice cover.
        # Only do the blending work when some point actually has ice.
        if np.any(interp >= 0.2):
            ice_mask = interp >= 0.8

            water_v = super(IceAwareCurrent, self).at(points, time, units, extrapolate, **kwargs)
            ice_v = self.ice_velocity.at(points, time, units, extrapolate, **kwargs).copy()
            # rescale coverage in [0.2, 0.8] onto a [0, 1] blend factor
            interp = (interp - 0.2) * 10 / 6.

            vels = water_v.copy()
            vels[ice_mask] = ice_v[ice_mask]
            # in-place subtract is safe: ice_v is already a copy
            diff_v = ice_v
            diff_v -= water_v
            vels[interp_mask] += (diff_v[interp_mask] * interp[interp_mask][:, np.newaxis])
            return vels
        else:
            return super(IceAwareCurrent, self).at(points, time, units, extrapolate, **kwargs)
class VelocityGrid(GridVectorProp, serializable.Serializable):
    """
    A gridded 2D velocity: two component variables sharing grid and time.
    """
    _state = copy.deepcopy(serializable.Serializable._state)

    _schema = VelocityGridSchema

    _state.add_field([
        serializable.Field('units', save=True, update=True),
        serializable.Field('varnames', save=True, update=True),
        serializable.Field('time', save=True, update=True),
        serializable.Field('data_file', save=True, update=True),
        serializable.Field('grid_file', save=True, update=True)
    ])

    def __init__(self,
                 name=None,
                 units=None,
                 time=None,
                 grid=None,
                 variables=None,
                 data_file=None,
                 grid_file=None,
                 dataset=None,
                 **kwargs):
        """
        :param variables: sequence of at most two component properties (u, v)
        :raises ValueError: if more than two component variables are given
        """

        if len(variables) > 2:
            raise ValueError('Only 2 dimensional velocities are supported')
        GridVectorProp.__init__(self,
                                name=name,
                                units=units,
                                time=time,
                                grid=grid,
                                variables=variables,
                                data_file=data_file,
                                grid_file=grid_file,
                                dataset=dataset)

    def __eq__(self, o):
        """Equal when name/units/time and every component variable match."""
        if o is None:
            return False
        t1 = (self.name == o.name and self.units == o.units
              and self.time == o.time)
        t2 = all(self._variables[i] == o._variables[i]
                 for i in range(len(self._variables)))
        return t1 and t2

    def __str__(self):
        return repr(self.serialize(json_='save'))
# Example #4
class IceAwareWind(GridWind):
    """
    A GridWind attenuated by ice coverage.

    Full wind below 20% ice concentration, zero wind at or above 80%,
    linearly scaled from 100% down to 0% in between.
    """

    _ref_as = ['wind', 'ice_aware']
    _req_refs = {'ice_concentration': IceConcentration}

    _schema = IceAwarePropSchema
    _state = copy.deepcopy(GridWind._state)

    _state.add_field([serializable.Field('ice_concentration', save=True, update=True, save_reference=True)])

    def __init__(self,
                 ice_concentration=None,
                 *args,
                 **kwargs):
        """
        :param ice_concentration: gridded ice fraction field
        """
        self.ice_concentration = ice_concentration
        super(IceAwareWind, self).__init__(*args, **kwargs)

    @classmethod
    # NOTE(review): shares vars via GridCurrent's helper, mirroring
    # IceAwareCurrent — confirm GridWind._get_shared_vars is not intended
    @GridCurrent._get_shared_vars()
    def from_netCDF(cls,
                    ice_concentration=None,
                    ice_velocity=None,
                    **kwargs):
        """
        Build the ice fields from the same netCDF source when they are not
        supplied explicitly, then delegate to GridWind.from_netCDF.
        """
        if ice_concentration is None:
            ice_concentration = IceConcentration.from_netCDF(**kwargs)
        if ice_velocity is None:
            ice_velocity = IceVelocity.from_netCDF(**kwargs)
        return super(IceAwareWind, cls).from_netCDF(ice_concentration=ice_concentration,
                                                    ice_velocity=ice_velocity,
                                                    **kwargs)

    def at(self, points, time, units=None, extrapolate=False, **kwargs):
        """
        Wind at query points, scaled down by the local ice concentration.
        """
        interp = self.ice_concentration.at(points, time, extrapolate=extrapolate, **kwargs)
        # points in the 20%-80% coverage band get partially attenuated wind
        interp_mask = np.logical_and(interp >= 0.2, interp < 0.8)
        # BUGFIX: was `if len(interp >= 0.2) != 0:`, the length of a boolean
        # array, which is non-zero for any non-empty query regardless of ice
        # cover; only do the scaling work when some point actually has ice.
        if np.any(interp >= 0.2):
            ice_mask = interp >= 0.8

            wind_v = super(IceAwareWind, self).at(points, time, units, extrapolate, **kwargs)
            # rescale coverage in [0.2, 0.8] onto a [0, 1] attenuation factor
            interp = (interp - 0.2) * 10 / 6.

            vels = wind_v.copy()
            vels[ice_mask] = 0  # fully iced: no wind acts on the element
            vels[interp_mask] = vels[interp_mask] * (1 - interp[interp_mask])[:, np.newaxis]  # scale winds from 100-0% depending on ice coverage
            return vels
        else:
            return super(IceAwareWind, self).at(points, time, units, extrapolate, **kwargs)
class IceAwareProp(serializable.Serializable, Environment):
    """
    Bundles an ice velocity field and an ice concentration field read from
    the same (netCDF) source, for use by ice-aware movers.
    """
    _state = copy.deepcopy(serializable.Serializable._state)
    _schema = VelocityGridSchema
    _state.add_field([
        serializable.Field('units', save=True, update=True),
        serializable.Field('time', save=True, update=True),
        serializable.Field('data_file', save=True, update=True),
        serializable.Field('grid_file', save=True, update=True)
    ])

    def __init__(self,
                 name=None,
                 units=None,
                 time=None,
                 ice_var=None,
                 ice_conc_var=None,
                 grid=None,
                 grid_file=None,
                 data_file=None,
                 **kwargs):
        """
        :param ice_var: gridded ice velocity property
        :param ice_conc_var: gridded ice concentration property
        :param grid: grid object shared by both properties
        """
        self.name = name
        self.units = units
        self.time = time
        self.ice_var = ice_var
        self.ice_conc_var = ice_conc_var
        self.grid = grid
        self.grid_file = grid_file
        self.data_file = data_file

    @classmethod
    def from_netCDF(cls,
                    filename=None,
                    grid_topology=None,
                    name=None,
                    units=None,
                    time=None,
                    ice_var=None,
                    ice_conc_var=None,
                    grid=None,
                    dataset=None,
                    grid_file=None,
                    data_file=None,
                    **kwargs):
        """
        Construct from a netCDF file, building grid, ice velocity and ice
        concentration properties for anything not supplied explicitly.
        """
        if filename is not None:
            data_file = filename
            grid_file = filename

        # open dataset handle(s): one shared handle when grid and data
        # come from the same file
        ds = None
        dg = None
        if dataset is None:
            if grid_file == data_file:
                ds = dg = _get_dataset(grid_file)
            else:
                ds = _get_dataset(data_file)
                dg = _get_dataset(grid_file)
        else:
            ds = dg = dataset

        if grid is None:
            grid = init_grid(grid_file,
                             grid_topology=grid_topology,
                             dataset=dg)
        if ice_var is None:
            ice_var = IceVelocity.from_netCDF(filename, grid=grid, dataset=ds)
        if time is None:
            time = ice_var.time

        if ice_conc_var is None:
            ice_conc_var = IceConcentration.from_netCDF(filename,
                                                        time=time,
                                                        grid=grid,
                                                        dataset=ds)
        if name is None:
            name = 'IceAwareProp'
        if units is None:
            units = ice_var.units
        # BUGFIX: pass the computed name through; it was discarded in favor
        # of a hard-coded placeholder name='foo'
        return cls(name=name,
                   units=units,
                   time=time,
                   ice_var=ice_var,
                   ice_conc_var=ice_conc_var,
                   grid=grid,
                   grid_file=grid_file,
                   data_file=data_file,
                   **kwargs)
class VelocityTS(TSVectorProp, serializable.Serializable):
    """
    A time-series 2D velocity: two component variables (u, v) on a shared
    time axis.  Provides webapi serialization that converts the stored
    components to (magnitude, direction) pairs and back.
    """

    _state = copy.deepcopy(serializable.Serializable._state)
    _schema = VelocityTSSchema

    _state.add_field([
        serializable.Field('units', save=True, update=True),
        serializable.Field('timeseries', save=True, update=True),
        serializable.Field('varnames', save=True, update=True)
    ])

    def __init__(self,
                 name=None,
                 units=None,
                 time=None,
                 variables=None,
                 **kwargs):
        """
        :param name: name of this property
        :param units: units of the velocity components
        :param time: time axis shared by both components
        :param variables: sequence of at most two component time series (u, v)
        :raises ValueError: if more than two component variables are given
        """

        if len(variables) > 2:
            raise ValueError('Only 2 dimensional velocities are supported')
        TSVectorProp.__init__(self,
                              name,
                              units,
                              time=time,
                              variables=variables)

    def __eq__(self, o):
        # Equal when name/units/time match and every component variable matches.
        if o is None:
            return False
        t1 = (self.name == o.name and self.units == o.units
              and self.time == o.time)
        t2 = True
        for i in range(0, len(self._variables)):
            if self._variables[i] != o._variables[i]:
                t2 = False
                break

        return t1 and t2

    def __str__(self):
        return self.serialize(json_='save').__repr__()

    @property
    def timeseries(self):
        """List of (time, (u, v)) tuples built from the component series."""
        x = self.variables[0].data
        y = self.variables[1].data
        # Python 2 map over three parallel sequences -> list of tuples
        return map(lambda t, x, y: (t, (x, y)), self._time, x, y)

    def serialize(self, json_='webapi'):
        """
        Serialize this property.  For json_='webapi' the raw (u, v)
        components are replaced by (magnitude, direction) pairs, with the
        original component varnames appended after 'magnitude'/'direction'.
        """
        dict_ = serializable.Serializable.serialize(self, json_=json_)
        # The following code is to cover the needs of webapi
        if json_ == 'webapi':
            dict_.pop('timeseries')
            dict_.pop('units')
            x = self.variables[0].data
            y = self.variables[1].data
            # convert components to an angle in degrees and a magnitude;
            # presumably a compass-style angle convention — TODO confirm.
            # deserialize() below applies the exact inverse transform.
            direction = -(np.arctan2(y, x) * 180 / np.pi + 90)
            magnitude = np.sqrt(x**2 + y**2)
            ts = (unicode(tx.isoformat()) for tx in self._time)
            dict_['timeseries'] = map(lambda t, x, y: (t, (x, y)), ts,
                                      magnitude, direction)
            dict_['units'] = (unicode(self.variables[0].units), u'degrees')
            dict_['varnames'] = [
                u'magnitude', u'direction', dict_['varnames'][0],
                dict_['varnames'][1]
            ]
        return dict_

    @classmethod
    def deserialize(cls, json_):
        """
        Deserialize; when the payload is in (magnitude, direction) form
        (units tuple ends in 'degrees'), convert it back to (u, v)
        components — the inverse of serialize() above.
        """
        dict_ = super(VelocityTS, cls).deserialize(json_)

        ts, data = zip(*dict_.pop('timeseries'))
        ts = np.array(ts)
        data = np.array(data).T
        units = dict_['units']
        if len(units) > 1 and units[1] == 'degrees':
            u_data, v_data = data
            # invert the angle convention used in serialize(), then project
            # the magnitude onto the component axes
            v_data = ((-v_data - 90) * np.pi / 180)
            u_t = u_data * np.cos(v_data)
            v_data = u_data * np.sin(v_data)
            u_data = u_t
            data = np.array((u_data, v_data))
            # drop the synthetic 'magnitude'/'direction' names, keeping the
            # original component varnames
            dict_['varnames'] = dict_['varnames'][2:]

        units = units[0]
        dict_['units'] = units
        dict_['time'] = ts
        dict_['data'] = data
        return dict_

    @classmethod
    def new_from_dict(cls, dict_):
        # Rebuild the component TimeSeriesProp objects from the flat arrays
        # produced by deserialize(), then delegate construction upward.
        varnames = dict_['varnames']
        vs = []
        for i, varname in enumerate(varnames):
            vs.append(
                TimeSeriesProp(name=varname,
                               units=dict_['units'],
                               time=dict_['time'],
                               data=dict_['data'][i]))
        dict_.pop('data')
        dict_['variables'] = vs
        return super(VelocityTS, cls).new_from_dict(dict_)
# Example #7
class CatsMover(CurrentMoversBase, serializable.Serializable):
    """
    Mover for CATS-generated current patterns, optionally scaled at a
    reference point and driven by a Tide object.
    """

    _state = copy.deepcopy(CurrentMoversBase._state)

    _update = [
        'scale', 'scale_refpoint', 'scale_value', 'up_cur_uncertain',
        'down_cur_uncertain', 'right_cur_uncertain', 'left_cur_uncertain',
        'uncertain_eddy_diffusion', 'uncertain_eddy_v0'
    ]
    _create = []
    _create.extend(_update)
    _state.add(update=_update, save=_create)
    _state.add_field([
        serializable.Field('filename',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('tide', save=True, update=True, save_reference=True)
    ])
    _schema = CatsMoverSchema

    def __init__(self, filename, tide=None, uncertain_duration=48, **kwargs):
        """
        Uses super to invoke base class __init__ method.

        :param filename: file containing currents patterns for Cats

        Optional parameters (kwargs).
        Defaults are defined by CyCatsMover object.

        :param tide: a gnome.environment.Tide object to be attached to
                     CatsMover
        :param scale: a boolean to indicate whether to scale value at
                      reference point or not
        :param scale_value: value used for scaling at reference point
        :param scale_refpoint: reference location (long, lat, z). The scaling
                               applied to all data is determined by scaling
                               the raw value at this location.

        :param uncertain_duration: how often does a given uncertain element
                                   gets reset
        :param uncertain_time_delay: when does the uncertainly kick in.
        :param up_cur_uncertain: Scale for uncertainty along the flow
        :param down_cur_uncertain: Scale for uncertainty along the flow
        :param right_cur_uncertain: Scale for uncertainty across the flow
        :param left_cur_uncertain: Scale for uncertainty across the flow
        :param uncertain_eddy_diffusion: Diffusion coefficient for
                                         eddy diffusion. Default is 0.
        :param uncertain_eddy_v0: Default is .1 (Check that this is still used)
        Remaining kwargs are passed onto Mover's __init__ using super.
        See Mover documentation for remaining valid kwargs.
        """
        if not os.path.exists(filename):
            raise ValueError(
                'Path for Cats filename does not exist: {0}'.format(filename))

        self._filename = filename

        # check if this is stored with cy_cats_mover?
        self.mover = CyCatsMover()
        self.mover.text_read(filename)
        self.name = os.path.split(filename)[1]

        self._tide = None
        if tide is not None:
            self.tide = tide

        self.scale = kwargs.pop('scale', self.mover.scale_type)
        # BUGFIX: use pop (was .get) so 'scale_value' is consumed here,
        # consistent with the other options, and is not also forwarded to
        # the base-class __init__ through **kwargs below
        self.scale_value = kwargs.pop('scale_value', self.mover.scale_value)

        self.up_cur_uncertain = kwargs.pop('up_cur_uncertain', .3)
        self.down_cur_uncertain = kwargs.pop('down_cur_uncertain', -.3)
        self.right_cur_uncertain = kwargs.pop('right_cur_uncertain', .1)
        self.left_cur_uncertain = kwargs.pop('left_cur_uncertain', -.1)
        self.uncertain_eddy_diffusion = kwargs.pop('uncertain_eddy_diffusion',
                                                   0)
        self.uncertain_eddy_v0 = kwargs.pop('uncertain_eddy_v0', .1)
        # TODO: no need to check for None since properties that are None
        # are not persisted

        if 'scale_refpoint' in kwargs:
            self.scale_refpoint = kwargs.pop('scale_refpoint')
            self.mover.compute_velocity_scale()

        # scaling without a reference point is meaningless
        if (self.scale and self.scale_value != 0.0
                and self.scale_refpoint is None):
            raise TypeError("Provide a reference point in 'scale_refpoint'.")

        super(CatsMover, self).__init__(uncertain_duration, **kwargs)

    def __repr__(self):
        return 'CatsMover(filename={0})'.format(self.filename)

    # Properties delegating to the underlying CyCatsMover object
    filename = property(lambda self: basename(self._filename),
                        lambda self, val: setattr(self, '_filename', val))

    scale = property(
        lambda self: bool(self.mover.scale_type),
        lambda self, val: setattr(self.mover, 'scale_type', int(val)))

    scale_value = property(
        lambda self: self.mover.scale_value,
        lambda self, val: setattr(self.mover, 'scale_value', val))

    up_cur_uncertain = property(
        lambda self: self.mover.up_cur_uncertain,
        lambda self, val: setattr(self.mover, 'up_cur_uncertain', val))

    down_cur_uncertain = property(
        lambda self: self.mover.down_cur_uncertain,
        lambda self, val: setattr(self.mover, 'down_cur_uncertain', val))

    right_cur_uncertain = property(
        lambda self: self.mover.right_cur_uncertain,
        lambda self, val: setattr(self.mover, 'right_cur_uncertain', val))

    left_cur_uncertain = property(
        lambda self: self.mover.left_cur_uncertain,
        lambda self, val: setattr(self.mover, 'left_cur_uncertain', val))

    uncertain_eddy_diffusion = property(
        lambda self: self.mover.uncertain_eddy_diffusion,
        lambda self, val: setattr(self.mover, 'uncertain_eddy_diffusion', val))

    uncertain_eddy_v0 = property(
        lambda self: self.mover.uncertain_eddy_v0,
        lambda self, val: setattr(self.mover, 'uncertain_eddy_v0', val))

    @property
    def ref_scale(self):
        """Velocity scale computed by the mover at the reference point."""
        return self.mover.ref_scale

    @property
    def scale_refpoint(self):
        return self.mover.ref_point

    @scale_refpoint.setter
    def scale_refpoint(self, val):
        '''
        Must be a tuple of length 2 or 3: (long, lat, z). If only (long, lat)
        is given, the set z = 0
        '''
        if len(val) == 2:
            self.mover.ref_point = (val[0], val[1], 0.)
        else:
            self.mover.ref_point = val

        # moving the reference point invalidates the velocity scale
        self.mover.compute_velocity_scale()

    @property
    def tide(self):
        return self._tide

    @tide.setter
    def tide(self, tide_obj):
        if not isinstance(tide_obj, environment.Tide):
            raise TypeError('tide must be of type environment.Tide')

        if isinstance(tide_obj.cy_obj, CyShioTime):
            self.mover.set_shio(tide_obj.cy_obj)
        elif isinstance(tide_obj.cy_obj, CyOSSMTime):
            self.mover.set_ossm(tide_obj.cy_obj)
        else:
            raise TypeError('Tide.cy_obj attribute must be either '
                            'CyOSSMTime or CyShioTime type for CatsMover.')

        self._tide = tide_obj

    def get_grid_data(self):
        """
            Invokes the GetToplogyHdl method of TriGridVel_c object
        """
        # we are assuming cats are always triangle grids,
        # but may want to extend
        return self.get_triangles()

    def get_center_points(self):
        """Center points of the (triangular) grid cells."""
        return self.get_triangle_center_points()

    def get_scaled_velocities(self, model_time):
        """
        Get file values scaled to ref pt value, with tide applied (if any)
        """
        velocities = self.mover._get_velocity_handle()
        ref_scale = self.ref_scale  # this needs to be computed, needs a time

        if self._tide is not None:
            time_value = self._tide.cy_obj.get_time_value(model_time)
            tide = time_value[0][0]
        else:
            tide = 1

        velocities['u'] *= ref_scale * tide
        velocities['v'] *= ref_scale * tide

        return velocities

    def serialize(self, json_='webapi'):
        """
        Since 'wind' property is saved as a reference when used in save file
        and 'save' option, need to add appropriate node to WindMover schema
        """
        toserial = self.to_serialize(json_)
        schema = self.__class__._schema()

        if json_ == 'save':
            # save files need the full path, not the basename the property
            # exposes
            toserial['filename'] = self._filename

        if 'tide' in toserial:
            schema.add(environment.TideSchema(name='tide'))

        return schema.serialize(toserial)

    @classmethod
    def deserialize(cls, json_):
        """
        append correct schema for wind object
        """
        if not cls.is_sparse(json_):
            schema = cls._schema()

            if 'tide' in json_:
                schema.add(environment.TideSchema())

            return schema.deserialize(json_)
        else:
            return json_
# Example #8
class Time(serializable.Serializable):
    """
    Representation of a time axis. Provides interpolation alphas and indexing.
    """

    _state = copy.deepcopy(serializable.Serializable._state)
    _schema = TimeSchema

    _state.add_field([serializable.Field('filename', save=True, update=True, isdatafile=True),
                      serializable.Field('varname', save=True, update=True),
                      serializable.Field('data', save=True, update=True)])

    # shared singleton returned by constant_time()
    _const_time = None

    def __init__(self,
                 time=None,
                 filename=None,
                 varname=None,
                 tz_offset=None,
                 offset=None,
                 **kwargs):
        '''
        Representation of a time axis. Provides interpolation alphas and indexing.

        :param time: Ascending list of times to use
        :param tz_offset: offset to compensate for time zone shifts
        :type time: netCDF4.Variable or [] of datetime.datetime
        :type tz_offset: datetime.timedelta

        :raises ValueError: if the sequence is not ascending or has duplicates
        '''
        if isinstance(time, (nc4.Variable, nc4._netCDF4._Variable)):
            self.time = nc4.num2date(time[:], units=time.units)
        else:
            self.time = time

        self.filename = filename
        self.varname = varname

        if tz_offset is not None:
            self.time += tz_offset

        if not self._timeseries_is_ascending(self.time):
            raise ValueError("Time sequence is not ascending")
        if self._has_duplicates(self.time):
            raise ValueError("Time sequence has duplicate entries")

        self.name = time.name if hasattr(time, 'name') else None

    @classmethod
    def from_netCDF(cls,
                    filename=None,
                    dataset=None,
                    varname=None,
                    datavar=None,
                    tz_offset=None,
                    **kwargs):
        """
        Build a Time axis from a netCDF dataset, locating the time variable
        from an associated data variable when varname is not given.
        Returns None when no time dimension can be found for datavar.
        """
        if dataset is None:
            dataset = _get_dataset(filename)
        if datavar is not None:
            if hasattr(datavar, 'time') and datavar.time in dataset.dimensions.keys():
                varname = datavar.time
            else:
                # fall back to the first dimension if it looks like time
                varname = datavar.dimensions[0] if 'time' in datavar.dimensions[0] else None
                if varname is None:
                    return None
        time = cls(time=dataset[varname],
                   filename=filename,
                   varname=varname,
                   tz_offset=tz_offset,
                   **kwargs
                       )
        return time

    @staticmethod
    def constant_time():
        """A shared single-point (constant) time axis."""
        if Time._const_time is None:
            Time._const_time = Time([datetime.now()])
        return Time._const_time

    @classmethod
    def from_file(cls, filename=None, **kwargs):
        """Read one '%c'-formatted datetime per line from a text file."""
        if isinstance(filename, list):
            filename = filename[0]
        t = []
        # `with` guarantees the handle is closed even if parsing raises
        with open(filename, 'r') as fn:
            for l in fn:
                l = l.rstrip()
                # BUGFIX: rstrip() never returns None, so the original
                # `l is not None` test let blank lines through and crashed
                # strptime; skip empty lines instead
                if l:
                    t.append(datetime.strptime(l, '%c'))
        return Time(t)

    def save(self, saveloc, references=None, name=None):
        '''
        Delegate to the base class save; return its result so callers
        receive the save information (the original dropped it).
        '''
        return super(Time, self).save(saveloc, references, name)

    def _write_time_to_zip(self, saveloc, ts_name):
        '''
        use a StringIO type of file descriptor and write directly to zipfile
        '''
        fd = StringIO.StringIO()
        self._write_time_to_fd(fd)
        self._write_to_zip(saveloc, ts_name, fd.getvalue())

    def _write_time_to_file(self, datafile):
        '''write timeseries data to file '''
        with open(datafile, 'w') as fd:
            self._write_time_to_fd(fd)

    def _write_time_to_fd(self, fd):
        # one '%c'-formatted datetime per line; inverse of from_file()
        for t in self.time:
            fd.write(t.strftime('%c') + '\n')

    @classmethod
    def new_from_dict(cls, dict_):
        """Reconstruct from serialized form (netCDF-backed when 'varname' present)."""
        if 'varname' not in dict_:
            dict_['time'] = dict_['data']
            return cls(**dict_)
        else:
            return cls.from_netCDF(**dict_)

    @property
    def data(self):
        # only serialize raw times when they did not come from a data file
        if self.filename is None:
            return self.time
        else:
            return None

    def __len__(self):
        return len(self.time)

    def __iter__(self):
        return self.time.__iter__()

    def __eq__(self, other):
        r = self.time == other.time
        return all(r) if hasattr(r, '__len__') else r

    def __ne__(self, other):
        return not self.__eq__(other)

    def _timeseries_is_ascending(self, ts):
        return all(np.sort(ts) == ts)

    def _has_duplicates(self, time):
        # a single-entry axis is never considered to have duplicates
        return len(np.unique(time)) != len(time) and len(time) != 1

    @property
    def min_time(self):
        '''
        First time in series

        :rtype: datetime.datetime
        '''
        return self.time[0]

    @property
    def max_time(self):
        '''
        Last time in series

        :rtype: datetime.datetime
        '''
        return self.time[-1]

    def get_time_array(self):
        """A copy/slice of the full time array."""
        return self.time[:]

    def time_in_bounds(self, time):
        '''
        Checks if time provided is within the bounds represented by this object.

        :param time: time to be queried
        :type time: datetime.datetime
        :rtype: boolean
        '''
        # BUGFIX: was `not time < self.min_time or time > self.max_time`;
        # `not` binds tighter than `or`, so times past max_time wrongly
        # returned True. Parenthesize to negate the whole out-of-bounds test.
        return not (time < self.min_time or time > self.max_time)

    def valid_time(self, time):
        """Raise ValueError if time is outside [min_time, max_time]."""
        if time < self.min_time or time > self.max_time:
            raise ValueError('time specified ({0}) is not within the bounds of the time ({1} to {2})'.format(
                time.strftime('%c'), self.min_time.strftime('%c'), self.max_time.strftime('%c')))

    def index_of(self, time, extrapolate=False):
        '''
        Returns the index of the provided time with respect to the time intervals in the file.

        :param time: Time to be queried
        :param extrapolate:
        :type time: datetime.datetime
        :type extrapolate: boolean
        :return: index of first time before specified time
        :rtype: integer
        '''
        if not (extrapolate or len(self.time) == 1):
            self.valid_time(time)
        index = np.searchsorted(self.time, time)
        return index

    def interp_alpha(self, time, extrapolate=False):
        '''
        Returns interpolation alpha for the specified time

        :param time: Time to be queried
        :param extrapolate:
        :type time: datetime.datetime
        :type extrapolate: boolean
        :return: interpolation alpha
        :rtype: double (0 <= r <= 1)
        '''
        # BUGFIX: was `if not len(self.time) == 1 or not extrapolate:`,
        # which validated even when extrapolating (len != 1) and validated
        # single-point axes; use the same guard as index_of above.
        if not (extrapolate or len(self.time) == 1):
            self.valid_time(time)
        i0 = self.index_of(time, extrapolate)
        if i0 > len(self.time) - 1:
            return 1
        if i0 == 0:
            return 0
        t0 = self.time[i0 - 1]
        t1 = self.time[i0]
        return (time - t0).total_seconds() / (t1 - t0).total_seconds()
# Example #9
class Spill(serializable.Serializable):
    """
    Models a spill
    """
    _update = ['on', 'release', 'amount', 'units', 'amount_uncertainty_scale']

    _create = ['frac_coverage']
    _create.extend(_update)

    _state = copy.deepcopy(serializable.Serializable._state)
    _state.add(save=_create, update=_update)
    _state += serializable.Field('element_type',
                                 save=True,
                                 save_reference=True,
                                 update=True)
    _schema = SpillSchema

    valid_vol_units = _valid_units('Volume')
    valid_mass_units = _valid_units('Mass')

    def __init__(
            self,
            release,
            element_type=None,
            substance=None,
            on=True,
            amount=None,  # could be volume or mass
            units=None,
            amount_uncertainty_scale=0.0,
            name='Spill'):
        """
        Spills used by the gnome model. It contains a release object, which
        releases elements. It also contains an element_type object which
        contains the type of substance spilled and it initializes data arrays
        to non-default values (non-zero).

        :param release: an object defining how elements are to be released
        :type release: derived from :class:`~gnome.spill.Release`

        **Optional parameters (kwargs):**

        :param element_type: ElementType object defining the type
            of elements that are released. These are spill specific properties
            of the elements.
        :type element_type:
            :class:`~gnome.spill.elements.element_type.ElementType`
        :param bool on=True: Toggles the spill on/off.
        :param float amount=None: mass or volume of oil spilled.
        :param str units=None: must provide units for amount spilled.
        :param str name='Spill': a name for the spill.

        :raises TypeError: if 'amount' is given without 'units'

        .. note::
            Define either volume or mass in 'amount' attribute and provide
            appropriate 'units'. Defines default element_type as floating
            elements with mass if the Spill's 'amount' property is not None.
            If amount property is None, then just floating elements
            (ie. 'windages')
        """
        self.release = release
        if element_type is None:
            element_type = elements.floating(substance=substance)

        self.element_type = element_type

        self.on = on  # spill is active or not
        self._units = None
        self.amount = amount

        if amount is not None:
            if units is None:
                raise TypeError("Units must be provided with amount spilled")
            else:
                self.units = units

        self.amount_uncertainty_scale = amount_uncertainty_scale
        # fraction of area covered by oil
        self.frac_coverage = 1.0
        self.name = name

    def __repr__(self):
        return ('{0.__class__.__module__}.{0.__class__.__name__}('
                'release={0.release!r}, '
                'element_type={0.element_type}, '
                'on={0.on}, '
                'amount={0.amount}, '
                'units="{0.units}", '
                ')'.format(self))

    def __eq__(self, other):
        """
        over ride base == operator defined in Serializable class.
        Spill object contains nested objects like ElementType and Release
        objects. Check all properties here so nested objects properties
        can be checked in the __eq__ implementation within the nested objects
        """
        if not self._check_type(other):
            return False

        if (self._state.get_field_by_attribute('save') !=
                other._state.get_field_by_attribute('save')):
            return False

        for name in self._state.get_names('save'):
            if not hasattr(self, name):
                """
                for an attribute like obj_type, base class has
                obj_type_to_dict method so let base class convert the attribute
                to dict, then compare
                """
                if (self.attr_to_dict(name) != other.attr_to_dict(name)):
                    return False

            elif getattr(self, name) != getattr(other, name):
                return False

        return True

    def _check_units(self, units):
        """
        Checks the user provided units are in list of valid volume
        or mass units

        :raises uc.InvalidUnitError: if 'units' is in neither list
        """
        if (units in self.valid_vol_units or units in self.valid_mass_units):
            return True
        else:
            msg = ('Units for amount spilled must be in volume or mass units. '
                   'Valid units for volume: {0}, for mass: {1} ').format(
                       self.valid_vol_units, self.valid_mass_units)
            ex = uc.InvalidUnitError(msg)
            self.logger.exception(ex, exc_info=True)
            raise ex  # this should be raised since run will fail otherwise

    def _get_all_props(self):
        'return all properties accessible through get'
        all_props = []

        # release properties
        rel_props = getmembers(self.release,
                               predicate=lambda p: (not ismethod(p)))
        rel_props = [a[0] for a in rel_props if not a[0].startswith('_')]

        all_props.extend(rel_props)

        # element_type properties (skip private names and _state)
        et_props = getmembers(self.element_type,
                              predicate=lambda p: (not ismethod(p)))
        et_props = [
            a[0] for a in et_props
            if not a[0].startswith('_') and a[0] != '_state'
        ]

        all_props.extend(et_props)

        # properties for each of the initializer objects
        i_props = []
        for val in self.element_type.initializers:
            toadd = getmembers(val, lambda p: (not ismethod(p)))
            i_props.extend([
                a[0] for a in toadd
                if not a[0].startswith('_') and a[0] != '_state'
            ])

        # extend once, after the loop; previously this was inside the loop,
        # so properties of earlier initializers were appended once per
        # remaining iteration, producing duplicates
        all_props.extend(i_props)
        return all_props

    def _elem_mass(self, num_new_particles, current_time, time_step):
        '''
        get the mass of each element released in duration specified by
        'time_step'
        Function is only called if num_new_particles > 0 - no check is made
        for this case
        '''
        # set 'mass' data array if amount is given
        le_mass = 0.
        _mass = self.get_mass('kg')
        self.logger.debug(self._pid + "spill mass (kg): {0}".format(_mass))

        if _mass is not None:
            rd_sec = self.get('release_duration')
            if rd_sec == 0:
                # instantaneous release: split total mass over all elements
                try:
                    le_mass = _mass / self.get('num_elements')
                except TypeError:
                    le_mass = _mass / self.get('num_per_timestep')
            else:
                # continuous release: prorate mass by the portion of this
                # time_step that overlaps the release interval
                time_at_step_end = current_time + timedelta(seconds=time_step)
                if self.get('release_time') > current_time:
                    # first time_step in which particles are released
                    time_step = (time_at_step_end -
                                 self.get('release_time')).total_seconds()

                if self.get('end_release_time') < time_at_step_end:
                    time_step = (self.get('end_release_time') -
                                 current_time).total_seconds()

                _mass_in_ts = _mass / rd_sec * time_step
                le_mass = _mass_in_ts / num_new_particles

        self.logger.debug(self._pid + "LE mass (kg): {0}".format(le_mass))

        return le_mass

    def contains_object(self, obj_id):
        '''
        Returns True if this spill, its release, or its element_type
        (recursively) contains the object with the given id
        '''
        for o in (self.element_type, self.release):
            if o.id == obj_id:
                return True

            if (hasattr(o, 'contains_object') and o.contains_object(obj_id)):
                return True

        return False

    def set(self, prop, val):
        """
        sets an existing property. The property could be of one of the
        contained objects like 'Release' or 'ElementType'
        It can also be a property of one of the initializers contained in
        the 'ElementType' object.

        If the property doesn't exist for any of these, then an error is raised
        since user cannot set a property that does not exist using this method

        For example: set('windage_range', (0.4, 0.4)) sets the windage_range
        assuming the element_type is floating

        .. todo::
            There is an issue in that if two initializers have the same
            property - could be the case if they both define a 'distribution',
            then it does not know which one to return
        """
        if prop == 'num_released':
            self.logger.warning("cannot set 'num_released' attribute")
            # previously execution fell through and could still set the
            # attribute on the release object despite the warning
            return

        # we don't want to add an attribute that doesn't already exist
        # first check to see that the attribute exists, then change it else
        # raise error
        if hasattr(self.release, prop):
            setattr(self.release, prop, val)
        elif hasattr(self.element_type, prop):
            setattr(self.element_type, prop, val)
        else:
            for init in self.element_type.initializers:
                if hasattr(init, prop):
                    setattr(init, prop, val)
                    break
            else:
                # no initializer had the attribute - warn once after the
                # loop (previously the warning fired for every initializer
                # that lacked the attribute, even if a later one had it)
                self.logger.warning('{0} attribute does not exist '
                                    'in element_type '
                                    'or release object'.format(prop))

    def get(self, prop=None):
        """
        for get(), return all properties of embedded release object and
        element_type initializer objects. If 'prop' is not None, then return
        the property

        For example: get('windage_range') returns the 'windage_range' assuming
        the element_type = floating()

        .. todo::
            There is an issue in that if two initializers have the same
            property - could be the case if they both define a 'distribution',
            then it does not know which one to return
        """
        # Return all properties
        if prop is None:
            return self._get_all_props()

        try:
            return getattr(self.release, prop)
        except AttributeError:
            pass

        try:
            return getattr(self.element_type, prop)
        except AttributeError:
            pass

        for init in self.element_type.initializers:
            try:
                return getattr(init, prop)
            except AttributeError:
                pass

        # nothing returned, then property was not found - raise exception or
        # return None?
        self.logger.warning("{0} attribute does not exist in element_type"
                            " or release object or initializers".format(prop))
        return None

    def get_initializer_by_name(self, name):
        ''' get first initializer in list whose name matches 'name' '''
        # iterate the initializers directly; the old code wrapped them in
        # enumerate(), producing (index, initializer) tuples which have no
        # .name attribute, so the lookup always raised AttributeError
        init = [
            i for i in self.element_type.initializers
            if i.name == name
        ]

        if len(init) == 0:
            return None
        else:
            return init[0]

    def has_initializer(self, name):
        '''
        Returns True if an initializer is present in the list which sets the
        data_array corresponding with 'name', otherwise returns False
        '''
        for i in self.element_type.initializers:
            if name in i.array_types:
                return True

        return False

    def get_initializer(self, name=None):
        '''
        if name is None, return list of all initializers else return initializer
        that sets given 'name'. 'name' refers to the data_array initialized by
        initializer. For instance, if name='rise_vel', function will look in
        all initializers to find the one whose array_types contain 'rise_vel'.

        If multiple initializers set 'name', then return the first one in the
        list. Although nothing prevents the user from having two initializers
        for the same data_array, it doesn't make much sense.

        The default 'name' of an initializer is the data_array that a mover
        requires and that the initializer is setting. For instance,

            init = InitRiseVelFromDist()
            init.name is 'rise_vel' by default

        is an initializer that sets the 'rise_vel' if a RiseVelocityMover is
        included in the Model. User can change the name of the initializer

        '''
        if name is None:
            return self.element_type.initializers

        init = None
        for i in self.element_type.initializers:
            if name in i.array_types:
                return i

        return init

    def set_initializer(self, init):
        '''
        set/add given initializer. Function looks for first initializer in list
        with same array_types and replaces it if found else it appends this to
        list of initializers.

        .. note::
            nothing prevents user from defining two initializers that
            set the same data_arrays; however, there isn't a use case for it

        '''
        ix = [
            ix for ix, i in enumerate(self.element_type.initializers)
            if sorted(i.array_types) == sorted(init.array_types)
        ]
        if len(ix) == 0:
            self.element_type.initializers.append(init)
        else:
            self.element_type.initializers[ix[0]] = init

    def del_initializer(self, name):
        '''
        delete the initializer with given 'name'

        The default 'name' of an initializer is the data_array that a mover
        requires and that the initializer is setting. For instance,
        the following is an initializer that sets the 'rise_vel' if a
        RiseVelocityMover is included in the Model.

            init = InitRiseVelFromDist()
            init.name is 'rise_vel' by default

        If name = 'rise_vel', all initializers with this name will be deleted
        '''
        ixs = [
            ix for ix, i in enumerate(self.element_type.initializers)
            if i.name == name
        ]
        # delete from the end so earlier indices stay valid; deleting in
        # ascending order shifts the list and skips/deletes wrong entries
        # when more than one initializer matches
        for ix in reversed(ixs):
            del self.element_type.initializers[ix]

    @property
    def units(self):
        """
        Default units in which amount of oil spilled was entered by user.
        The 'amount' property is returned in these 'units'
        """
        return self._units

    @units.setter
    def units(self, units):
        """
        set default units in which volume data is returned
        """
        self._check_units(units)  # check validity before setting
        self._units = units

    def get_mass(self, units=None):
        '''
        Return the mass released during the spill.
        User can also specify desired output units in the function.
        If units are not specified, then return in 'SI' units ('kg')
        If volume is given, then use density to find mass. Density is always
        at 15degC, consistent with API definition
        '''
        if self.amount is None:
            return self.amount

        # first convert amount to 'kg'; self.units was validated by the
        # units setter, so it is in exactly one of these two lists
        if self.units in self.valid_mass_units:
            mass = uc.convert('Mass', self.units, 'kg', self.amount)
        elif self.units in self.valid_vol_units:
            vol = uc.convert('Volume', self.units, 'm^3', self.amount)
            mass = self.element_type.substance.get_density() * vol

        if units is None or units == 'kg':
            return mass
        else:
            self._check_units(units)
            return uc.convert('Mass', 'kg', units, mass)

    def uncertain_copy(self):
        """
        Returns a deepcopy of this spill for the uncertainty runs

        The copy has everything the same, including the spill_num,
        but it is a new object with a new id.

        Not much to this method, but it could be overridden to do something
        fancier in the future or a subclass.

        There are a number of python objects that cannot be deepcopied.
        - Logger objects

        So we copy them temporarily to local variables before we deepcopy
        our Spill object.
        """
        u_copy = copy.deepcopy(self)
        self.logger.debug(self._pid + "deepcopied spill {0}".format(self.id))

        return u_copy

    def set_amount_uncertainty(self, up_or_down=None):
        '''
            This function shifts the spill amount based on a scale value
            in the range [0.0 ... 1.0].  The maximum uncertainty scale value
            is (2/3) * spill_amount.
            We determine either an upper uncertainty or a lower uncertainty
            multiplier.  Then we shift our spill amount value based on it.

            Since we are irreversibly changing the spill amount value,
            we should probably do this only once.
        '''
        if (self.amount_uncertainty_scale <= 0.0
                or self.amount_uncertainty_scale > 1.0):
            return False

        if up_or_down == 'up':
            scale = (1.0 + (2.0 / 3.0) * self.amount_uncertainty_scale)
        elif up_or_down == 'down':
            scale = (1.0 - (2.0 / 3.0) * self.amount_uncertainty_scale)
        else:
            return False

        self.amount *= scale

        return True

    def rewind(self):
        """
        rewinds the release to original status (before anything has been
        released).
        """
        self.release.rewind()

    def num_elements_to_release(self, current_time, time_step):
        """
        Determines the number of elements to be released during:
        current_time + time_step

        It invokes the num_elements_to_release method for the the unerlying
        release object: self.release.num_elements_to_release()

        :param current_time: current time
        :type current_time: datetime.datetime
        :param int time_step: the time step, sometimes used to decide how many
            should get released.

        :returns: the number of elements that will be released. This is taken
            by SpillContainer to initialize all data_arrays.
        """
        return self.release.num_elements_to_release(current_time, time_step)

    def set_newparticle_values(self, num_new_particles, current_time,
                               time_step, data_arrays):
        """
        SpillContainer will release elements and initialize all data_arrays
        to default initial value. The SpillContainer gets passed as input and
        the data_arrays for 'position' get initialized correctly by the release
        object: self.release.set_newparticle_positions()

        If a Spill Amount is given, the Spill object also sets the 'mass' data
        array; else 'mass' array remains '0'

        :param int num_new_particles: number of new particles that were added.
            Always greater than 0
        :param current_time: current time
        :type current_time: datetime.datetime
        :param time_step: the time step, sometimes used to decide how many
            should get released.
        :type time_step: integer seconds
        :param data_arrays: dict of data_arrays provided by the SpillContainer.
            Look for 'positions' array in the dict and update positions for
            latest num_new_particles that are released
        :type data_arrays: dict containing numpy arrays for values

        Also, the set_newparticle_values() method for all element_type gets
        called so each element_type sets the values for its own data correctly
        """
        if self.element_type is not None:
            self.element_type.set_newparticle_values(num_new_particles, self,
                                                     data_arrays)

        self.release.set_newparticle_positions(num_new_particles, current_time,
                                               time_step, data_arrays)

        data_arrays['mass'][-num_new_particles:] = \
            self._elem_mass(num_new_particles, current_time, time_step)

        # set arrays that are spill specific - 'frac_coverage'
        if 'frac_coverage' in data_arrays:
            data_arrays['frac_coverage'][-num_new_particles:] = \
                self.frac_coverage

    def serialize(self, json_='webapi'):
        """
        override base serialize implementation
        Need to add node for release object and element_type object
        """
        toserial = self.to_serialize(json_)
        schema = self.__class__._schema()

        o_json_ = schema.serialize(toserial)
        o_json_['element_type'] = self.element_type.serialize(json_)
        o_json_['release'] = self.release.serialize(json_)

        return o_json_

    @classmethod
    def deserialize(cls, json_):
        """
        Instead of creating schema dynamically for Spill() before
        deserialization, call nested object's serialize/deserialize methods

        We also need to accept sparse json objects, in which case we will
        not treat them, but just send them back.
        """
        if not cls.is_sparse(json_):
            schema = cls._schema()

            dict_ = schema.deserialize(json_)
            relcls = class_from_objtype(json_['release']['obj_type'])
            dict_['release'] = relcls.deserialize(json_['release'])

            if json_['json_'] == 'webapi':
                '''
                save files store a reference to element_type so it will get
                deserialized, created and added to this dict by load method
                '''
                etcls = \
                    class_from_objtype(json_['element_type']['obj_type'])
                dict_['element_type'] = \
                    etcls.deserialize(json_['element_type'])

            else:
                '''
                Convert nested dict (release object) back into object. The
                ElementType is now saved as a reference so it is taken care of
                by load method
                For the 'webapi', we're not always creating a new object
                so do this only for 'save' files
                '''
                obj = relcls.new_from_dict(dict_.pop('release'))
                dict_['release'] = obj

            return dict_
        else:
            return json_
# Example #10
# 0
class GridWindMover(WindMoversBase, serializable.Serializable):
    _state = copy.deepcopy(WindMoversBase._state)
    _state.add(update=['wind_scale', 'extrapolate'],
               save=['wind_scale', 'extrapolate'])
    _state.add_field([
        serializable.Field('wind_file', save=True, read=True,
                           isdatafile=True, test_for_eq=False),
        serializable.Field('topology_file', save=True, read=True,
                           isdatafile=True, test_for_eq=False),
    ])

    _schema = GridWindMoverSchema

    def __init__(self,
                 wind_file,
                 topology_file=None,
                 extrapolate=False,
                 time_offset=0,
                 **kwargs):
        """
        Mover driven by wind data defined on a grid, backed by the
        CyGridWindMover extension.

        :param wind_file: file containing wind data on a grid
        :param topology_file: Default is None. When exporting topology, it
                              is stored in this file
        :param wind_scale: Value to scale wind data
        :param extrapolate: Allow current data to be extrapolated before and
                            after file data
        :param time_offset: Time zone shift if data is in GMT

        Remaining kwargs are passed to the base class via
        super(GridWindMover, self).__init__(**kwargs)
        """
        if not os.path.exists(wind_file):
            raise ValueError(
                'Path for wind file does not exist: {0}'.format(wind_file))

        topo_missing = (topology_file is not None
                        and not os.path.exists(topology_file))
        if topo_missing:
            raise ValueError(
                'Path for Topology file does not exist: {0}'.format(
                    topology_file))

        # file paths are kept here as well as on the cython mover
        self.wind_file = wind_file
        self.topology_file = topology_file

        scale = kwargs.pop('wind_scale', 1)
        self.mover = CyGridWindMover(wind_scale=scale)
        self.name = os.path.split(wind_file)[1]
        super(GridWindMover, self).__init__(**kwargs)

        # the file must be read before the time bounds can be queried
        self.mover.text_read(wind_file, topology_file)
        start_sec = self.mover.get_start_time()
        stop_sec = self.mover.get_end_time()
        self.real_data_start = time_utils.sec_to_datetime(start_sec)
        self.real_data_stop = time_utils.sec_to_datetime(stop_sec)
        self.mover.extrapolate_in_time(extrapolate)
        self.mover.offset_time(time_offset * 3600.)

    def __repr__(self):
        """
        .. todo::
            We probably want to include more information.
        """
        return 'GridWindMover(\n{0})'.format(self._state_as_str())

    def __str__(self):
        return 'GridWindMover - current _state.\n' + self._state_as_str()

    # the following properties simply delegate to the cython mover

    @property
    def wind_scale(self):
        return self.mover.wind_scale

    @wind_scale.setter
    def wind_scale(self, val):
        self.mover.wind_scale = val

    @property
    def extrapolate(self):
        return self.mover.extrapolate

    @extrapolate.setter
    def extrapolate(self, val):
        self.mover.extrapolate = val

    @property
    def time_offset(self):
        # held in seconds on the cython mover; exposed in hours
        return self.mover.time_offset / 3600.

    @time_offset.setter
    def time_offset(self, val):
        self.mover.time_offset = val * 3600.

    def export_topology(self, topology_file):
        """
        :param topology_file=None: absolute or relative path where topology
                                   file will be written.
        """
        if topology_file is None:
            raise ValueError(
                'Topology file path required: {0}'.format(topology_file))

        self.mover.export_topology(topology_file)

    def extrapolate_in_time(self, extrapolate):
        """
        :param extrapolate=false: Allow current data to be extrapolated before
                                  and after file data.
        """
        self.mover.extrapolate_in_time(extrapolate)

    def offset_time(self, time_offset):
        """
        :param offset_time=0: Allow data to be in GMT with a time zone offset
                              (hours).
        """
        self.mover.offset_time(time_offset * 3600.)

    def get_start_time(self):
        """
        :this will be the real_data_start time (seconds).
        """
        return self.mover.get_start_time()

    def get_end_time(self):
        """
        :this will be the real_data_stop time (seconds).
        """
        return self.mover.get_end_time()
# Example #11
# 0
class Water(Environment, serializable.Serializable):
    '''
    Define the environmental conditions for a spill, like water_temperature,
    atmos_pressure (most likely a constant)

    Defined in a Serializable class since user will need to set/get some of
    these properties through the client
    '''
    _ref_as = 'water'
    _state = copy.deepcopy(Environment._state)
    _state += [
        serializable.Field('units', update=True, save=True),
        serializable.Field('temperature', update=True, save=True),
        serializable.Field('salinity', update=True, save=True),
        serializable.Field('sediment', update=True, save=True),
        serializable.Field('fetch', update=True, save=True),
        serializable.Field('wave_height', update=True, save=True),
        serializable.Field('density', update=True, save=True),
        serializable.Field('kinematic_viscosity', update=True, save=True)
    ]

    _schema = WaterSchema

    # map attribute name -> (unit_conversion category, valid unit names)
    _units_type = {
        'temperature': ('temperature', _valid_temp_units),
        'salinity': ('salinity', _valid_salinity_units),
        'sediment': ('concentration in water', _valid_sediment_units),
        'wave_height': ('length', _valid_dist_units),
        'fetch': ('length', _valid_dist_units),
        'kinematic_viscosity': ('kinematic viscosity', _valid_kvis_units),
        'density': ('density', _valid_density_units),
    }

    # keep track of valid SI units for properties - these are used for
    # conversion since internal code uses SI units. Don't expect to change
    # these so make it a class level attribute
    _si_units = {
        'temperature': 'K',
        'salinity': 'psu',
        'sediment': 'kg/m^3',
        'wave_height': 'm',
        'fetch': 'm',
        'density': 'kg/m^3',
        'kinematic_viscosity': 'm^2/s'
    }

    def __init__(
            self,
            temperature=300.0,
            salinity=35.0,
            sediment=.005,  # kg/m^3 oceanic default
            wave_height=None,
            fetch=None,
            units=None,
            name='Water'):
        '''
        Assume units are SI for all properties. 'units' attribute assumes SI
        by default. This can be changed, but initialization takes SI.

        :param units: optional dict mapping attribute name -> unit string;
            defaults to SI units for every property.  (The previous default
            was a mutable dict literal shared across all calls; None is used
            as the default instead, with the same effective value.)
        '''
        # define properties in SI units
        # ask if we want unit conversion implemented here?
        self.temperature = temperature
        self.salinity = salinity
        self.sediment = sediment
        self.wave_height = wave_height
        self.fetch = fetch
        self.kinematic_viscosity = 0.000001
        self.name = name
        self._units = dict(self._si_units)
        # fix for mutable default argument: build a fresh SI dict per call
        self.units = dict(self._si_units) if units is None else units

    def __repr__(self):
        info = ("{0.__class__.__module__}.{0.__class__.__name__}"
                "(temperature={0.temperature},"
                " salinity={0.salinity})").format(self)
        return info

    __str__ = __repr__

    def get(self, attr, unit=None):
        '''
        return value in desired unit. If None, then return the value in SI
        units. The user_unit are given in 'units' attribute and each attribute
        carries the value in as given in these user_units.
        '''
        val = getattr(self, attr)
        if unit is None:
            # Note: salinity only have one units since we don't
            # have any conversions for them in unit_conversion yet - revisit
            # this per requirements
            if (attr not in self._si_units
                    or self._si_units[attr] == self._units[attr]):
                return val
            else:
                unit = self._si_units[attr]

        if unit in self._units_type[attr][1]:
            return uc.convert(self._units_type[attr][0], self.units[attr],
                              unit, val)
        else:
            # log to file if we have logger
            ex = uc.InvalidUnitError((unit, self._units_type[attr][0]))
            self.logger.error(str(ex))
            raise ex

    def set(self, attr, value, unit):
        '''
        provide a corresponding set method that requires value and units
        The attributes can be directly set. This function just sets the
        desired property and also updates the units dict

        :raises uc.InvalidUnitError: if 'unit' is not valid for 'attr'
        '''
        if unit not in self._units_type[attr][1]:
            raise uc.InvalidUnitError((unit, self._units_type[attr][0]))

        setattr(self, attr, value)
        self.units[attr] = unit

    @lru_cache(2)
    def _get_density(self, salinity, temp):
        '''
        use lru cache so we don't recompute if temp is not changing
        '''
        temp_c = uc.convert('Temperature', self.units['temperature'], 'C',
                            temp)
        # sea level pressure in decibar - don't expect atmos_pressure to change
        # also expect constants to have SI units
        rho = gsw.rho(salinity, temp_c, constants.atmos_pressure * 0.0001)
        return rho

    @property
    def density(self):
        '''
        return the density based on water salinity and temperature. The
        salinity is in 'psu'; it is not being converted to absolute salinity
        units - for our purposes, this is sufficient. Using gsw.rho()
        internally which expects salinity in absolute units.
        '''
        return self._get_density(self.salinity, self.temperature)

    def update_from_dict(self, data):
        '''
        override base class:

        'fetch' and 'wave_height' get dropped by colander if value is None.
        In this case, toggle the values back to None.
        '''
        for attr in ('fetch', 'wave_height'):
            if attr not in data:
                setattr(self, attr, None)

        super(Water, self).update_from_dict(data)

    @property
    def units(self):
        return self._units

    @units.setter
    def units(self, u_dict):
        for prop, unit in u_dict.iteritems():
            if prop in self._units_type:
                if unit not in self._units_type[prop][1]:
                    # NOTE(review): the message says "Ignore it" but the code
                    # raises - the message and behavior disagree; confirm
                    # which is intended before changing either
                    msg = ("{0} are invalid units for {1}."
                           "Ignore it".format(unit, prop))
                    self.logger.error(msg)
                    # should we raise error?
                    raise uc.InvalidUnitError(msg)

            # allow user to add new keys to units dict.
            # also update prop if unit is valid
            self._units[prop] = unit

    def _convert_sediment_units(self, from_, to):
        '''
        used internally to convert to/from sediment units.
        Only 'mg/l' and 'kg/m^3' are handled (factor of 1000).
        '''
        if from_ == to:
            return self.sediment

        if from_ == 'mg/l':
            # convert to kg/m^3
            return self.sediment / 1000.0

        else:
            return self.sediment * 1000.0
# Example #12
# 0
class PyWindMover(movers.PyMover, serializable.Serializable):
    """
    Mover that transports elements using a gridded wind field.

    The wind is evaluated at element positions using one of the numerical
    methods supplied by the PyMover base class; the resulting deltas are
    scaled by each element's windage and converted from meters to lon/lat.
    """

    _state = copy.deepcopy(movers.PyMover._state)

    _state.add_field([
        serializable.Field('filename',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('wind', save=True, read=True, save_reference=True)
    ])
    _state.add(update=['uncertain_duration', 'uncertain_time_delay'],
               save=['uncertain_duration', 'uncertain_time_delay'])
    _schema = PyWindMoverSchema

    _ref_as = 'py_wind_movers'

    _req_refs = {'wind': GridWind}

    def __init__(self,
                 wind=None,
                 filename=None,
                 extrapolate=False,
                 time_offset=0,
                 uncertain_duration=3,
                 uncertain_time_delay=0,
                 uncertain_speed_scale=2.,
                 uncertain_angle_scale=0.4,
                 default_num_method='Trapezoid',
                 **kwargs):
        """
        Uses super to call CyMover base class __init__

        :param wind: wind object -- provides the wind time series for the mover

        Remaining kwargs are passed onto WindMoversBase __init__ using super.
        See Mover documentation for remaining valid kwargs.

        .. note:: Can be initialized with wind=None; however, wind must be
            set before running. If wind is not None, toggle make_default_refs
            to False since user provided a valid Wind and does not wish to
            use the default from the Model.
        """
        self._wind = wind
        # NOTE(review): this is set False even when wind is None, which the
        # note above suggests should only happen when wind is provided --
        # confirm intended.
        self.make_default_refs = False

        self.filename = filename
        self.extrapolate = extrapolate
        # NOTE(review): time_offset is accepted but never stored or used
        # here -- confirm whether it should be applied to the wind data.
        self.uncertain_duration = uncertain_duration
        self.uncertain_time_delay = uncertain_time_delay
        self.uncertain_speed_scale = uncertain_speed_scale

        # also sets self._uncertain_angle_units
        self.uncertain_angle_scale = uncertain_angle_scale
        super(PyWindMover,
              self).__init__(default_num_method=default_num_method, **kwargs)

        # windage arrays are required to scale the deltas in get_move()
        self.array_types.update(
            {'windages', 'windage_range', 'windage_persist'})

    @classmethod
    def from_netCDF(cls,
                    filename=None,
                    extrapolate=False,
                    time_offset=0,
                    current_scale=1,
                    uncertain_duration=24 * 3600,
                    uncertain_time_delay=0,
                    uncertain_along=.5,
                    uncertain_across=.25,
                    uncertain_cross=.25,
                    default_num_method='Trapezoid',
                    **kwargs):
        """
        Alternate constructor: build the GridWind from a netCDF file (or
        filelist) and wrap it in a PyWindMover.

        :param filename: netCDF file (or filelist) containing the wind data

        Remaining kwargs are handed to GridWind.from_netCDF().
        """
        wind = GridWind.from_netCDF(filename, **kwargs)

        # NOTE(review): current_scale/uncertain_along/across/cross are not
        # declared by __init__ and end up in its **kwargs; meanwhile
        # uncertain_duration/uncertain_time_delay are accepted here but not
        # forwarded -- confirm intended.
        return cls(wind=wind,
                   filename=filename,
                   extrapolate=extrapolate,
                   time_offset=time_offset,
                   current_scale=current_scale,
                   uncertain_along=uncertain_along,
                   uncertain_across=uncertain_across,
                   uncertain_cross=uncertain_cross,
                   default_num_method=default_num_method)

    @property
    def wind(self):
        return self._wind

    @wind.setter
    def wind(self, value):
        self._wind = value

    def prepare_for_model_step(self, sc, time_step, model_time_datetime):
        """
        Call base class method using super
        Also updates windage for this timestep

        :param sc: an instance of gnome.spill_container.SpillContainer class
        :param time_step: time step in seconds
        :param model_time_datetime: current time of model as a date time object
        """
        super(PyWindMover,
              self).prepare_for_model_step(sc, time_step, model_time_datetime)

        # if no particles released, then no need for windage
        # TODO: revisit this since sc.num_released shouldn't be None
        if sc.num_released is None or sc.num_released == 0:
            return

        rand.random_with_persistance(sc['windage_range'][:, 0],
                                     sc['windage_range'][:, 1], sc['windages'],
                                     sc['windage_persist'], time_step)

    def get_move(self, sc, time_step, model_time_datetime, num_method=None):
        """
        Compute the move in (long,lat,z) space. It returns the delta move
        for each element of the spill as a numpy array of size
        (number_elements X 3) and dtype = gnome.basic_types.world_point_type

        Base class returns an array of numpy.nan for delta to indicate the
        get_move is not implemented yet.

        Each class derived from Mover object must implement it's own get_move

        :param sc: an instance of gnome.spill_container.SpillContainer class
        :param time_step: time step in seconds
        :param model_time_datetime: current model time as datetime object
        :param num_method: optional name of the numerical method to use;
                           defaults to self.default_num_method

        All movers must implement get_move() since that's what the model calls
        """
        if num_method is None:
            method = self.num_methods[self.default_num_method]
        else:
            # BUG FIX: was `self.num_method[num_method]` -- the lookup
            # table is `num_methods` (see the branch above).
            method = self.num_methods[num_method]

        # elements not in the water get a zero delta (masked below)
        status = sc['status_codes'] != oil_status.in_water
        positions = sc['positions']
        pos = positions[:]

        deltas = method(sc, time_step, model_time_datetime, pos, self.wind)
        # scale the horizontal components by each element's windage
        deltas[:, 0] *= sc['windages']
        deltas[:, 1] *= sc['windages']

        deltas = FlatEarthProjection.meters_to_lonlat(deltas, positions)
        deltas[status] = (0, 0, 0)
        return deltas
Example #13
0
class ComponentMover(CyMover, serializable.Serializable):
    """
    Mover combining up to two Cats current patterns, scaled and driven by
    a Wind object via the underlying CyComponentMover.
    """

    _state = copy.deepcopy(CyMover._state)

    # attributes exposed for webapi update and persisted in save files
    _update = [
        'scale_refpoint', 'pat1_angle', 'pat1_speed', 'pat1_speed_units',
        'pat1_scale_to_value', 'pat2_angle', 'pat2_speed', 'pat2_speed_units',
        'pat2_scale_to_value', 'scale_by'
    ]
    _create = []
    _create.extend(_update)
    _state.add(update=_update, save=_create)
    _state.add_field([
        serializable.Field('filename1',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('filename2',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('wind', save=True, update=True, save_reference=True)
    ])
    _schema = ComponentMoverSchema

    def __init__(self, filename1, filename2=None, wind=None, **kwargs):
        """
        Uses super to invoke base class __init__ method.

        :param filename1: file containing currents for first Cats pattern

        Optional parameters (kwargs).
        Defaults are defined by CyCatsMover object.

        :param filename2: file containing currents for second Cats pattern

        :param wind: A gnome.environment.Wind object to be used to drive the
                     CatsMovers.  Will want a warning that mover will
                     not be active without a wind
        :param scale: A boolean to indicate whether to scale value
                      at reference point or not
        :param scale_value: Value used for scaling at reference point
        :param scale_refpoint: Reference location (long, lat, z).
                               The scaling applied to all data is determined
                               by scaling the raw value at this location.

        Remaining kwargs are passed onto Mover's __init__ using super.
        See Mover documentation for remaining valid kwargs.

        :raises ValueError: if filename1 (or a given filename2) does not exist
        """

        if not os.path.exists(filename1):
            raise ValueError(
                'Path for Cats filename1 does not exist: {0}'.format(
                    filename1))

        if filename2 is not None:
            if not os.path.exists(filename2):
                raise ValueError(
                    'Path for Cats filename2 does not exist: {0}'.format(
                        filename2))

        self.filename1 = filename1
        self.filename2 = filename2

        # the Cython mover does the real work; load both patterns up front
        self.mover = CyComponentMover()
        self.mover.text_read(filename1, filename2)

        self._wind = None
        if wind is not None:
            self.wind = wind

        # self.scale = kwargs.pop('scale', self.mover.scale_type)
        # self.scale_value = kwargs.get('scale_value',
        #                               self.mover.scale_value)

        # TODO: no need to check for None since properties that are None
        #       are not persisted

        # I think this is required...
        if 'scale_refpoint' in kwargs:
            self.scale_refpoint = kwargs.pop('scale_refpoint')

#         if self.scale and self.scale_value != 0.0 \
#             and self.scale_refpoint is None:
#             raise TypeError("Provide a reference point in 'scale_refpoint'."
#                             )

        super(ComponentMover, self).__init__(**kwargs)

    def __repr__(self):
        """
        unambiguous representation of object
        """
        return 'ComponentMover(filename={0})'.format(self.filename1)

    # Properties

    # scale_type = property(lambda self: bool(self.mover.scale_type),
    #                       lambda self, val: setattr(self.mover, 'scale_type',
    #                                                 int(val)))

    # scale_by = property(lambda self: bool(self.mover.scale_by),
    #                     lambda self, val: setattr(self.mover, 'scale_by',
    #                                               int(val)))

    # pattern 1/2 parameters are simply delegated to the Cython mover
    pat1_angle = property(
        lambda self: self.mover.pat1_angle,
        lambda self, val: setattr(self.mover, 'pat1_angle', val))

    pat1_speed = property(
        lambda self: self.mover.pat1_speed,
        lambda self, val: setattr(self.mover, 'pat1_speed', val))

    pat1_speed_units = property(
        lambda self: self.mover.pat1_speed_units,
        lambda self, val: setattr(self.mover, 'pat1_speed_units', val))

    pat1_scale_to_value = property(
        lambda self: self.mover.pat1_scale_to_value,
        lambda self, val: setattr(self.mover, 'pat1_scale_to_value', val))

    pat2_angle = property(
        lambda self: self.mover.pat2_angle,
        lambda self, val: setattr(self.mover, 'pat2_angle', val))

    pat2_speed = property(
        lambda self: self.mover.pat2_speed,
        lambda self, val: setattr(self.mover, 'pat2_speed', val))

    pat2_speed_units = property(
        lambda self: self.mover.pat2_speed_units,
        lambda self, val: setattr(self.mover, 'pat2_speed_units', val))

    pat2_scale_to_value = property(
        lambda self: self.mover.pat2_scale_to_value,
        lambda self, val: setattr(self.mover, 'pat2_scale_to_value', val))

    scale_by = property(lambda self: self.mover.scale_by,
                        lambda self, val: setattr(self.mover, 'scale_by', val))

    @property
    def scale_refpoint(self):
        return self.mover.ref_point

    @scale_refpoint.setter
    def scale_refpoint(self, val):
        '''
        Must be a tuple of length 2 or 3: (long, lat, z). If only (long, lat)
        is given, the set z = 0
        '''
        if len(val) == 2:
            self.mover.ref_point = (val[0], val[1], 0.)
        else:
            self.mover.ref_point = val

    @property
    def wind(self):
        return self._wind

    @wind.setter
    def wind(self, wind_obj):
        '''
        Attach a Wind object and hand its OSSM time series to the
        Cython mover.

        :raises TypeError: if wind_obj is not an environment.Wind
        '''
        if not isinstance(wind_obj, environment.Wind):
            raise TypeError('wind must be of type environment.Wind')

        self.mover.set_ossm(wind_obj.ossm)
        self._wind = wind_obj

    def serialize(self, json_='webapi'):
        """
        Since 'wind' property is saved as a reference when used in save file
        and 'save' option, need to add appropriate node to WindMover schema
        """
        dict_ = self.to_serialize(json_)
        schema = self.__class__._schema()

        if json_ == 'webapi' and 'wind' in dict_:
            schema.add(environment.WindSchema(name='wind'))

        return schema.serialize(dict_)

    @classmethod
    def deserialize(cls, json_):
        """
        append correct schema for wind object
        """
        schema = cls._schema()

        if 'wind' in json_:
            # for 'webapi', there will be nested Wind structure
            # for 'save' option, there should be no nested 'wind'. It is
            # removed, loaded and added back after deserialization
            schema.add(environment.WindSchema())
        _to_dict = schema.deserialize(json_)

        return _to_dict
Example #14
0
class CurrentCycleMover(GridCurrentMover, serializable.Serializable):
    """
    GridCurrentMover variant whose currents cycle with a tide pattern.

    An optional Tide object (shio or OSSM time series) drives the cycling
    via the underlying CyCurrentCycleMover.
    """

    _state = copy.deepcopy(GridCurrentMover._state)
    _state.add_field([
        serializable.Field('tide', save=True, update=True, save_reference=True)
    ])
    _schema = CurrentCycleMoverSchema

    def __init__(self, filename, topology_file=None, **kwargs):
        """
        Initialize a CurrentCycleMover

        :param filename: Absolute or relative path to the data file:
                         could be netcdf or filelist
        :param topology_file=None: Absolute or relative path to topology file.
                                   If not given, the GridCurrentMover will
                                   compute the topology from the data file.
        :param tide: A gnome.environment.Tide object to be attached to
                     CatsMover
        :param active_start: datetime when the mover should be active
        :param active_stop: datetime after which the mover should be inactive
        :param current_scale: Value to scale current data
        :param uncertain_duration: How often does a given uncertain element
                                   get reset
        :param uncertain_time_delay: when does the uncertainly kick in.
        :param uncertain_cross: Scale for uncertainty perpendicular to the flow
        :param uncertain_along: Scale for uncertainty parallel to the flow
        :param extrapolate: Allow current data to be extrapolated
                            before and after file data
        :param time_offset: Time zone shift if data is in GMT

        uses super: super(CurrentCycleMover,self).__init__(**kwargs)
        """

        # NOTE: will need to add uncertainty parameters
        #       and other dialog fields.
        #       use super with kwargs to invoke base class __init__
        # must be created before super().__init__, which calls text_read()
        self.mover = CyCurrentCycleMover()

        # pop 'tide' so the base class never sees it in kwargs
        tide = kwargs.pop('tide', None)
        self._tide = None

        if tide is not None:
            self.tide = tide

        super(CurrentCycleMover, self).__init__(filename=filename,
                                                topology_file=topology_file,
                                                **kwargs)

    def __repr__(self):
        # BUG FIX: previously reported itself as 'GridCurrentMover'
        return ('CurrentCycleMover(uncertain_duration={0.uncertain_duration}, '
                'uncertain_time_delay={0.uncertain_time_delay}, '
                'uncertain_cross={0.uncertain_cross}, '
                'uncertain_along={0.uncertain_along}, '
                'active_start={1.active_start}, '
                'active_stop={1.active_stop}, '
                'on={1.on})'.format(self.mover, self))

    def __str__(self):
        # BUG FIX: previously reported itself as 'GridCurrentMover' and was
        # missing the '\n' separators the parallel GridCurrentMover.__str__
        # has, which ran the last lines together.
        return ('CurrentCycleMover - current _state.\n'
                '  uncertain_duration={0.uncertain_duration}\n'
                '  uncertain_time_delay={0.uncertain_time_delay}\n'
                '  uncertain_cross={0.uncertain_cross}\n'
                '  uncertain_along={0.uncertain_along}\n'
                '  active_start time={1.active_start}\n'
                '  active_stop time={1.active_stop}\n'
                '  current on/off status={1.on}'.format(self.mover, self))

    @property
    def tide(self):
        return self._tide

    @tide.setter
    def tide(self, tide_obj):
        """
        Attach a Tide object, routing its underlying time series to the
        appropriate Cython setter.

        :raises TypeError: if tide_obj is not an environment.Tide, or its
            cy_obj is neither CyShioTime nor CyOSSMTime
        """
        if not isinstance(tide_obj, environment.Tide):
            raise TypeError('tide must be of type environment.Tide')

        if isinstance(tide_obj.cy_obj, CyShioTime):
            self.mover.set_shio(tide_obj.cy_obj)
        elif isinstance(tide_obj.cy_obj, CyOSSMTime):
            self.mover.set_ossm(tide_obj.cy_obj)
        else:
            raise TypeError('Tide.cy_obj attribute must be either '
                            'CyOSSMTime or CyShioTime type '
                            'for CurrentCycleMover.')

        self._tide = tide_obj

    def serialize(self, json_='webapi'):
        """
        Since 'tide' property is saved as a reference when used in save file
        and 'save' option, need to add appropriate node to
        CurrentCycleMover schema
        """
        toserial = self.to_serialize(json_)
        schema = self.__class__._schema()

        if json_ == 'webapi' and 'tide' in toserial:
            schema.add(environment.TideSchema(name='tide'))

        return schema.serialize(toserial)

    @classmethod
    def deserialize(cls, json_):
        """
        append correct schema for tide object
        """
        schema = cls._schema()

        if 'tide' in json_:
            schema.add(environment.TideSchema())

        return schema.deserialize(json_)
Example #15
0
class GridCurrentMover(CurrentMoversBase, serializable.Serializable):
    """
    Mover wrapping CyGridCurrentMover: transports elements with gridded
    current data read from a netcdf file or filelist.
    """

    _update = ['uncertain_cross', 'uncertain_along', 'current_scale']
    _save = ['uncertain_cross', 'uncertain_along', 'current_scale']
    _state = copy.deepcopy(CurrentMoversBase._state)

    _state.add(update=_update, save=_save)
    _state.add_field([
        serializable.Field('filename',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('topology_file',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False)
    ])
    _schema = GridCurrentMoverSchema

    def __init__(self,
                 filename,
                 topology_file=None,
                 extrapolate=False,
                 time_offset=0,
                 current_scale=1,
                 uncertain_along=0.5,
                 uncertain_across=0.25,
                 num_method=basic_types.numerical_methods.euler,
                 **kwargs):
        """
        Initialize a GridCurrentMover

        :param filename: absolute or relative path to the data file:
                         could be netcdf or filelist
        :param topology_file=None: absolute or relative path to topology file.
                                   If not given, the GridCurrentMover will
                                   compute the topology from the data file.
        :param active_start: datetime when the mover should be active
        :param active_stop: datetime after which the mover should be inactive
        :param current_scale: Value to scale current data
        :param uncertain_duration: how often does a given uncertain element
                                   get reset
        :param uncertain_time_delay: when does the uncertainly kick in.
        :param uncertain_cross: Scale for uncertainty perpendicular to the flow
        :param uncertain_along: Scale for uncertainty parallel to the flow
        :param extrapolate: Allow current data to be extrapolated
                            before and after file data
        :param time_offset: Time zone shift if data is in GMT
        :param num_method: Numerical method for calculating movement delta.
                           Default Euler
                           option: Runga-Kutta 4 (RK4)

        :raises ValueError: if filename (or a given topology_file) does
            not exist

        uses super, super(GridCurrentMover,self).__init__(\*\*kwargs)
        """

        # if child is calling, the self.mover is set by child - do not reset
        if type(self) == GridCurrentMover:
            self.mover = CyGridCurrentMover()

        if not os.path.exists(filename):
            raise ValueError(
                'Path for current file does not exist: {0}'.format(filename))

        if topology_file is not None:
            if not os.path.exists(topology_file):
                raise ValueError(
                    'Path for Topology file does not exist: {0}'.format(
                        topology_file))

        # check if this is stored with cy_gridcurrent_mover?
        self.filename = filename
        self.name = os.path.split(filename)[1]

        # check if this is stored with cy_gridcurrent_mover?
        self.topology_file = topology_file
        self.current_scale = current_scale
        self.uncertain_along = uncertain_along
        # NOTE(review): this stores a plain 'uncertain_across' attribute,
        # while _update/_save and the property below use 'uncertain_cross'
        # (delegated to the Cython mover) -- the two never connect; confirm
        # whether uncertain_across should feed mover.uncertain_cross.
        self.uncertain_across = uncertain_across
        self.mover.text_read(filename, topology_file)
        self.mover.extrapolate_in_time(extrapolate)
        # Cython mover wants the offset in seconds; we take hours
        self.mover.offset_time(time_offset * 3600.)
        self.num_method = num_method

        super(GridCurrentMover, self).__init__(**kwargs)

    def __repr__(self):
        return ('GridCurrentMover('
                'uncertain_duration={0.uncertain_duration},'
                'uncertain_time_delay={0.uncertain_time_delay}, '
                'uncertain_cross={0.uncertain_cross}, '
                'uncertain_along={0.uncertain_along}, '
                'active_start={1.active_start}, '
                'active_stop={1.active_stop}, '
                'on={1.on})'.format(self.mover, self))

    def __str__(self):
        return ('GridCurrentMover - current _state.\n'
                '  uncertain_duration={0.uncertain_duration}\n'
                '  uncertain_time_delay={0.uncertain_time_delay}\n'
                '  uncertain_cross={0.uncertain_cross}\n'
                '  uncertain_along={0.uncertain_along}\n'
                '  active_start time={1.active_start}\n'
                '  active_stop time={1.active_stop}\n'
                '  current on/off status={1.on}'.format(self.mover, self))

    # Define properties using lambda functions: uses lambda function, which are
    # accessible via fget/fset as follows:
    uncertain_cross = property(
        lambda self: self.mover.uncertain_cross,
        lambda self, val: setattr(self.mover, 'uncertain_cross', val))

    uncertain_along = property(
        lambda self: self.mover.uncertain_along,
        lambda self, val: setattr(self.mover, 'uncertain_along', val))

    current_scale = property(
        lambda self: self.mover.current_scale,
        lambda self, val: setattr(self.mover, 'current_scale', val))

    extrapolate = property(
        lambda self: self.mover.extrapolate,
        lambda self, val: setattr(self.mover, 'extrapolate', val))

    # exposed in hours; the Cython mover stores seconds
    time_offset = property(
        lambda self: self.mover.time_offset / 3600.,
        lambda self, val: setattr(self.mover, 'time_offset', val * 3600.))
    num_method = property(
        lambda self: self.mover.num_method,
        lambda self, val: setattr(self.mover, 'num_method', val))

    def get_grid_data(self):
        """
            The main function for getting grid data from the mover
        """
        if self.mover._is_triangle_grid():
            return self.get_triangles()
        else:
            return self.get_cells()

    def get_center_points(self):
        # center points come from triangles, raw points, or cell centers,
        # depending on grid type and where the data lives
        if self.mover._is_triangle_grid():
            if self.mover._is_data_on_cells():
                return self.get_triangle_center_points()
            else:
                return self.get_points()
        else:
            return self.get_cell_center_points()

    def get_scaled_velocities(self, time):
        """
        Ask the Cython mover to fill an array of scaled velocities for
        the given time.

        :param time: model time for which to evaluate the velocities
        """
        num_tri = self.mover.get_num_triangles()
        # will need to update this for regular grids
        if self.mover._is_triangle_grid():
            if self.mover._is_data_on_cells():
                num_cells = num_tri
            else:
                num_vertices = self.mover.get_num_points()
                num_cells = num_vertices
        else:
            # NOTE(review): integer division under Python 2; presumably two
            # triangles per rectangular cell -- confirm.
            num_cells = num_tri / 2
        vels = np.zeros(num_cells, dtype=basic_types.velocity_rec)

        self.mover.get_scaled_velocities(time, vels)

        return vels

    def export_topology(self, topology_file):
        """
        :param topology_file=None: absolute or relative path where
                                   topology file will be written.
        """
        if topology_file is None:
            raise ValueError(
                'Topology file path required: {0}'.format(topology_file))

        self.mover.export_topology(topology_file)

    def extrapolate_in_time(self, extrapolate):
        """
        :param extrapolate=false: allow current data to be extrapolated
                                  before and after file data.
        """
        self.mover.extrapolate_in_time(extrapolate)

    def offset_time(self, time_offset):
        """
        :param offset_time=0: allow data to be in GMT with a time zone offset
                              (hours).
        """
        self.mover.offset_time(time_offset * 3600.)

    def get_offset_time(self):
        """
        :param offset_time=0: allow data to be in GMT with a time zone offset
                              (hours).
        """
        return (self.mover.get_offset_time()) / 3600.

    def get_num_method(self):
        return self.mover.num_method
Example #16
0
class ShipDriftMover(Mover, serializable.Serializable):
    """
    Mover approximating the drift of a ship under gridded winds.

    Wind data is read through a Grid object and the wind-driven deltas are
    scaled by each element's windage.
    """

    _state = copy.deepcopy(Mover._state)
    _state.add(update=['wind_scale', 'grid_type', 'drift_angle'],
               save=['wind_scale', 'grid_type', 'drift_angle'])
    _state.add_field([
        serializable.Field('wind_file', save=True, read=True,
                           isdatafile=True, test_for_eq=False),
        serializable.Field('topology_file', save=True, read=True,
                           isdatafile=True, test_for_eq=False)
    ])

    _schema = ShipDriftMoverSchema

    def __init__(self, wind_file, topology_file=None, grid_type=1,
                 drift_angle=0, extrapolate=False, time_offset=0,
                 **kwargs):
        """
        :param wind_file: file containing wind data on a grid
        :param topology_file: Default is None. When exporting topology, it
                              is stored in this file
        :param grid_type: type of grid in the wind file (passed to Grid)
        :param drift_angle: deflection angle of the drift
        :param wind_scale: Value to scale wind data
        :param extrapolate: Allow current data to be extrapolated before and
                            after file data
        :param time_offset: Time zone shift if data is in GMT

        :raises ValueError: if wind_file (or a given topology_file) does
            not exist

        Pass optional arguments to base class
        uses super: super(ShipDriftMover,self).__init__(\*\*kwargs)
        """

        if not os.path.exists(wind_file):
            raise ValueError('Path for wind file does not exist: {0}'
                             .format(wind_file))

        if topology_file is not None:
            if not os.path.exists(topology_file):
                raise ValueError('Path for Topology file does not exist: {0}'
                                 .format(topology_file))

        # is wind_file and topology_file is stored with cy_gridwind_mover?
        self.wind_file = wind_file
        self.topology_file = topology_file
        # NOTE(review): a bare Mover() looks like a placeholder -- confirm
        # whether a Cython mover should be instantiated here instead.
        self.mover = Mover()
        self.grid_type = grid_type
        self.drift_angle = drift_angle
        self.grid = Grid(wind_file, topology_file, grid_type)
        self.name = os.path.split(wind_file)[1]
        self._wind_scale = kwargs.pop('wind_scale', 1)
        super(ShipDriftMover, self).__init__(**kwargs)

        # have to override any uncertainty
        # self.grid.load_data(wind_file, topology_file)

        self.model_time = 0
        self.positions = np.zeros((0, 3), dtype=world_point_type)
        self.delta = np.zeros((0, 3), dtype=world_point_type)
        self.status_codes = np.zeros((0, 1), dtype=status_code_type)

        # windage arrays are needed to scale the wind-driven movement
        self.array_types.update({'windages',
                                 'windage_range',
                                 'windage_persist'})

    def __repr__(self):
        """
        .. todo::
            We probably want to include more information.
        """
        return ('ShipDriftMover('
                'active_start={1.active_start}, '
                'active_stop={1.active_stop}, '
                'on={1.on})'.format(self.mover, self))

    def __str__(self):
        return ('ShipDriftMover - current _state.\n'
                '  active_start time={1.active_start}\n'
                '  active_stop time={1.active_stop}\n'
                '  current on/off status={1.on}'
                .format(self.mover, self))

    # BUG FIX: the setter previously wrote to 'wind_scale' (the property
    # itself), which recursed infinitely; it must set the backing
    # attribute '_wind_scale'.
    wind_scale = property(lambda self: self._wind_scale,
                          lambda self, val: setattr(self,
                                                    '_wind_scale',
                                                    val))

    extrapolate = property(lambda self: self.grid.extrapolate,
                           lambda self, val: setattr(self.grid,
                                                     'extrapolate',
                                                     val))

    # exposed in hours; the grid stores seconds
    time_offset = property(lambda self: self.grid.time_offset / 3600.,
                           lambda self, val: setattr(self.grid,
                                                     'time_offset',
                                                     val * 3600.))

    def export_topology(self, topology_file):
        """
        :param topology_file=None: absolute or relative path where topology
                                   file will be written.
        """
        if topology_file is None:
            raise ValueError('Topology file path required: {0}'.
                             format(topology_file))

        self.grid.export_topology(topology_file)

    def prepare_for_model_run(self):
        """
        Override this method if a derived mover class needs to perform any
        actions prior to a model run
        """
        # May not need this function
        pass

    def prepare_for_model_step(self, sc, time_step, model_time_datetime):
        """
        Call base class method using super
        Also updates windage for this timestep

        :param sc: an instance of gnome.spill_container.SpillContainer class
        :param time_step: time step in seconds
        :param model_time_datetime: current time of model as a date time object
        """
        # not sure if we need to redefine this or what we want to do here
        super(ShipDriftMover, self).prepare_for_model_step(sc, time_step,
                                                           model_time_datetime)

        # if no particles released, then no need for windage
        # TODO: revisit this since sc.num_released shouldn't be None
        if sc.num_released is None or sc.num_released == 0:
            return

        self.grid.prepare_for_model_step(model_time_datetime)
        # here we might put in drift angle stuff ?

        rand.random_with_persistance(sc['windage_range'][:, 0],
                                     sc['windage_range'][:, 1],
                                     sc['windages'],
                                     sc['windage_persist'],
                                     time_step)

    def prepare_data_for_get_move(self, sc, model_time_datetime):
        """
        organizes the spill object into inputs for calling with Cython
        wrapper's get_move(...)

        :param sc: an instance of gnome.spill_container.SpillContainer class
        :param model_time_datetime: current model time as datetime object

        :raises ValueError: if the spill container is missing the required
            data arrays
        """
        self.model_time = self.datetime_to_seconds(model_time_datetime)

        # Get the data:
        try:
            self.positions = sc['positions']
            self.status_codes = sc['status_codes']
        except KeyError as err:
            # BUG FIX: py3-compatible `as` syntax; the message previously
            # ran "required" and "data" together, and err.message is
            # py2-only -- use str(err).
            raise ValueError('The spill container does not have the required '
                             'data arrays\n' + str(err))

        # view as structured world_point records for the Cython layer
        self.positions = \
            self.positions.view(dtype=world_point).reshape(
                                                    (len(self.positions),))
        self.delta = np.zeros(len(self.positions),
                              dtype=world_point)
Example #17
0
class WindMover(WindMoversBase, serializable.Serializable):
    """
    Python wrapper around the Cython wind_mover module.
    This class inherits from CyMover and contains CyWindMover

    The real work is done by the CyWindMover object.  CyMover
    sets everything up that is common to all movers.
    """
    # serialization metadata: extend the base mover's state with the
    # fields specific to this mover
    _state = copy.deepcopy(WindMoversBase._state)
    _state.add(update=['extrapolate'], save=['extrapolate'])
    # 'wind' is saved as a reference so a save file can share a single
    # Wind object among several consumers
    _state.add_field(
        serializable.Field('wind', save=True, update=True,
                           save_reference=True))
    _schema = WindMoverSchema

    def __init__(self, wind=None, extrapolate=False, **kwargs):
        #def __init__(self, wind=None, **kwargs):
        """
        Uses super to call CyMover base class __init__

        :param wind: wind object -- provides the wind time series for the mover

        Remaining kwargs are passed onto WindMoversBase __init__ using super.
        See Mover documentation for remaining valid kwargs.

        .. note:: Can be initialized with wind=None; however, wind must be
            set before running. If wind is not None, toggle make_default_refs
            to False since user provided a valid Wind and does not wish to
            use the default from the Model.
        """
        self.mover = CyWindMover()

        # backing attribute for the 'wind' property; set before the
        # property so the setter can run its type check
        self._wind = None
        if wind is not None:
            self.wind = wind
            # user supplied a Wind -- don't pull the default from the Model
            kwargs['make_default_refs'] = \
                kwargs.pop('make_default_refs', False)
            kwargs['name'] = \
                kwargs.pop('name', wind.name)

        self.extrapolate = extrapolate
        # set optional attributes
        super(WindMover, self).__init__(**kwargs)

        # this will have to be updated when wind is set or changed
        if self.wind is not None:
            self.real_data_start = time_utils.sec_to_datetime(
                self.wind.ossm.get_start_time())
            self.real_data_stop = time_utils.sec_to_datetime(
                self.wind.ossm.get_end_time())

    def __repr__(self):
        """
        .. todo::
            We probably want to include more information.
        """
        return ('{0.__class__.__module__}.{0.__class__.__name__}(\n'
                '{1}'
                ')'.format(self, self._state_as_str()))

    def __str__(self):
        info = ('WindMover - current _state. '
                'See "wind" object for wind conditions:\n'
                '{0}'.format(self._state_as_str()))
        return info

    # delegate 'extrapolate' directly to the underlying Cython mover
    extrapolate = property(
        lambda self: self.mover.extrapolate,
        lambda self, val: setattr(self.mover, 'extrapolate', val))

    @property
    def wind(self):
        # Wind environment object providing the time series for this mover
        return self._wind

    @wind.setter
    def wind(self, value):
        if not isinstance(value, environment.Wind):
            raise TypeError('wind must be of type environment.Wind')
        else:
            # update reference to underlying cython object
            self._wind = value
            self.mover.set_ossm(self.wind.ossm)

    def prepare_for_model_run(self):
        '''
        if wind attribute is not set, raise ReferencedObjectNotSet exception
        '''
        super(WindMover, self).prepare_for_model_run()

        if self.on and self.wind is None:
            msg = "wind object not defined for WindMover"
            raise ReferencedObjectNotSet(msg)

    def serialize(self, json_='webapi'):
        """
        Since 'wind' property is saved as a reference when used in save file
        and 'save' option, need to add appropriate node to WindMover schema
        """
        toserial = self.to_serialize(json_)
        schema = self.__class__._schema()
        if json_ == 'webapi':
            # add wind schema so the full Wind object is serialized inline
            schema.add(environment.WindSchema(name='wind'))

        serial = schema.serialize(toserial)

        return serial

    @classmethod
    def deserialize(cls, json_):
        """
        append correct schema for wind object
        """
        schema = cls._schema()
        # only add the Wind node if the payload actually carries one
        if 'wind' in json_:
            schema.add(environment.WindSchema())
        _to_dict = schema.deserialize(json_)

        return _to_dict
Example #18
0
class TSVectorProp(VectorProp):
    """
    A vector natural phenomenon represented by a time series of
    (typically two) component TimeSeriesProp objects.
    """

    _schema = TSVectorPropSchema
    _state = copy.deepcopy(VectorProp._state)

    _state.add_field([
        serializable.Field('timeseries', save=False, update=True),
        serializable.Field('variables',
                           save=True,
                           update=True,
                           iscollection=True),
        serializable.Field('varnames', save=True, update=False)
    ])

    def __init__(self,
                 name=None,
                 units=None,
                 time=None,
                 variables=None,
                 varnames=None,
                 **kwargs):
        '''
        This class represents a vector phenomenon using a time series

        :param name: name of the property
        :param units: units of the underlying data
        :param time: time axis of the data
        :param variables: component data; TimeSeriesProp objects or
            array-likes
        :param varnames: names of the component variables

        :raises TypeError: if variables is None or has fewer than 2 entries
        :raises ValueError: if units/time are omitted and variables are not
            all TimeSeriesProp objects (from which they could be derived)
        '''
        # validate 'variables' first: the TimeSeriesProp check below
        # iterates it, which would crash with an unhelpful error on None
        if variables is None or len(variables) < 2:
            raise TypeError(
                'Variables must be an array-like of 2 or more TimeSeriesProp or array-like'
            )

        if any([
                units is None, time is None
        ]) and not all([isinstance(v, TimeSeriesProp) for v in variables]):
            raise ValueError(
                "All attributes except name, varnames MUST be defined if variables is not a list of TimeSeriesProp objects"
            )

        VectorProp.__init__(self, name, units, time, variables)

    @classmethod
    def constant(cls, name=None, units=None, variables=None):
        '''
        Alternate constructor for a time-invariant vector property.
        '''
        if any(var is None for var in (name, variables, units)):
            raise ValueError("name, variables, or units may not be None")

        if not isinstance(variables, collections.Iterable):
            raise TypeError('{0} variables must be an iterable'.format(name))
        t = Time.constant_time()
        return cls(name=name,
                   units=units,
                   time=t,
                   variables=[v for v in variables])

    @property
    def timeseries(self):
        '''
        Creates a representation of the time series

        :rtype: list of (datetime, (double, double)) tuples
        '''
        return map(lambda x, y, z: (x, (y, z)), self.time.time,
                   self.variables[0], self.variables[1])

    @property
    def time(self):
        # Time axis shared by all component variables
        return self._time

    @time.setter
    def time(self, t):
        # validate before mutating the component variables, so a bad
        # assignment cannot leave them partially updated
        if not isinstance(t, Time) and not isinstance(t, collections.Iterable):
            raise ValueError(
                "Object being assigned must be an iterable or a Time object")
        if self.variables is not None:
            for v in self.variables:
                v.time = t
        self._time = t if isinstance(t, Time) else Time(t)

    @property
    def variables(self):
        # OrderedCollection of component properties (or None before init)
        return self._variables

    @variables.setter
    def variables(self, v):
        if v is None:
            self._variables = v
        elif isinstance(v, collections.Iterable):
            self._variables = OrderedCollection(v)
        # non-iterable, non-None values are silently ignored (historical
        # behavior -- callers always pass a list or None)

    def is_constant(self):
        # a single time step means the property is time-invariant
        return len(self.variables[0]) == 1

    def in_units(self, units):
        '''
        Returns a full copy of this property in the units specified.
        WARNING: This will copy the data of the original property!
        '''
        cpy = copy.deepcopy(self)
        for i, var in enumerate(cpy._variables):
            cpy._variables[i] = var.in_units(units)
        cpy._units = units
        return cpy
Example #19
0
class IceWindMover(WindMoversBase, serializable.Serializable):

    #_update = ['wind_scale', 'extrapolate']
    #_save = ['wind_scale', 'extrapolate']
    # serialization metadata: extend the base mover's state with the
    # data-file fields; equality tests skip the file paths
    _state = copy.deepcopy(WindMoversBase._state)

    #_state.add(update=_update, save=_save)
    _state.add_field([
        serializable.Field('filename',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('topology_file',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False)
    ])
    _schema = IceWindMoverSchema

    def __init__(self,
                 filename,
                 topology_file=None,
                 extrapolate=False,
                 time_offset=0,
                 **kwargs):
        """
        Initialize an IceWindMover

        :param filename: absolute or relative path to the data file:
                         could be netcdf or filelist
        :param topology_file=None: absolute or relative path to topology file.
                                   If not given, the IceMover will
                                   compute the topology from the data file.
        :param active_start: datetime when the mover should be active
        :param active_stop: datetime after which the mover should be inactive
        :param wind_scale: Value to scale wind data
        :param extrapolate: Allow current data to be extrapolated
                            before and after file data
        :param time_offset: Time zone shift if data is in GMT

        :raises ValueError: if filename (or topology_file, when given)
            does not exist

        uses super, super(IceWindMover,self).__init__(\*\*kwargs)
        """

        # NOTE: will need to add uncertainty parameters and other dialog fields
        #       use super with kwargs to invoke base class __init__

        # if child is calling, the self.mover is set by child - do not reset
        if type(self) == IceWindMover:
            self.mover = CyIceWindMover()

        if not os.path.exists(filename):
            raise ValueError(
                'Path for current file does not exist: {0}'.format(filename))

        if topology_file is not None:
            if not os.path.exists(topology_file):
                raise ValueError(
                    'Path for Topology file does not exist: {0}'.format(
                        topology_file))

        # check if this is stored with cy_ice_wind_mover?
        self.filename = filename
        self.name = os.path.split(filename)[1]

        # check if this is stored with cy_ice_wind_mover?
        self.topology_file = topology_file

        # hand the files to the Cython mover, then configure it
        self.mover.text_read(filename, topology_file)
        self.extrapolate = extrapolate
        self.mover.extrapolate_in_time(extrapolate)
        # time_offset is given in hours; the Cython mover wants seconds
        self.mover.offset_time(time_offset * 3600.)

        super(IceWindMover, self).__init__(**kwargs)

    def __repr__(self):
        return ('IceWindMover('
                'active_start={1.active_start}, '
                'active_stop={1.active_stop}, '
                'on={1.on})'.format(self.mover, self))

    def __str__(self):
        return ('IceWindMover - current _state.\n'
                '  active_start time={1.active_start}\n'
                '  active_stop time={1.active_stop}\n'
                '  current on/off status={1.on}'.format(self.mover, self))

    # Define properties using lambda functions: uses lambda function, which are
    # accessible via fget/fset as follows:
#     current_scale = property(lambda self: self.mover.current_scale,
#                              lambda self, val: setattr(self.mover,
#                                                        'current_scale',
#                                                        val))
#
#     extrapolate = property(lambda self: self.mover.extrapolate,
#                            lambda self, val: setattr(self.mover,
#                                                      'extrapolate',
#                                                      val))
#
#     time_offset = property(lambda self: self.mover.time_offset / 3600.,
#                            lambda self, val: setattr(self.mover,
#                                                      'time_offset',
#                                                      val * 3600.))
#

    def get_grid_data(self):
        # dispatch on the grid type reported by the Cython mover
        if self.mover._is_triangle_grid():
            return self.get_triangles()
        else:
            return self.get_cells()

    def get_center_points(self):
        # dispatch on the grid type reported by the Cython mover
        if self.mover._is_triangle_grid():
            return self.get_triangle_center_points()
        else:
            return self.get_cell_center_points()

    def get_scaled_velocities(self, model_time):
        """
        :param model_time=0:
        """
        num_tri = self.mover.get_num_triangles()
        if self.mover._is_triangle_grid():
            num_cells = num_tri
        else:
            # presumably each rectangular cell is split into two
            # triangles -- TODO confirm against the Cython mover
            num_cells = num_tri / 2
        #vels = np.zeros(num_cells, dtype=basic_types.velocity_rec)
        vels = np.zeros(num_cells, dtype=velocity_rec)
        # filled in-place by the Cython mover
        self.mover.get_scaled_velocities(model_time, vels)

        return vels

    def get_ice_velocities(self, model_time):
        """
        :param model_time=0:
        """
        num_tri = self.mover.get_num_triangles()
        #vels = np.zeros(num_tri, dtype=basic_types.velocity_rec)
        vels = np.zeros(num_tri, dtype=velocity_rec)
        # filled in-place by the Cython mover
        self.mover.get_ice_velocities(model_time, vels)

        return vels

    def get_movement_velocities(self, model_time):
        """
        :param model_time=0:
        """
        num_tri = self.mover.get_num_triangles()
        #vels = np.zeros(num_tri, dtype=basic_types.velocity_rec)
        vels = np.zeros(num_tri, dtype=velocity_rec)
        # filled in-place by the Cython mover
        self.mover.get_movement_velocities(model_time, vels)

        return vels

    def get_ice_fields(self, model_time):
        """
        :param model_time=0:

        Returns (frac_coverage, thickness) arrays, one value per cell,
        filled in-place by the Cython mover.
        """
        num_tri = self.mover.get_num_triangles()
        num_cells = num_tri / 2
        frac_coverage = np.zeros(num_cells, dtype=np.float64)
        thickness = np.zeros(num_cells, dtype=np.float64)
        self.mover.get_ice_fields(model_time, frac_coverage, thickness)

        return frac_coverage, thickness

    def export_topology(self, topology_file):
        """
        :param topology_file=None: absolute or relative path where
                                   topology file will be written.
        """
        if topology_file is None:
            raise ValueError(
                'Topology file path required: {0}'.format(topology_file))

        self.mover.export_topology(topology_file)

    def extrapolate_in_time(self, extrapolate):
        """
        :param extrapolate=false: allow current data to be extrapolated
                                  before and after file data.
        """
        # keep the Python-side flag in sync with the Cython mover
        self.mover.extrapolate_in_time(extrapolate)
        self.extrapolate = extrapolate

    def offset_time(self, time_offset):
        """
        :param offset_time=0: allow data to be in GMT with a time zone offset
                              (hours).
        """
        # hours -> seconds for the Cython mover
        self.mover.offset_time(time_offset * 3600.)

    def get_offset_time(self):
        """
        :param offset_time=0: allow data to be in GMT with a time zone offset
                              (hours).
        """
        # seconds -> hours for the caller
        return (self.mover.get_offset_time()) / 3600.
Example #20
0
class TimeSeriesProp(EnvProp, serializable.Serializable):

    # serialization metadata: extend the base property's state
    _state = copy.deepcopy(EnvProp._state)
    _schema = TimeSeriesPropSchema

    _state.add_field([
        serializable.Field('timeseries', save=False, update=True),
        serializable.Field('data', save=True, update=False)
    ])

    #     _state.update('time', update=False)

    def __init__(self, name=None, units=None, time=None, data=None, **kwargs):
        '''
        A class that represents a scalar natural phenomenon using a time series

        :param name: Name
        :param units: Units
        :param time: Time axis of the data
        :param data: Underlying data source
        :type name: string
        :type units: string
        :type time: [] of datetime.datetime, netCDF4.Variable, or Time object
        :type data: numpy.array, list, or other iterable

        :raises ValueError: if time and data have different lengths
        '''
        if len(time) != len(data):
            raise ValueError(
                "Time and data sequences are of different length.\n\
            len(time) == {0}, len(data) == {1}".format(len(time), len(data)))
        super(TimeSeriesProp, self).__init__(name, units, time, data)
        # re-assign through the property so raw iterables are wrapped
        # into a Time object
        self.time = time
        if isinstance(self.data, list):
            self.data = np.asarray(self.data)

    @classmethod
    def constant(
        cls,
        name=None,
        units=None,
        data=None,
    ):
        # alternate constructor for a time-invariant scalar property
        if any(var is None for var in (name, data, units)):
            raise ValueError("name, data, or units may not be None")

        if not isinstance(data, Number):
            raise TypeError('{0} data must be a number'.format(name))
        t = Time.constant_time()
        return cls(name=name, units=units, time=t, data=[data])

    @property
    def timeseries(self):
        '''
        Creates a representation of the time series

        :rtype: list of (datetime, double) tuples
        '''
        return map(lambda x, y: (x, y), self.time.time, self.data)

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, d):
        # keep data and time axis the same length
        if self.time is not None and len(d) != len(self.time):
            raise ValueError("Data/time interval mismatch")
        else:
            self._data = d

    @property
    def time(self):
        return self._time

    @time.setter
    def time(self, t):
        # keep data and time axis the same length
        if self.data is not None and len(t) != len(self.data):
            raise ValueError("Data/time interval mismatch")
        if isinstance(t, Time):
            self._time = t
        elif isinstance(t, collections.Iterable):
            self._time = Time(t)
        else:
            raise ValueError(
                "Object being assigned must be an iterable or a Time object")

    def set_attr(self, name=None, units=None, time=None, data=None):
        # update any subset of attributes; None means "keep current value"
        self.name = name if name is not None else self.name
        self.units = units if units is not None else self.units
        if data is not None and time is not None:
            if len(time) != len(data):
                raise ValueError("Data/time interval mismatch")
            # assign _data directly to bypass the length check against the
            # OLD time axis; the time property setter then validates
            self._data = data
            self.time = time
        else:
            self.data = data if data is not None else self.data
            self.time = time if time is not None else self.time

    def at(self, points, time, units=None, extrapolate=False, **kwargs):
        '''
        Interpolates this property to the given points at the given time with the units specified
        :param points: A Nx2 array of lon,lat points
        :param time: A datetime object. May be None; if this is so, the variable is assumed to be gridded
        but time-invariant
        :param units: The units that the result would be converted to
        '''
        value = None
        if len(self.time) == 1:
            # single time time series (constant)
            value = np.full((points.shape[0], 1), self.data, dtype=np.float64)
            if units is not None and units != self.units:
                value = unit_conversion.convert(self.units, units, value)
            return value

        if not extrapolate:
            # raises if 'time' lies outside the series' time span
            self.time.valid_time(time)
        t_index = self.time.index_of(time, extrapolate)
        # outside the span (extrapolating): clamp to the nearest endpoint
        if time > self.time.max_time:
            value = self.data[-1]
        if time <= self.time.min_time:
            value = self.data[0]
        if value is None:
            # inside the span: linear interpolation between bracketing steps
            t_alphas = self.time.interp_alpha(time, extrapolate)

            d0 = self.data[t_index - 1]
            d1 = self.data[t_index]
            value = d0 + (d1 - d0) * t_alphas
        if units is not None and units != self.units:
            value = unit_conversion.convert(self.units, units, value)

        # broadcast the scalar result to one value per query point
        return np.full((points.shape[0], 1), value, dtype=np.float64)

    def is_constant(self):
        # a single data point means the property is time-invariant
        return len(self.data) == 1

    def __eq__(self, o):
        # metadata must match exactly; data is compared with a float
        # tolerance via np.isclose
        t1 = (self.name == o.name and self.units == o.units
              and self.time == o.time)
        t2 = all(np.isclose(self.data, o.data))
        return t1 and t2

    def __ne__(self, o):
        return not self.__eq__(o)
Example #21
0
class VectorProp(serializable.Serializable):

    _state = copy.deepcopy(serializable.Serializable._state)
    _schema = VectorPropSchema

    _state.add_field([serializable.Field('units', save=True, update=True),
                      serializable.Field('time', save=True, update=True, save_reference=True)])

    def __init__(self,
                 name=None,
                 units=None,
                 time=None,
                 variables=None,
                 **kwargs):
        '''
        A class that represents a vector natural phenomenon and provides an interface to get the value of
        the phenomenon at a position in space and time. VectorProp is the base class

        :param name: Name of the Property
        :param units: Unit of the underlying data
        :param time: Time axis of the data
        :param variables: component data arrays
        :type name: string
        :type units: string
        :type time: [] of datetime.datetime, netCDF4.Variable, or Time object
        :type variables: [] of EnvProp or numpy.array (Max len=2)

        :raises ValueError: if variables is None or has fewer than 2 entries
        '''

        self.name = self._units = self._time = self._variables = None

        self.name = name

        # validate 'variables' before anything iterates it -- otherwise a
        # None value crashes with an unhelpful TypeError below
        if variables is None or len(variables) < 2:
            raise ValueError('Variables must be an array-like of 2 or more Property objects')

        if all([isinstance(v, EnvProp) for v in variables]):
            # derive missing units/time from the first component
            if time is not None and not isinstance(time, Time):
                time = Time(time)
            units = variables[0].units if units is None else units
            time = variables[0].time if time is None else time
        if units is None:
            units = variables[0].units
        self._units = units
        self.variables = variables
        self._time = time
        # silently discard any unrecognized kwargs before calling super
        unused_args = kwargs.keys() if kwargs is not None else None
        if len(unused_args) > 0:
#             print(unused_args)
            kwargs = {}
        super(VectorProp, self).__init__(**kwargs)

    @property
    def time(self):
        '''
        Time axis of data

        :rtype: gnome.environment.property.Time
        '''
        return self._time

    @property
    def units(self):
        '''
        Units of underlying data

        :rtype: string
        '''
        if hasattr(self._units, '__iter__'):
            # a list of unit strings: collapse to a single string when all
            # components share the same units
            # (bugfix: was len(set(self._units) > 1), which compared the
            # set to an int and then called len() on a bool)
            if len(set(self._units)) > 1:
                return self._units
            else:
                return self._units[0]
        else:
            return self._units

    @units.setter
    def units(self, unit):
        if unit is not None:
            if not unit_conversion.is_supported(unit):
                raise ValueError('Units of {0} are not supported'.format(unit))
        self._units = unit
        # propagate to every component so they stay consistent
        if self.variables is not None:
            for v in self.variables:
                v.units = unit

    @property
    def varnames(self):
        '''
        Names of underlying variables

        :rtype: [] of strings
        '''
        return [v.varname if hasattr(v, 'varname') else v.name for v in self.variables ]

    def _check_consistency(self):
        '''
        Checks that the attributes of each GriddedProp in varlist are the same as the GridVectorProp
        '''
        raise NotImplementedError()

    def at(self, *args, **kwargs):
        '''
        Find the value of the property at positions P at time T

        :param points: Coordinates to be queried (P)
        :param time: The time at which to query these points (T)
        :param time: Specifies the time level of the variable
        :param units: units the values will be returned in (or converted to)
        :param extrapolate: if True, extrapolation will be supported
        :type points: Nx2 array of double
        :type time: datetime.datetime object
        :type time: integer
        :type units: string such as ('m/s', 'knots', etc)
        :type extrapolate: boolean (True or False)
        :return: returns a Nx2 array of interpolated values
        :rtype: double
        '''
        # one column per component variable
        return np.column_stack([var.at(*args, **kwargs) for var in self.variables])
Example #22
0
class Wind(serializable.Serializable, Timeseries, Environment):
    '''
    Defines the Wind conditions for a single point
    '''
    # object is referenced by others using this attribute name
    _ref_as = 'wind'

    # default units for input/output data
    _update = [
        'description',
        'latitude',
        'longitude',
        'source_type',
        'source_id',  # what is source ID? Buoy ID?
        'updated_at',
        'speed_uncertainty_scale'
    ]

    # used to create new obj or as readonly parameter
    _create = []
    _create.extend(_update)

    # serialization metadata: extend the base Environment state
    _state = copy.deepcopy(Environment._state)
    _state.add(save=_create, update=_update)
    _schema = WindSchema

    # add 'filename' as a Field object
    _state.add_field([
        serializable.Field('filename',
                           isdatafile=True,
                           save=True,
                           read=True,
                           test_for_eq=False),
        serializable.Field('timeseries', save=False, update=True),
        # test for equality of units a little differently
        serializable.Field('units', save=True, update=True, test_for_eq=False),
    ])
    _state['name'].test_for_eq = False

    # list of valid velocity units for timeseries
    valid_vel_units = _valid_units('Velocity')

    def __init__(self,
                 timeseries=None,
                 units=None,
                 filename=None,
                 format='r-theta',
                 latitude=None,
                 longitude=None,
                 speed_uncertainty_scale=0.0,
                 **kwargs):
        """
        Build a Wind object either from a file or from a timeseries.

        :param timeseries: wind timeseries data; 'units' must also be given
        :param units: velocity units of the timeseries values
        :param filename: OSSM-format wind file; if given, data and units
            are read from the file and 'timeseries' is ignored
        :param format: timeseries format, e.g. 'r-theta' or 'uv'
        :param latitude: latitude of the wind measurement point
        :param longitude: longitude of the wind measurement point
        :param speed_uncertainty_scale: scale factor for speed uncertainty

        Recognized kwargs (popped off): updated_at, source_id, description,
        source_type, name.

        :raises TypeError: if timeseries is given without units
        """
        self.updated_at = kwargs.pop('updated_at', None)
        self.source_id = kwargs.pop('source_id', 'undefined')
        self.longitude = longitude
        self.latitude = latitude
        self.description = kwargs.pop('description', 'Wind Object')
        self.speed_uncertainty_scale = speed_uncertainty_scale

        if filename is not None:
            # file-based: Timeseries base class reads the file
            self.source_type = kwargs.pop('source_type', 'file')
            super(Wind, self).__init__(filename=filename, format=format)
            self.name = kwargs.pop('name', os.path.split(self.filename)[1])
            # set _user_units attribute to match user_units read from file.
            self._user_units = self.ossm.user_units

            if units is not None:
                self.units = units
        else:
            # only accept source_type values the framework recognizes
            if kwargs.get('source_type') in basic_types.wind_datasource._attr:
                self.source_type = kwargs.pop('source_type')
            else:
                self.source_type = 'undefined'

            # either timeseries is given or nothing is given
            # create an empty default object
            super(Wind, self).__init__(format=format)

            self.units = 'mps'  # units for default object
            if timeseries is not None:
                if units is None:
                    raise TypeError('Units must be provided with timeseries')

                self.set_wind_data(timeseries, units, format)

            self.name = kwargs.pop('name', self.__class__.__name__)

    def _check_units(self, units):
        '''
        Validate that the given units are in Wind.valid_vel_units;
        raise InvalidUnitError otherwise.
        '''
        if units in Wind.valid_vel_units:
            return
        raise uc.InvalidUnitError((units, 'Velocity'))

    def __repr__(self):
        # include the full timeseries representation in the repr
        ts_repr = repr(self.timeseries)
        template = ('{0.__class__.__module__}.{0.__class__.__name__}('
                    'description="{0.description}", '
                    'source_id="{0.source_id}", '
                    'source_type="{0.source_type}", '
                    'units="{0.units}", '
                    'updated_at="{0.updated_at}", '
                    'timeseries={1}'
                    ')')
        return template.format(self, ts_repr)

    # user_units = property( lambda self: self._user_units)

    @property
    def timeseries(self):
        '''
        The entire timeseries in 'r-theta' format, expressed in the units
        the data was entered in (or as specified by the units attribute).
        '''
        data = self.get_wind_data(units=self.units)
        return data

    @timeseries.setter
    def timeseries(self, value):
        '''
        Replace the wind timeseries.  The values are interpreted in the
        units given by self.units; the underlying Cython/C++ object
        stores the timeseries in 'm/s'.
        '''
        self.set_wind_data(value, units=self.units)

    def timeseries_to_dict(self):
        '''
        Timeseries for serialization -- values rounded to 2 decimal places.
        '''
        data = self.get_wind_data(units=self.units)
        data['value'][:] = np.round(data['value'], 2)
        return data

    @property
    def units(self):
        '''
        The units in which wind data is input/output.
        '''
        return self._user_units

    @units.setter
    def units(self, value):
        """
        Set the default units for input/output data.

        Units are given as a string.  Derived classes customize validation
        by overriding _check_units(), which raises for invalid units -- so
        the attribute is only updated once validation succeeds.
        """
        self._check_units(value)
        self._user_units = value

    def _convert_units(self, data, ts_format, from_unit, to_unit):
        '''
        Convert the 'value' column(s) of the date/time value pairs between
        velocity units, in place.  Returns the (mutated) data array.
        '''
        if from_unit == to_unit:
            # nothing to convert
            return data

        data[:, 0] = uc.convert('Velocity', from_unit, to_unit, data[:, 0])

        if ts_format == basic_types.ts_format.uv:
            # TODO: avoid clobbering the 'ts_format' namespace
            data[:, 1] = uc.convert('Velocity', from_unit, to_unit,
                                    data[:, 1])

        return data

    def save(self, saveloc, references=None, name=None):
        '''
        Write the Wind timeseries out to a file (or into a zip archive),
        then delegate the rest of the save to the base class.
        '''
        if name is None:
            name = 'Wind.json'
        ts_name = os.path.splitext(name)[0] + '_data.WND'

        if zipfile.is_zipfile(saveloc):
            # saving into an existing zip: write the data in-archive
            self._write_timeseries_to_zip(saveloc, ts_name)
            self._filename = ts_name
        else:
            # saving into a directory: write a standalone data file
            datafile = os.path.join(saveloc, ts_name)
            self._write_timeseries_to_file(datafile)
            self._filename = datafile

        return super(Wind, self).save(saveloc, references, name)

    def _write_timeseries_to_zip(self, saveloc, ts_name):
        '''
        Render the timeseries into an in-memory buffer and write it
        directly into the zipfile.
        '''
        buf = StringIO.StringIO()
        self._write_timeseries_to_fd(buf)
        self._write_to_zip(saveloc, ts_name, buf.getvalue())

    def _write_timeseries_to_file(self, datafile):
        '''Write the timeseries data to the given file path.'''
        with open(datafile, 'w') as out:
            self._write_timeseries_to_fd(out)

    def _write_timeseries_to_fd(self, fd):
        '''
        Write the timeseries to an already-open file descriptor in the
        "OSSM format" with the full header.
        '''
        # fall back to a unit string the C++ reader always understands
        data_units = self.units
        if data_units not in ossm_wind_units.values():
            data_units = 'meters per second'

        header = ('Station Name\n'
                  'Position\n'
                  '{0}\n'
                  'LTime\n'
                  '0,0,0,0,0,0,0,0\n').format(data_units)
        ts = self.get_wind_data(units=data_units)
        values = ts['value']
        times = ts['time'].astype(datetime.datetime)

        fd.write(header)

        # one "day, month, year, hour, minute, v0, v1" line per record
        for when, row in zip(times, values):
            fd.write('{0.day:02}, '
                     '{0.month:02}, '
                     '{0.year:04}, '
                     '{0.hour:02}, '
                     '{0.minute:02}, '
                     '{1:02.2f}, {2:02.2f}\n'.format(when, round(row[0], 4),
                                                     round(row[1], 4)))

    def update_from_dict(self, data):
        '''
        Update attributes from a dict.  Overrides the base class because
        units must be set *before* the data is updated so that the unit
        conversion is done correctly (all data is stored internally in SI
        units).
        '''
        units_updated = self.update_attr('units',
                                         data.pop('units', self.units))

        if super(Wind, self).update_from_dict(data):
            return True

        return units_updated

    def get_wind_data(self, datetime=None, units=None, format='r-theta'):
        """
        Return the timeseries in the requested format.  With datetime=None
        the original timeseries that was entered is returned; with a list
        of datetime objects, the value for each of those date times is
        determined by the underlying C++ object.

        The output format is defined by the strings 'r-theta', 'uv'

        :param datetime: [optional] datetime object or list of datetime
                         objects for which the value is desired
        :type datetime: datetime object
        :param units: [optional] outputs data in these units. Default is to
            output data without unit conversion
        :type units: string. Uses the unit_conversion module.
        :param format: output format for the times series:
                       either 'r-theta' or 'uv'
        :type format: either string or integer value defined by
                      basic_types.ts_format.* (see cy_basic_types.pyx)

        :returns: numpy array containing dtype=basic_types.datetime_value_2d.
                  Contains user specified datetime and the corresponding
                  values in user specified ts_format

        .. note:: Invokes self._convert_units() to do the unit conversion.
            Override this method to define the derived object's unit
            conversion functionality

        todo: return data in appropriate significant digits
        """
        result = super(Wind, self).get_timeseries(datetime, format)

        if units is None:
            units = self._user_units

        # internal storage is SI (m/s); convert on the way out
        result['value'] = self._convert_units(result['value'],
                                              format, 'meter per second',
                                              units)

        return result

    def set_wind_data(self, wind_data, units, format='r-theta'):
        """
        Set the timeseries of the Wind object to the new value given by
        a numpy array.  The format of the input data defaults to
        basic_types.format.magnitude_direction but can be changed by the
        user.  Units are required with the data.

        :param wind_data: timeseries of wind data defined in a numpy array
        :type wind_data: numpy array of dtype basic_types.datetime_value_2d
        :param units: units associated with the data. Valid units defined in
                      Wind.valid_vel_units list
        :param format: format of the input time series; as defined by
                       basic_types.format.
        :type format: either string or integer value defined by
                      basic_types.format.* (see cy_basic_types.pyx)
        :raises ValueError: if the timeseries fails validation
        """
        if not self._check_timeseries(wind_data):
            raise ValueError('Bad timeseries as input')

        self._check_units(units)
        self.units = units

        wind_data = self._xform_input_timeseries(wind_data)
        # store internally in SI units (m/s)
        wind_data['value'] = self._convert_units(wind_data['value'],
                                                 format, units,
                                                 'meter per second')

        super(Wind, self).set_timeseries(wind_data, format)

    def get_value(self, time):
        '''
        Return the value at the specified time.  Wind timeseries are
        independent of location so no position is required; a gridded
        datafile may require location, so this interface may get
        refactored if it needs to support different types of wind data.
        The result is in SI units (m/s), 'r-theta' format
        (speed, direction).

        :param time: the time(s) you want the data for
        :type time: datetime object or sequence of datetime objects.

        .. note:: It invokes get_wind_data(..) function
        '''
        return tuple(self.get_wind_data(time, 'm/s', 'r-theta')[0]['value'])

    def set_speed_uncertainty(self, up_or_down=None):
        '''
        Shift the wind speed values in the time series according to a
        single-parameter Rayleigh distribution method, scaled by a value
        in the range (0.0 ... 0.5].  The scale represents a plus-or-minus
        percent of uncertainty that the distribution function should
        calculate.

        For each wind value in the time series:

        * it is assumed to be the average speed for that sample time
        * its Rayleigh distribution mode (sigma) is computed
        * depending on ``up_or_down``, an upper or a lower percent
          uncertainty is chosen
        * using the Rayleigh quantile method and the computed percent,
          the wind speed just at or above the fractional area under the
          probability distribution is found
        * the wind speed is replaced with that new value

        Since the wind speed values are changed irreversibly, this should
        probably be done only once.

        :returns: True if the speeds were shifted, False otherwise
        '''
        if up_or_down not in ('up', 'down'):
            return False

        pct = self.speed_uncertainty_scale
        if pct <= 0.0 or pct > 0.5:
            return False

        # shift the quantile up or down from the median by pct
        direction = 1 if up_or_down == 'up' else -1

        time_series = self.get_wind_data()
        for tse in time_series:
            sigma = rayleigh.sigma_from_wind(tse['value'][0])
            tse['value'][0] = rayleigh.quantile(0.5 + direction * pct,
                                                sigma)

        self.set_wind_data(time_series, self.units)

        return True

    def __eq__(self, other):
        '''
        Invoke super to check equality of all 'save' parameters, then
        Timeseries.__eq__ to check equality of the timeseries (super is
        not used in any of the __eq__ methods themselves).
        '''
        return (super(Wind, self).__eq__(other)
                and Timeseries.__eq__(self, other))

    def validate(self):
        '''
        Only issues a warning - the object is always considered valid.

        :returns: (list of warning messages, True)
        '''
        msgs = []

        speeds = self.timeseries['value'][:, 0]
        if np.all(speeds == 0.0):
            msg = 'wind speed is 0'
            self.logger.warning(msg)
            msgs.append(self._warn_pre + msg)

        return (msgs, True)
# --- Example #23 ---
class EnvProp(serializable.Serializable):
    '''
    A class that represents a natural phenomenon and provides an interface
    to get the value of the phenomenon at a position in space and time.
    EnvProp is the base class, and returns only a single value regardless
    of the time.

    Subclasses should override/add any attribute property function
    getter/setters as needed.
    '''

    _state = copy.deepcopy(serializable.Serializable._state)
    _schema = PropertySchema

    _state.add_field([serializable.Field('units', save=True, update=True),
                      serializable.Field('time', save=True, update=True, save_reference=True)])

    def __init__(self,
                 name=None,
                 units=None,
                 time=None,
                 data=None,
                 **kwargs):
        '''
        :param name: Name
        :param units: Units
        :param time: Time axis of the data
        :param data: Value of the property
        :type name: string
        :type units: string
        :type time: [] of datetime.datetime, netCDF4.Variable, or Time object
        :type data: netCDF4.Variable or numpy.array
        '''

        self.name = self._units = self._time = self._data = None

        self.name = name
        self.units = units
        self.data = data
        self.time = time

        # allow arbitrary extra attributes to be attached via kwargs
        for k in kwargs:
            setattr(self, k, kwargs[k])

    @property
    def units(self):
        '''
        Units of underlying data

        :rtype: string
        '''
        return self._units

    @units.setter
    def units(self, unit):
        # validate against the unit_conversion registry before accepting
        if unit is not None:
            if not unit_conversion.is_supported(unit):
                raise ValueError('Units of {0} are not supported'.format(unit))
        self._units = unit

    @property
    def time(self):
        '''
        Time axis of data

        :rtype: gnome.environment.property.Time
        '''
        return self._time

    @time.setter
    def time(self, t):
        # accept None, a ready-made Time object, or any iterable of times
        if t is None:
            self._time = None
        elif isinstance(t, Time):
            self._time = t
        elif isinstance(t, collections.Iterable):
            self._time = Time(t)
        else:
            raise ValueError("Object being assigned must be an iterable or a Time object")

    def at(self, *args, **kwargs):
        '''
        Find the value of the property at positions P at time T

        :param points: Coordinates to be queried (P)
        :param time: The time at which to query these points (T)
        :param time: Specifies the time level of the variable
        :param units: units the values will be returned in (or converted to)
        :param extrapolate: if True, extrapolation will be supported
        :type points: Nx2 array of double
        :type time: datetime.datetime object
        :type time: integer
        :type units: string such as ('m/s', 'knots', etc)
        :type extrapolate: boolean (True or False)
        :return: returns a Nx1 array of interpolated values
        :rtype: double
        :raises NotImplementedError: always, in this base class
        '''

        raise NotImplementedError()

    def in_units(self, unit):
        '''
        Returns a full copy of this property in the units specified.
        WARNING: This will copy the data of the original property!

        :param units: Units to convert to
        :type units: string
        :return: Copy of self converted to new units
        :rtype: Same as self
        '''
        cpy = copy.copy(self)
        if hasattr(cpy.data, '__mul__'):
            cpy.data = unit_conversion.convert(cpy.units, unit, cpy.data)
        else:
            warnings.warn('Data was not converted to new units and was not copied because it does not support multiplication')
        cpy._units = unit
        return cpy
# --- Example #24 ---
class PyCurrentMover(movers.PyMover, serializable.Serializable):

    # serialization state: copy the parent mover's state and register this
    # class's own persisted fields on top of it
    _state = copy.deepcopy(movers.PyMover._state)

    _state.add_field([
        serializable.Field('filename',
                           save=True,
                           read=True,
                           isdatafile=True,
                           test_for_eq=False),
        serializable.Field('current',
                           save=True,
                           read=True,
                           save_reference=True)
    ])
    _state.add(update=['uncertain_duration', 'uncertain_time_delay'],
               save=['uncertain_duration', 'uncertain_time_delay'])
    _schema = PyCurrentMoverSchema

    # name under which other objects may reference movers of this type
    _ref_as = 'py_current_movers'

    # references that must be supplied before the mover can run
    _req_refs = {'current': GridCurrent}
    def __init__(self,
                 current=None,
                 filename=None,
                 extrapolate=False,
                 time_offset=0,
                 current_scale=1,
                 uncertain_duration=24 * 3600,
                 uncertain_time_delay=0,
                 uncertain_along=.5,
                 uncertain_across=.25,
                 uncertain_cross=.25,
                 default_num_method='Trapezoid',
                 **kwargs):
        '''
        Initialize the mover from a gridded current plus uncertainty
        settings.

        NOTE(review): time_offset and uncertain_cross are accepted but
        never stored on the instance -- confirm whether that is
        intentional.
        '''
        self.current = current
        self.filename = filename
        self.extrapolate = extrapolate
        self.current_scale = current_scale

        self.uncertain_along = uncertain_along
        self.uncertain_across = uncertain_across
        self.uncertain_duration = uncertain_duration
        self.uncertain_time_delay = uncertain_time_delay

        self.model_time = 0

        # empty, correctly-typed arrays; filled in as the model runs
        self.positions = np.zeros((0, 3), dtype=world_point_type)
        self.delta = np.zeros((0, 3), dtype=world_point_type)
        self.status_codes = np.zeros((0, 1), dtype=status_code_type)

        # either a 1, or 2 depending on whether spill is certain or not
        self.spill_type = 0

        super(PyCurrentMover,
              self).__init__(default_num_method=default_num_method, **kwargs)

    def _attach_default_refs(self, ref_dict):
        '''
        Delegate default-reference attachment directly to the
        Serializable implementation.

        (Removed a useless `pass` statement that preceded the return.)
        '''
        return serializable.Serializable._attach_default_refs(self, ref_dict)

    @classmethod
    def from_netCDF(cls,
                    filename=None,
                    extrapolate=False,
                    time_offset=0,
                    current_scale=1,
                    uncertain_duration=24 * 3600,
                    uncertain_time_delay=0,
                    uncertain_along=.5,
                    uncertain_across=.25,
                    uncertain_cross=.25,
                    **kwargs):
        '''
        Construct a PyCurrentMover from a netCDF file by first building
        a GridCurrent from it.

        Fix: uncertain_duration and uncertain_time_delay were previously
        accepted but never forwarded to the constructor, so they silently
        fell back to the defaults; they are now passed through.
        '''
        current = GridCurrent.from_netCDF(filename, **kwargs)

        return cls(current=current,
                   filename=filename,
                   extrapolate=extrapolate,
                   time_offset=time_offset,
                   current_scale=current_scale,
                   uncertain_duration=uncertain_duration,
                   uncertain_time_delay=uncertain_time_delay,
                   uncertain_along=uncertain_along,
                   uncertain_across=uncertain_across,
                   uncertain_cross=uncertain_cross,
                   **kwargs)

    def get_scaled_velocities(self, time):
        """
        Return the grid's interpolated velocities at the given time.

        :param time: model time at which to interpolate
        """
        # points=None: interpolate at the grid's own locations
        return self.grid.interpolated_velocities(time, None)

    def get_move(self, sc, time_step, model_time_datetime, num_method=None):
        """
        Compute the move in (long,lat,z) space. It returns the delta move
        for each element of the spill as a numpy array of size
        (number_elements X 3) and dtype = gnome.basic_types.world_point_type

        Each class derived from Mover object must implement its own
        get_move; all movers must implement get_move() since that's what
        the model calls.

        :param sc: an instance of gnome.spill_container.SpillContainer class
        :param time_step: time step in seconds
        :param model_time_datetime: current model time as datetime object
        :param num_method: [optional] name of the numerical method to use;
                           defaults to self.default_num_method
        """
        # Fix: was `self.num_method[num_method]` (singular) in the
        # non-default branch -- the lookup table is `self.num_methods`,
        # so passing a num_method raised AttributeError.
        if num_method is None:
            num_method = self.default_num_method
        method = self.num_methods[num_method]

        positions = sc['positions']
        not_in_water = sc['status_codes'] != oil_status.in_water

        res = method(sc, time_step, model_time_datetime, positions[:],
                     self.current)
        if res.shape[1] == 2:
            # 2D result: pad with a zero z-component
            deltas = np.zeros_like(positions)
            deltas[:, 0:2] = res
        else:
            deltas = res

        deltas = FlatEarthProjection.meters_to_lonlat(deltas, positions)
        # elements not in the water do not move
        deltas[not_in_water] = (0, 0, 0)
        return deltas